gcc/omp-low.c
/* Lowering pass for OMP directives.  Converts OMP directives into explicit
   calls to the runtime library (libgomp), data marshalling to implement data
   sharing and copying clauses, offloading to accelerators, and more.

   Contributed by Diego Novillo <dnovillo@redhat.com>

   Copyright (C) 2005-2021 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "ssa.h"
#include "cgraph.h"
#include "pretty-print.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "tree-iterator.h"
#include "tree-inline.h"
#include "langhooks.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "splay-tree.h"
#include "omp-general.h"
#include "omp-low.h"
#include "gimple-low.h"
#include "alloc-pool.h"
#include "symbol-summary.h"
#include "tree-nested.h"
#include "context.h"
#include "gomp-constants.h"
#include "gimple-pretty-print.h"
#include "stringpool.h"
#include "attribs.h"
#include "omp-offload.h"
/* Lowering of OMP parallel and workshare constructs proceeds in two
   phases.  The first phase scans the function looking for OMP statements
   and then for variables that must be replaced to satisfy data sharing
   clauses.  The second phase expands code for the constructs, as well as
   re-gimplifying things when variables have been replaced with complex
   expressions.

   Final code generation is done by pass_expand_omp.  The flowgraph is
   scanned for regions which are then moved to a new
   function, to be invoked by the thread library, or offloaded.  */
/* Context structure.  Used to store information about each parallel
   directive in the code.  */

struct omp_context
{
  /* This field must be at the beginning, as we do "inheritance": Some
     callback functions for tree-inline.c (e.g., omp_copy_decl)
     receive a copy_body_data pointer that is up-casted to an
     omp_context pointer.  */
  copy_body_data cb;

  /* The tree of contexts corresponding to the encountered constructs.  */
  struct omp_context *outer;
  gimple *stmt;

  /* Map variables to fields in a structure that allows communication
     between sending and receiving threads.  */
  splay_tree field_map;
  tree record_type;
  tree sender_decl;
  tree receiver_decl;

  /* These are used just by task contexts, if task firstprivate fn is
     needed.  srecord_type is used to communicate from the thread
     that encountered the task construct to task firstprivate fn,
     record_type is allocated by GOMP_task, initialized by task firstprivate
     fn and passed to the task body fn.  */
  splay_tree sfield_map;
  tree srecord_type;

  /* A chain of variables to add to the top-level block surrounding the
     construct.  In the case of a parallel, this is in the child function.  */
  tree block_vars;

  /* Label to which GOMP_cancel{,lation_point} and explicit and implicit
     barriers should jump during omplower pass.  */
  tree cancel_label;

  /* The sibling GIMPLE_OMP_FOR simd with _simt_ clause or NULL
     otherwise.  */
  gimple *simt_stmt;

  /* For task reductions registered in this context, a vector containing
     the length of the private copies block (if constant, otherwise NULL)
     and then offsets (if constant, otherwise NULL) for each entry.  */
  vec<tree> task_reductions;

  /* A hash map from the reduction clauses to the registered array
     elts.  */
  hash_map<tree, unsigned> *task_reduction_map;

  /* And a hash map from the lastprivate(conditional:) variables to their
     corresponding tracking loop iteration variables.  */
  hash_map<tree, tree> *lastprivate_conditional_map;

  /* And a hash map from the allocate variables to their corresponding
     allocators.  */
  hash_map<tree, tree> *allocate_map;

  /* A tree_list of the reduction clauses in this context.  This is
     only used for checking the consistency of OpenACC reduction
     clauses in scan_omp_for and is not guaranteed to contain a valid
     value outside of this function.  */
  tree local_reduction_clauses;

  /* A tree_list of the reduction clauses in outer contexts.  This is
     only used for checking the consistency of OpenACC reduction
     clauses in scan_omp_for and is not guaranteed to contain a valid
     value outside of this function.  */
  tree outer_reduction_clauses;

  /* Nesting depth of this context.  Used to beautify error messages
     regarding invalid gotos.  The outermost ctx is depth 1, with depth 0
     being reserved for the main body of the function.  */
  int depth;

  /* True if this parallel directive is nested within another.  */
  bool is_nested;

  /* True if this construct can be cancelled.  */
  bool cancellable;

  /* True if lower_omp_1 should look up lastprivate conditional in parent
     context.  */
  bool combined_into_simd_safelen1;

  /* True if there is nested scan context with inclusive clause.  */
  bool scan_inclusive;

  /* True if there is nested scan context with exclusive clause.  */
  bool scan_exclusive;

  /* True in the second simd loop of for simd with inscan reductions.  */
  bool for_simd_scan_phase;

  /* True if there is order(concurrent) clause on the construct.  */
  bool order_concurrent;

  /* True if there is bind clause on the construct (i.e. a loop construct).  */
  bool loop_p;

  /* Only used for omp target contexts.  True if a teams construct is
     strictly nested in it.  */
  bool teams_nested_p;

  /* Only used for omp target contexts.  True if an OpenMP construct other
     than teams is strictly nested in it.  */
  bool nonteams_nested_p;

  /* Candidates for adjusting OpenACC privatization level.  */
  vec<tree> oacc_privatization_candidates;
};
static splay_tree all_contexts;
static int taskreg_nesting_level;
static int target_nesting_level;
static bitmap task_shared_vars;
static bitmap global_nonaddressable_vars;
static vec<omp_context *> taskreg_contexts;

static void scan_omp (gimple_seq *, omp_context *);
static tree scan_omp_1_op (tree *, int *, void *);
#define WALK_SUBSTMTS  \
    case GIMPLE_BIND: \
    case GIMPLE_TRY: \
    case GIMPLE_CATCH: \
    case GIMPLE_EH_FILTER: \
    case GIMPLE_TRANSACTION: \
      /* The sub-statements for these should be walked.  */ \
      *handled_ops_p = false; \
      break;
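
/* Illustration: WALK_SUBSTMTS is meant to be pasted into the switch of a
   walk_gimple_stmt callback so that container statements (binds, tries,
   transactions, ...) are descended into rather than treated as leaves.
   A hypothetical callback using it would look like:

     static tree
     example_stmt_cb (gimple_stmt_iterator *gsi, bool *handled_ops_p,
		      struct walk_stmt_info *wi)
     {
       gimple *stmt = gsi_stmt (*gsi);
       switch (gimple_code (stmt))
	 {
	 WALK_SUBSTMTS;
	 default:
	   *handled_ops_p = false;
	   break;
	 }
       return NULL_TREE;
     }

   example_stmt_cb is a made-up name; the macro's real users appear later
   in this file.  */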
/* Return whether CTX represents an OpenACC 'parallel' or 'serial' construct.
   (This doesn't include OpenACC 'kernels' decomposed parts.)  */

static bool
is_oacc_parallel_or_serial (omp_context *ctx)
{
  enum gimple_code outer_type = gimple_code (ctx->stmt);
  return ((outer_type == GIMPLE_OMP_TARGET)
	  && ((gimple_omp_target_kind (ctx->stmt)
	       == GF_OMP_TARGET_KIND_OACC_PARALLEL)
	      || (gimple_omp_target_kind (ctx->stmt)
		  == GF_OMP_TARGET_KIND_OACC_SERIAL)));
}
/* Return whether CTX represents an OpenACC 'kernels' construct.
   (This doesn't include OpenACC 'kernels' decomposed parts.)  */

static bool
is_oacc_kernels (omp_context *ctx)
{
  enum gimple_code outer_type = gimple_code (ctx->stmt);
  return ((outer_type == GIMPLE_OMP_TARGET)
	  && (gimple_omp_target_kind (ctx->stmt)
	      == GF_OMP_TARGET_KIND_OACC_KERNELS));
}
/* Return whether CTX represents an OpenACC 'kernels' decomposed part.  */

static bool
is_oacc_kernels_decomposed_part (omp_context *ctx)
{
  enum gimple_code outer_type = gimple_code (ctx->stmt);
  return ((outer_type == GIMPLE_OMP_TARGET)
	  && ((gimple_omp_target_kind (ctx->stmt)
	       == GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED)
	      || (gimple_omp_target_kind (ctx->stmt)
		  == GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE)
	      || (gimple_omp_target_kind (ctx->stmt)
		  == GF_OMP_TARGET_KIND_OACC_DATA_KERNELS)));
}
/* Return true if STMT corresponds to an OpenMP target region.  */

static bool
is_omp_target (gimple *stmt)
{
  if (gimple_code (stmt) == GIMPLE_OMP_TARGET)
    {
      int kind = gimple_omp_target_kind (stmt);
      return (kind == GF_OMP_TARGET_KIND_REGION
	      || kind == GF_OMP_TARGET_KIND_DATA
	      || kind == GF_OMP_TARGET_KIND_ENTER_DATA
	      || kind == GF_OMP_TARGET_KIND_EXIT_DATA);
    }
  return false;
}
/* If DECL is the artificial dummy VAR_DECL created for non-static
   data member privatization, return the underlying "this" parameter,
   otherwise return NULL.  */

tree
omp_member_access_dummy_var (tree decl)
{
  if (!VAR_P (decl)
      || !DECL_ARTIFICIAL (decl)
      || !DECL_IGNORED_P (decl)
      || !DECL_HAS_VALUE_EXPR_P (decl)
      || !lang_hooks.decls.omp_disregard_value_expr (decl, false))
    return NULL_TREE;

  tree v = DECL_VALUE_EXPR (decl);
  if (TREE_CODE (v) != COMPONENT_REF)
    return NULL_TREE;

  while (1)
    switch (TREE_CODE (v))
      {
      case COMPONENT_REF:
      case MEM_REF:
      case INDIRECT_REF:
      CASE_CONVERT:
      case POINTER_PLUS_EXPR:
	v = TREE_OPERAND (v, 0);
	continue;
      case PARM_DECL:
	if (DECL_CONTEXT (v) == current_function_decl
	    && DECL_ARTIFICIAL (v)
	    && TREE_CODE (TREE_TYPE (v)) == POINTER_TYPE)
	  return v;
	return NULL_TREE;
      default:
	return NULL_TREE;
      }
}
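
/* Illustrative example (not from this file): for a C++ non-static member
   used in a data-sharing clause, e.g.

     struct S
     {
       int n;
       void f ();	// body contains: #pragma omp parallel private (n)
     };

   the front end creates an artificial VAR_DECL standing in for N whose
   DECL_VALUE_EXPR is effectively this->n, a COMPONENT_REF whose innermost
   base is the artificial "this" PARM_DECL.  The loop above peels
   COMPONENT_REFs, dereferences, conversions and pointer arithmetic off
   that value expression until it reaches that PARM_DECL.  */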
/* Helper for unshare_and_remap, called through walk_tree.  */

static tree
unshare_and_remap_1 (tree *tp, int *walk_subtrees, void *data)
{
  tree *pair = (tree *) data;
  if (*tp == pair[0])
    {
      *tp = unshare_expr (pair[1]);
      *walk_subtrees = 0;
    }
  else if (IS_TYPE_OR_DECL_P (*tp))
    *walk_subtrees = 0;
  return NULL_TREE;
}
/* Return unshare_expr (X) with all occurrences of FROM
   replaced with TO.  */

static tree
unshare_and_remap (tree x, tree from, tree to)
{
  tree pair[2] = { from, to };
  x = unshare_expr (x);
  walk_tree (&x, unshare_and_remap_1, pair, NULL);
  return x;
}
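
/* Usage sketch (X, FROM and TO are hypothetical trees):

     tree copy = unshare_and_remap (x, from, to);

   COPY shares no tree nodes with X, and every occurrence of FROM in it
   has been replaced by an unshared copy of TO; X itself is left
   untouched.  */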
/* Convenience function for calling scan_omp_1_op on tree operands.  */

static inline tree
scan_omp_op (tree *tp, omp_context *ctx)
{
  struct walk_stmt_info wi;

  memset (&wi, 0, sizeof (wi));
  wi.info = ctx;
  wi.want_locations = true;

  return walk_tree (tp, scan_omp_1_op, &wi, NULL);
}
static void lower_omp (gimple_seq *, omp_context *);
static tree lookup_decl_in_outer_ctx (tree, omp_context *);
static tree maybe_lookup_decl_in_outer_ctx (tree, omp_context *);
/* Return true if CTX is for an omp parallel.  */

static inline bool
is_parallel_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL;
}

/* Return true if CTX is for an omp task.  */

static inline bool
is_task_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_TASK;
}

/* Return true if CTX is for an omp taskloop.  */

static inline bool
is_taskloop_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
	 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP;
}

/* Return true if CTX is for a host omp teams.  */

static inline bool
is_host_teams_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
	 && gimple_omp_teams_host (as_a <gomp_teams *> (ctx->stmt));
}

/* Return true if CTX is for an omp parallel or omp task or host omp teams
   (the last one is strictly not a task region in OpenMP speak, but we
   need to treat it similarly).  */

static inline bool
is_taskreg_ctx (omp_context *ctx)
{
  return is_parallel_ctx (ctx) || is_task_ctx (ctx) || is_host_teams_ctx (ctx);
}

/* Return true if EXPR is variable sized.  */

static inline bool
is_variable_sized (const_tree expr)
{
  return !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
}
/* Lookup variables.  The "maybe" form allows the variable not to have
   been entered; otherwise we assert that it has been.  */

static inline tree
lookup_decl (tree var, omp_context *ctx)
{
  tree *n = ctx->cb.decl_map->get (var);
  return *n;
}

static inline tree
maybe_lookup_decl (const_tree var, omp_context *ctx)
{
  tree *n = ctx->cb.decl_map->get (const_cast<tree> (var));
  return n ? *n : NULL_TREE;
}

static inline tree
lookup_field (tree var, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->field_map, (splay_tree_key) var);
  return (tree) n->value;
}

static inline tree
lookup_sfield (splay_tree_key key, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->sfield_map
			 ? ctx->sfield_map : ctx->field_map, key);
  return (tree) n->value;
}

static inline tree
lookup_sfield (tree var, omp_context *ctx)
{
  return lookup_sfield ((splay_tree_key) var, ctx);
}

static inline tree
maybe_lookup_field (splay_tree_key key, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->field_map, key);
  return n ? (tree) n->value : NULL_TREE;
}

static inline tree
maybe_lookup_field (tree var, omp_context *ctx)
{
  return maybe_lookup_field ((splay_tree_key) var, ctx);
}
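
/* Note on keys: FIELD_MAP and SFIELD_MAP are normally keyed by the
   variable's tree pointer cast to splay_tree_key.  install_var_field
   below also installs entries keyed by &DECL_UID (VAR) or
   &DECL_NAME (VAR) for cases where one variable needs several distinct
   fields (taskloop lastprivate and Fortran array descriptors,
   respectively); code that installs such keys must look them up the
   same way.  */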
/* Return true if DECL should be copied by pointer.  SHARED_CTX is
   the parallel context if DECL is to be shared.  */

static bool
use_pointer_for_field (tree decl, omp_context *shared_ctx)
{
  if (AGGREGATE_TYPE_P (TREE_TYPE (decl))
      || TYPE_ATOMIC (TREE_TYPE (decl)))
    return true;

  /* We can only use copy-in/copy-out semantics for shared variables
     when we know the value is not accessible from an outer scope.  */
  if (shared_ctx)
    {
      gcc_assert (!is_gimple_omp_oacc (shared_ctx->stmt));

      /* ??? Trivially accessible from anywhere.  But why would we even
	 be passing an address in this case?  Should we simply assert
	 this to be false, or should we have a cleanup pass that removes
	 these from the list of mappings?  */
      if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, shared_ctx)))
	return true;

      /* For variables with DECL_HAS_VALUE_EXPR_P set, we cannot tell
	 without analyzing the expression whether or not its location
	 is accessible to anyone else.  In the case of nested parallel
	 regions it certainly may be.  */
      if (TREE_CODE (decl) != RESULT_DECL && DECL_HAS_VALUE_EXPR_P (decl))
	return true;

      /* Do not use copy-in/copy-out for variables that have their
	 address taken.  */
      if (is_global_var (decl))
	{
	  /* For file scope vars, track whether we've seen them as
	     non-addressable initially and in that case, keep the same
	     answer for the duration of the pass, even when they are made
	     addressable later on e.g. through reduction expansion.  Global
	     variables which weren't addressable before the pass will not
	     have their privatized copies address taken.  See PR91216.  */
	  if (!TREE_ADDRESSABLE (decl))
	    {
	      if (!global_nonaddressable_vars)
		global_nonaddressable_vars = BITMAP_ALLOC (NULL);
	      bitmap_set_bit (global_nonaddressable_vars, DECL_UID (decl));
	    }
	  else if (!global_nonaddressable_vars
		   || !bitmap_bit_p (global_nonaddressable_vars,
				     DECL_UID (decl)))
	    return true;
	}
      else if (TREE_ADDRESSABLE (decl))
	return true;

      /* lower_send_shared_vars only uses copy-in, but not copy-out
	 for these.  */
      if (TREE_READONLY (decl)
	  || ((TREE_CODE (decl) == RESULT_DECL
	       || TREE_CODE (decl) == PARM_DECL)
	      && DECL_BY_REFERENCE (decl)))
	return false;

      /* Disallow copy-in/out in nested parallel if
	 decl is shared in outer parallel, otherwise
	 each thread could store the shared variable
	 in its own copy-in location, making the
	 variable no longer really shared.  */
      if (shared_ctx->is_nested)
	{
	  omp_context *up;

	  for (up = shared_ctx->outer; up; up = up->outer)
	    if ((is_taskreg_ctx (up)
		 || (gimple_code (up->stmt) == GIMPLE_OMP_TARGET
		     && is_gimple_omp_offloaded (up->stmt)))
		&& maybe_lookup_decl (decl, up))
	      break;

	  if (up)
	    {
	      tree c;

	      if (gimple_code (up->stmt) == GIMPLE_OMP_TARGET)
		{
		  for (c = gimple_omp_target_clauses (up->stmt);
		       c; c = OMP_CLAUSE_CHAIN (c))
		    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
			&& OMP_CLAUSE_DECL (c) == decl)
		      break;
		}
	      else
		for (c = gimple_omp_taskreg_clauses (up->stmt);
		     c; c = OMP_CLAUSE_CHAIN (c))
		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
		      && OMP_CLAUSE_DECL (c) == decl)
		    break;

	      if (c)
		goto maybe_mark_addressable_and_ret;
	    }
	}

      /* For tasks avoid using copy-in/out.  As tasks can be
	 deferred or executed in different thread, when GOMP_task
	 returns, the task hasn't necessarily terminated.  */
      if (is_task_ctx (shared_ctx))
	{
	  tree outer;
	maybe_mark_addressable_and_ret:
	  outer = maybe_lookup_decl_in_outer_ctx (decl, shared_ctx);
	  if (is_gimple_reg (outer) && !omp_member_access_dummy_var (outer))
	    {
	      /* Taking address of OUTER in lower_send_shared_vars
		 might need regimplification of everything that uses the
		 variable.  */
	      if (!task_shared_vars)
		task_shared_vars = BITMAP_ALLOC (NULL);
	      bitmap_set_bit (task_shared_vars, DECL_UID (outer));
	      TREE_ADDRESSABLE (outer) = 1;
	    }
	  return true;
	}
    }

  return false;
}
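
/* In short, the function above answers "copy by value or pass an
   address?": aggregates, atomics, addressable variables, value-exprs
   and anything shared with an enclosing parallel or task go by pointer,
   while read-only scalars and by-reference RESULT/PARM_DECLs may use
   copy-in only; for task contexts the shared variable is additionally
   forced addressable so its address can outlive GOMP_task.
   Illustration (not from this file):

     void f (void)
     {
       int a = 0;	// scalar, non-addressable: copy-in/copy-out
       int b[10];	// aggregate: always passed by pointer
     #pragma omp parallel shared (a, b)
       ;
     }
*/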
/* Construct a new automatic decl similar to VAR.  */

static tree
omp_copy_decl_2 (tree var, tree name, tree type, omp_context *ctx)
{
  tree copy = copy_var_decl (var, name, type);

  DECL_CONTEXT (copy) = current_function_decl;

  if (ctx)
    {
      DECL_CHAIN (copy) = ctx->block_vars;
      ctx->block_vars = copy;
    }
  else
    record_vars (copy);

  /* If VAR is listed in task_shared_vars, it means it wasn't
     originally addressable and is addressable just because task needs
     to take its address.  But we don't need to take the address of
     privatizations from that var.  */
  if (TREE_ADDRESSABLE (var)
      && ((task_shared_vars
	   && bitmap_bit_p (task_shared_vars, DECL_UID (var)))
	  || (global_nonaddressable_vars
	      && bitmap_bit_p (global_nonaddressable_vars, DECL_UID (var)))))
    TREE_ADDRESSABLE (copy) = 0;

  return copy;
}

static tree
omp_copy_decl_1 (tree var, omp_context *ctx)
{
  return omp_copy_decl_2 (var, DECL_NAME (var), TREE_TYPE (var), ctx);
}
/* Build COMPONENT_REF and set TREE_THIS_VOLATILE and TREE_READONLY on it
   as appropriate.  */
/* See also 'gcc/omp-oacc-neuter-broadcast.cc:oacc_build_component_ref'.  */

static tree
omp_build_component_ref (tree obj, tree field)
{
  tree ret = build3 (COMPONENT_REF, TREE_TYPE (field), obj, field, NULL);
  if (TREE_THIS_VOLATILE (field))
    TREE_THIS_VOLATILE (ret) |= 1;
  if (TREE_READONLY (field))
    TREE_READONLY (ret) |= 1;
  return ret;
}
/* Build tree nodes to access the field for VAR on the receiver side.  */

static tree
build_receiver_ref (tree var, bool by_ref, omp_context *ctx)
{
  tree x, field = lookup_field (var, ctx);

  /* If the receiver record type was remapped in the child function,
     remap the field into the new record type.  */
  x = maybe_lookup_field (field, ctx);
  if (x != NULL)
    field = x;

  x = build_simple_mem_ref (ctx->receiver_decl);
  TREE_THIS_NOTRAP (x) = 1;
  x = omp_build_component_ref (x, field);
  if (by_ref)
    {
      x = build_simple_mem_ref (x);
      TREE_THIS_NOTRAP (x) = 1;
    }

  return x;
}
/* Build tree nodes to access VAR in the scope outer to CTX.  In the case
   of a parallel, this is a component reference; for workshare constructs
   this is some variable.  */

static tree
build_outer_var_ref (tree var, omp_context *ctx,
		     enum omp_clause_code code = OMP_CLAUSE_ERROR)
{
  tree x;
  omp_context *outer = ctx->outer;
  for (; outer; outer = outer->outer)
    {
      if (gimple_code (outer->stmt) == GIMPLE_OMP_TASKGROUP)
	continue;
      if (gimple_code (outer->stmt) == GIMPLE_OMP_SCOPE
	  && !maybe_lookup_decl (var, outer))
	continue;
      break;
    }

  if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
    x = var;
  else if (is_variable_sized (var))
    {
      x = TREE_OPERAND (DECL_VALUE_EXPR (var), 0);
      x = build_outer_var_ref (x, ctx, code);
      x = build_simple_mem_ref (x);
    }
  else if (is_taskreg_ctx (ctx))
    {
      bool by_ref = use_pointer_for_field (var, NULL);
      x = build_receiver_ref (var, by_ref, ctx);
    }
  else if ((gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
	    && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
	   || ctx->loop_p
	   || (code == OMP_CLAUSE_PRIVATE
	       && (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
		   || gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS
		   || gimple_code (ctx->stmt) == GIMPLE_OMP_SINGLE)))
    {
      /* #pragma omp simd isn't a worksharing construct, and can reference
	 even private vars in its linear etc. clauses.
	 Similarly for OMP_CLAUSE_PRIVATE with outer ref, that can refer
	 to private vars in all worksharing constructs.  */
      x = NULL_TREE;
      if (outer && is_taskreg_ctx (outer))
	x = lookup_decl (var, outer);
      else if (outer)
	x = maybe_lookup_decl_in_outer_ctx (var, ctx);
      if (x == NULL_TREE)
	x = var;
    }
  else if (code == OMP_CLAUSE_LASTPRIVATE && is_taskloop_ctx (ctx))
    {
      gcc_assert (outer);
      splay_tree_node n
	= splay_tree_lookup (outer->field_map,
			     (splay_tree_key) &DECL_UID (var));
      if (n == NULL)
	{
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, outer)))
	    x = var;
	  else
	    x = lookup_decl (var, outer);
	}
      else
	{
	  tree field = (tree) n->value;
	  /* If the receiver record type was remapped in the child function,
	     remap the field into the new record type.  */
	  x = maybe_lookup_field (field, outer);
	  if (x != NULL)
	    field = x;

	  x = build_simple_mem_ref (outer->receiver_decl);
	  x = omp_build_component_ref (x, field);
	  if (use_pointer_for_field (var, outer))
	    x = build_simple_mem_ref (x);
	}
    }
  else if (outer)
    x = lookup_decl (var, outer);
  else if (omp_privatize_by_reference (var))
    /* This can happen with orphaned constructs.  If var is reference, it is
       possible it is shared and as such valid.  */
    x = var;
  else if (omp_member_access_dummy_var (var))
    x = var;
  else
    gcc_unreachable ();

  if (x == var)
    {
      tree t = omp_member_access_dummy_var (var);
      if (t)
	{
	  x = DECL_VALUE_EXPR (var);
	  tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
	  if (o != t)
	    x = unshare_and_remap (x, t, o);
	  else
	    x = unshare_expr (x);
	}
    }

  if (omp_privatize_by_reference (var))
    x = build_simple_mem_ref (x);

  return x;
}
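
/* Summary of the lookup order implemented above: the context chain is
   first walked past taskgroups and scopes that do not map VAR; then, in
   order, globals resolve to themselves, variable-sized decls go through
   their VALUE_EXPR pointer, task/parallel/teams contexts read the field
   out of the receiver record, simd and private-on-worksharing
   references fall back to the outer decl, and taskloop lastprivate uses
   the &DECL_UID keyed field installed by install_var_field.  */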
/* Build tree nodes to access the field for VAR on the sender side.  */

static tree
build_sender_ref (splay_tree_key key, omp_context *ctx)
{
  tree field = lookup_sfield (key, ctx);
  return omp_build_component_ref (ctx->sender_decl, field);
}

static tree
build_sender_ref (tree var, omp_context *ctx)
{
  return build_sender_ref ((splay_tree_key) var, ctx);
}
/* Add a new field for VAR inside the structure CTX->SENDER_DECL.  */

static void
install_var_field (tree var, bool by_ref, int mask, omp_context *ctx)
{
  tree field, type, sfield = NULL_TREE;
  splay_tree_key key = (splay_tree_key) var;

  if ((mask & 16) != 0)
    {
      key = (splay_tree_key) &DECL_NAME (var);
      gcc_checking_assert (key != (splay_tree_key) var);
    }
  if ((mask & 8) != 0)
    {
      key = (splay_tree_key) &DECL_UID (var);
      gcc_checking_assert (key != (splay_tree_key) var);
    }
  gcc_assert ((mask & 1) == 0
	      || !splay_tree_lookup (ctx->field_map, key));
  gcc_assert ((mask & 2) == 0 || !ctx->sfield_map
	      || !splay_tree_lookup (ctx->sfield_map, key));
  gcc_assert ((mask & 3) == 3
	      || !is_gimple_omp_oacc (ctx->stmt));

  type = TREE_TYPE (var);
  if ((mask & 16) != 0)
    type = lang_hooks.decls.omp_array_data (var, true);

  /* Prevent redeclaring the var in the split-off function with a restrict
     pointer type.  Note that we only clear type itself, restrict qualifiers in
     the pointed-to type will be ignored by points-to analysis.  */
  if (POINTER_TYPE_P (type)
      && TYPE_RESTRICT (type))
    type = build_qualified_type (type, TYPE_QUALS (type) & ~TYPE_QUAL_RESTRICT);

  if (mask & 4)
    {
      gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
      type = build_pointer_type (build_pointer_type (type));
    }
  else if (by_ref)
    type = build_pointer_type (type);
  else if ((mask & (32 | 3)) == 1
	   && omp_privatize_by_reference (var))
    type = TREE_TYPE (type);

  field = build_decl (DECL_SOURCE_LOCATION (var),
		      FIELD_DECL, DECL_NAME (var), type);

  /* Remember what variable this field was created for.  This does have a
     side effect of making dwarf2out ignore this member, so for helpful
     debugging we clear it later in delete_omp_context.  */
  DECL_ABSTRACT_ORIGIN (field) = var;
  if ((mask & 16) == 0 && type == TREE_TYPE (var))
    {
      SET_DECL_ALIGN (field, DECL_ALIGN (var));
      DECL_USER_ALIGN (field) = DECL_USER_ALIGN (var);
      TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (var);
    }
  else
    SET_DECL_ALIGN (field, TYPE_ALIGN (type));

  if ((mask & 3) == 3)
    {
      insert_field_into_struct (ctx->record_type, field);
      if (ctx->srecord_type)
	{
	  sfield = build_decl (DECL_SOURCE_LOCATION (var),
			       FIELD_DECL, DECL_NAME (var), type);
	  DECL_ABSTRACT_ORIGIN (sfield) = var;
	  SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
	  DECL_USER_ALIGN (sfield) = DECL_USER_ALIGN (field);
	  TREE_THIS_VOLATILE (sfield) = TREE_THIS_VOLATILE (field);
	  insert_field_into_struct (ctx->srecord_type, sfield);
	}
    }
  else
    {
      if (ctx->srecord_type == NULL_TREE)
	{
	  tree t;

	  ctx->srecord_type = lang_hooks.types.make_type (RECORD_TYPE);
	  ctx->sfield_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
	  for (t = TYPE_FIELDS (ctx->record_type); t ; t = TREE_CHAIN (t))
	    {
	      sfield = build_decl (DECL_SOURCE_LOCATION (t),
				   FIELD_DECL, DECL_NAME (t), TREE_TYPE (t));
	      DECL_ABSTRACT_ORIGIN (sfield) = DECL_ABSTRACT_ORIGIN (t);
	      insert_field_into_struct (ctx->srecord_type, sfield);
	      splay_tree_insert (ctx->sfield_map,
				 (splay_tree_key) DECL_ABSTRACT_ORIGIN (t),
				 (splay_tree_value) sfield);
	    }
	}
      sfield = field;
      insert_field_into_struct ((mask & 1) ? ctx->record_type
				: ctx->srecord_type, field);
    }

  if (mask & 1)
    splay_tree_insert (ctx->field_map, key, (splay_tree_value) field);
  if ((mask & 2) && ctx->sfield_map)
    splay_tree_insert (ctx->sfield_map, key, (splay_tree_value) sfield);
}
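
/* Note on MASK bits, inferred from the uses in this file:
     1  - install the field in RECORD_TYPE / FIELD_MAP;
     2  - install it in SRECORD_TYPE / SFIELD_MAP (task firstprivate fn);
     4  - wrap an ARRAY_TYPE in a double pointer;
     8  - key the map by &DECL_UID (VAR) instead of VAR itself;
     16 - key by &DECL_NAME (VAR) and use the Fortran descriptor type;
     32 - with bit 1, keep the reference type (allocate clause on task).
   So install_var_field (decl, by_ref, 3, ctx) installs matching sender
   and receiver fields, while the 11 (= 8|3) and 19 (= 16|3) calls
   elsewhere in this file additionally switch the lookup key.  */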
static tree
install_var_local (tree var, omp_context *ctx)
{
  tree new_var = omp_copy_decl_1 (var, ctx);
  insert_decl_map (&ctx->cb, var, new_var);
  return new_var;
}
/* Adjust the replacement for DECL in CTX for the new context.  This means
   copying the DECL_VALUE_EXPR, and fixing up the type.  */

static void
fixup_remapped_decl (tree decl, omp_context *ctx, bool private_debug)
{
  tree new_decl, size;

  new_decl = lookup_decl (decl, ctx);

  TREE_TYPE (new_decl) = remap_type (TREE_TYPE (decl), &ctx->cb);

  if ((!TREE_CONSTANT (DECL_SIZE (new_decl)) || private_debug)
      && DECL_HAS_VALUE_EXPR_P (decl))
    {
      tree ve = DECL_VALUE_EXPR (decl);
      walk_tree (&ve, copy_tree_body_r, &ctx->cb, NULL);
      SET_DECL_VALUE_EXPR (new_decl, ve);
      DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
    }

  if (!TREE_CONSTANT (DECL_SIZE (new_decl)))
    {
      size = remap_decl (DECL_SIZE (decl), &ctx->cb);
      if (size == error_mark_node)
	size = TYPE_SIZE (TREE_TYPE (new_decl));
      DECL_SIZE (new_decl) = size;

      size = remap_decl (DECL_SIZE_UNIT (decl), &ctx->cb);
      if (size == error_mark_node)
	size = TYPE_SIZE_UNIT (TREE_TYPE (new_decl));
      DECL_SIZE_UNIT (new_decl) = size;
    }
}
/* The callback for remap_decl.  Search all containing contexts for a
   mapping of the variable; this avoids having to duplicate the splay
   tree ahead of time.  We know a mapping doesn't already exist in the
   given context.  Create new mappings to implement default semantics.  */

static tree
omp_copy_decl (tree var, copy_body_data *cb)
{
  omp_context *ctx = (omp_context *) cb;
  tree new_var;

  if (TREE_CODE (var) == LABEL_DECL)
    {
      if (FORCED_LABEL (var) || DECL_NONLOCAL (var))
	return var;
      new_var = create_artificial_label (DECL_SOURCE_LOCATION (var));
      DECL_CONTEXT (new_var) = current_function_decl;
      insert_decl_map (&ctx->cb, var, new_var);
      return new_var;
    }

  while (!is_taskreg_ctx (ctx))
    {
      ctx = ctx->outer;
      if (ctx == NULL)
	return var;
      new_var = maybe_lookup_decl (var, ctx);
      if (new_var)
	return new_var;
    }

  if (is_global_var (var) || decl_function_context (var) != ctx->cb.src_fn)
    return var;

  return error_mark_node;
}
/* Create a new context, with OUTER_CTX being the surrounding context.  */

static omp_context *
new_omp_context (gimple *stmt, omp_context *outer_ctx)
{
  omp_context *ctx = XCNEW (omp_context);

  splay_tree_insert (all_contexts, (splay_tree_key) stmt,
		     (splay_tree_value) ctx);
  ctx->stmt = stmt;

  if (outer_ctx)
    {
      ctx->outer = outer_ctx;
      ctx->cb = outer_ctx->cb;
      ctx->cb.block = NULL;
      ctx->depth = outer_ctx->depth + 1;
    }
  else
    {
      ctx->cb.src_fn = current_function_decl;
      ctx->cb.dst_fn = current_function_decl;
      ctx->cb.src_node = cgraph_node::get (current_function_decl);
      gcc_checking_assert (ctx->cb.src_node);
      ctx->cb.dst_node = ctx->cb.src_node;
      ctx->cb.src_cfun = cfun;
      ctx->cb.copy_decl = omp_copy_decl;
      ctx->cb.eh_lp_nr = 0;
      ctx->cb.transform_call_graph_edges = CB_CGE_MOVE;
      ctx->cb.adjust_array_error_bounds = true;
      ctx->cb.dont_remap_vla_if_no_change = true;
      ctx->depth = 1;
    }

  ctx->cb.decl_map = new hash_map<tree, tree>;

  return ctx;
}
static gimple_seq maybe_catch_exception (gimple_seq);

/* Finalize task copyfn.  */

static void
finalize_task_copyfn (gomp_task *task_stmt)
{
  struct function *child_cfun;
  tree child_fn;
  gimple_seq seq = NULL, new_seq;
  gbind *bind;

  child_fn = gimple_omp_task_copy_fn (task_stmt);
  if (child_fn == NULL_TREE)
    return;

  child_cfun = DECL_STRUCT_FUNCTION (child_fn);
  DECL_STRUCT_FUNCTION (child_fn)->curr_properties = cfun->curr_properties;

  push_cfun (child_cfun);
  bind = gimplify_body (child_fn, false);
  gimple_seq_add_stmt (&seq, bind);
  new_seq = maybe_catch_exception (seq);
  if (new_seq != seq)
    {
      bind = gimple_build_bind (NULL, new_seq, NULL);
      seq = NULL;
      gimple_seq_add_stmt (&seq, bind);
    }
  gimple_set_body (child_fn, seq);
  pop_cfun ();

  /* Inform the callgraph about the new function.  */
  cgraph_node *node = cgraph_node::get_create (child_fn);
  node->parallelized_function = 1;
  cgraph_node::add_new_function (child_fn, false);
}
/* Destroy an omp_context data structure.  Called through the splay tree
   value delete callback.  */

static void
delete_omp_context (splay_tree_value value)
{
  omp_context *ctx = (omp_context *) value;

  delete ctx->cb.decl_map;

  if (ctx->field_map)
    splay_tree_delete (ctx->field_map);
  if (ctx->sfield_map)
    splay_tree_delete (ctx->sfield_map);

  /* We hijacked DECL_ABSTRACT_ORIGIN earlier.  We need to clear it before
     it produces corrupt debug information.  */
  if (ctx->record_type)
    {
      tree t;
      for (t = TYPE_FIELDS (ctx->record_type); t ; t = DECL_CHAIN (t))
	DECL_ABSTRACT_ORIGIN (t) = NULL;
    }
  if (ctx->srecord_type)
    {
      tree t;
      for (t = TYPE_FIELDS (ctx->srecord_type); t ; t = DECL_CHAIN (t))
	DECL_ABSTRACT_ORIGIN (t) = NULL;
    }

  if (is_task_ctx (ctx))
    finalize_task_copyfn (as_a <gomp_task *> (ctx->stmt));

  if (ctx->task_reduction_map)
    {
      ctx->task_reductions.release ();
      delete ctx->task_reduction_map;
    }

  delete ctx->lastprivate_conditional_map;
  delete ctx->allocate_map;

  XDELETE (ctx);
}
/* Fix up RECEIVER_DECL with a type that has been remapped to the child
   context.  */

static void
fixup_child_record_type (omp_context *ctx)
{
  tree f, type = ctx->record_type;

  if (!ctx->receiver_decl)
    return;
  /* ??? It isn't sufficient to just call remap_type here, because
     variably_modified_type_p doesn't work the way we expect for
     record types.  Testing each field for whether it needs remapping
     and creating a new record by hand works, however.  */
  for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
    if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
      break;
  if (f)
    {
      tree name, new_fields = NULL;

      type = lang_hooks.types.make_type (RECORD_TYPE);
      name = DECL_NAME (TYPE_NAME (ctx->record_type));
      name = build_decl (DECL_SOURCE_LOCATION (ctx->receiver_decl),
			 TYPE_DECL, name, type);
      TYPE_NAME (type) = name;

      for (f = TYPE_FIELDS (ctx->record_type); f ; f = DECL_CHAIN (f))
	{
	  tree new_f = copy_node (f);
	  DECL_CONTEXT (new_f) = type;
	  TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &ctx->cb);
	  DECL_CHAIN (new_f) = new_fields;
	  walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &ctx->cb, NULL);
	  walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r,
		     &ctx->cb, NULL);
	  walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
		     &ctx->cb, NULL);
	  new_fields = new_f;

	  /* Arrange to be able to look up the receiver field
	     given the sender field.  */
	  splay_tree_insert (ctx->field_map, (splay_tree_key) f,
			     (splay_tree_value) new_f);
	}
      TYPE_FIELDS (type) = nreverse (new_fields);
      layout_type (type);
    }

  /* In a target region we never modify any of the pointers in *.omp_data_i,
     so attempt to help the optimizers.  */
  if (is_gimple_omp_offloaded (ctx->stmt))
    type = build_qualified_type (type, TYPE_QUAL_CONST);

  TREE_TYPE (ctx->receiver_decl)
    = build_qualified_type (build_reference_type (type), TYPE_QUAL_RESTRICT);
}
/* Instantiate decls as necessary in CTX to satisfy the data sharing
   specified by CLAUSES.  */

static void
scan_sharing_clauses (tree clauses, omp_context *ctx)
{
  tree c, decl;
  bool scan_array_reductions = false;

  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_ALLOCATE
	&& (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c) == NULL_TREE
	    /* omp_default_mem_alloc is 1 */
	    || !integer_onep (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c))
	    || OMP_CLAUSE_ALLOCATE_ALIGN (c) != NULL_TREE))
      {
	if (ctx->allocate_map == NULL)
	  ctx->allocate_map = new hash_map<tree, tree>;
	tree val = integer_zero_node;
	if (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c))
	  val = OMP_CLAUSE_ALLOCATE_ALLOCATOR (c);
	if (OMP_CLAUSE_ALLOCATE_ALIGN (c))
	  val = build_tree_list (val, OMP_CLAUSE_ALLOCATE_ALIGN (c));
	ctx->allocate_map->put (OMP_CLAUSE_DECL (c), val);
      }

  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      bool by_ref;

      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_PRIVATE:
	  decl = OMP_CLAUSE_DECL (c);
	  if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
	    goto do_private;
	  else if (!is_variable_sized (decl))
	    install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_SHARED:
	  decl = OMP_CLAUSE_DECL (c);
	  if (ctx->allocate_map && ctx->allocate_map->get (decl))
	    ctx->allocate_map->remove (decl);
	  /* Ignore shared directives in teams construct inside of
	     target construct.  */
	  if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
	      && !is_host_teams_ctx (ctx))
	    {
	      /* Global variables don't need to be copied,
		 the receiver side will use them directly.  */
	      tree odecl = maybe_lookup_decl_in_outer_ctx (decl, ctx);
	      if (is_global_var (odecl))
		break;
	      insert_decl_map (&ctx->cb, decl, odecl);
	      break;
	    }
	  gcc_assert (is_taskreg_ctx (ctx));
	  gcc_assert (!COMPLETE_TYPE_P (TREE_TYPE (decl))
		      || !is_variable_sized (decl));
	  /* Global variables don't need to be copied,
	     the receiver side will use them directly.  */
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
	    break;
	  if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	    {
	      use_pointer_for_field (decl, ctx);
	      break;
	    }
	  by_ref = use_pointer_for_field (decl, NULL);
	  if ((! TREE_READONLY (decl) && !OMP_CLAUSE_SHARED_READONLY (c))
	      || TREE_ADDRESSABLE (decl)
	      || by_ref
	      || omp_privatize_by_reference (decl))
	    {
	      by_ref = use_pointer_for_field (decl, ctx);
	      install_var_field (decl, by_ref, 3, ctx);
	      install_var_local (decl, ctx);
	      break;
	    }
	  /* We don't need to copy const scalar vars back.  */
	  OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_FIRSTPRIVATE);
	  goto do_private;

	case OMP_CLAUSE_REDUCTION:
	  /* Collect 'reduction' clauses on OpenACC compute construct.  */
	  if (is_gimple_omp_oacc (ctx->stmt)
	      && is_gimple_omp_offloaded (ctx->stmt))
	    {
	      /* No 'reduction' clauses on OpenACC 'kernels'.  */
	      gcc_checking_assert (!is_oacc_kernels (ctx));
	      /* Likewise, on OpenACC 'kernels' decomposed parts.  */
	      gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx));

	      ctx->local_reduction_clauses
		= tree_cons (NULL, c, ctx->local_reduction_clauses);
	    }
	  /* FALLTHRU */
	case OMP_CLAUSE_IN_REDUCTION:
	  decl = OMP_CLAUSE_DECL (c);
	  if (ctx->allocate_map
	      && ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
		   && (OMP_CLAUSE_REDUCTION_INSCAN (c)
		       || OMP_CLAUSE_REDUCTION_TASK (c)))
		  || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
		  || is_task_ctx (ctx)))
	    {
	      /* For now.  */
	      if (ctx->allocate_map->get (decl))
		ctx->allocate_map->remove (decl);
	    }
	  if (TREE_CODE (decl) == MEM_REF)
	    {
	      tree t = TREE_OPERAND (decl, 0);
	      if (TREE_CODE (t) == POINTER_PLUS_EXPR)
		t = TREE_OPERAND (t, 0);
	      if (TREE_CODE (t) == INDIRECT_REF
		  || TREE_CODE (t) == ADDR_EXPR)
		t = TREE_OPERAND (t, 0);
	      if (is_omp_target (ctx->stmt))
		{
		  if (is_variable_sized (t))
		    {
		      gcc_assert (DECL_HAS_VALUE_EXPR_P (t));
		      t = DECL_VALUE_EXPR (t);
		      gcc_assert (TREE_CODE (t) == INDIRECT_REF);
		      t = TREE_OPERAND (t, 0);
		      gcc_assert (DECL_P (t));
		    }
		  tree at = t;
		  if (ctx->outer)
		    scan_omp_op (&at, ctx->outer);
		  tree nt = omp_copy_decl_1 (at, ctx->outer);
		  splay_tree_insert (ctx->field_map,
				     (splay_tree_key) &DECL_CONTEXT (t),
				     (splay_tree_value) nt);
		  if (at != t)
		    splay_tree_insert (ctx->field_map,
				       (splay_tree_key) &DECL_CONTEXT (at),
				       (splay_tree_value) nt);
		  break;
		}
	      install_var_local (t, ctx);
	      if (is_taskreg_ctx (ctx)
		  && (!is_global_var (maybe_lookup_decl_in_outer_ctx (t, ctx))
		      || (is_task_ctx (ctx)
			  && (TREE_CODE (TREE_TYPE (t)) == POINTER_TYPE
			      || (TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE
				  && (TREE_CODE (TREE_TYPE (TREE_TYPE (t)))
				      == POINTER_TYPE)))))
		  && !is_variable_sized (t)
		  && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
		      || (!OMP_CLAUSE_REDUCTION_TASK (c)
			  && !is_task_ctx (ctx))))
		{
		  by_ref = use_pointer_for_field (t, NULL);
		  if (is_task_ctx (ctx)
		      && TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE
		      && TREE_CODE (TREE_TYPE (TREE_TYPE (t))) == POINTER_TYPE)
		    {
		      install_var_field (t, false, 1, ctx);
		      install_var_field (t, by_ref, 2, ctx);
		    }
		  else
		    install_var_field (t, by_ref, 3, ctx);
		}
	      break;
	    }
	  if (is_omp_target (ctx->stmt))
	    {
	      tree at = decl;
	      if (ctx->outer)
		scan_omp_op (&at, ctx->outer);
	      tree nt = omp_copy_decl_1 (at, ctx->outer);
	      splay_tree_insert (ctx->field_map,
				 (splay_tree_key) &DECL_CONTEXT (decl),
				 (splay_tree_value) nt);
	      if (at != decl)
		splay_tree_insert (ctx->field_map,
				   (splay_tree_key) &DECL_CONTEXT (at),
				   (splay_tree_value) nt);
	      break;
	    }
	  if (is_task_ctx (ctx)
	      || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
		  && OMP_CLAUSE_REDUCTION_TASK (c)
		  && is_parallel_ctx (ctx)))
	    {
	      /* Global variables don't need to be copied,
		 the receiver side will use them directly.  */
	      if (!is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
		{
		  by_ref = use_pointer_for_field (decl, ctx);
		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION)
		    install_var_field (decl, by_ref, 3, ctx);
		}
	      install_var_local (decl, ctx);
	      break;
	    }
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	      && OMP_CLAUSE_REDUCTION_TASK (c))
	    {
	      install_var_local (decl, ctx);
	      break;
	    }
	  goto do_private;

	case OMP_CLAUSE_LASTPRIVATE:
	  /* Let the corresponding firstprivate clause create
	     the variable.  */
	  if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
	    break;
	  /* FALLTHRU */
	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_LINEAR:
	  decl = OMP_CLAUSE_DECL (c);
	do_private:
	  if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
	       || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
	      && is_gimple_omp_offloaded (ctx->stmt))
	    {
	      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
		{
		  by_ref = !omp_privatize_by_reference (decl);
		  install_var_field (decl, by_ref, 3, ctx);
		}
	      else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
		install_var_field (decl, true, 3, ctx);
	      else
		install_var_field (decl, false, 3, ctx);
	    }
	  if (is_variable_sized (decl))
	    {
	      if (is_task_ctx (ctx))
		{
		  if (ctx->allocate_map
		      && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
		    {
		      /* For now.  */
		      if (ctx->allocate_map->get (decl))
			ctx->allocate_map->remove (decl);
		    }
		  install_var_field (decl, false, 1, ctx);
		}
	      break;
	    }
	  else if (is_taskreg_ctx (ctx))
	    {
	      bool global
		= is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx));
	      by_ref = use_pointer_for_field (decl, NULL);

	      if (is_task_ctx (ctx)
		  && (global || by_ref || omp_privatize_by_reference (decl)))
		{
		  if (ctx->allocate_map
		      && ctx->allocate_map->get (decl))
		    install_var_field (decl, by_ref, 32 | 1, ctx);
		  else
		    install_var_field (decl, false, 1, ctx);
		  if (!global)
		    install_var_field (decl, by_ref, 2, ctx);
		}
	      else if (!global)
		install_var_field (decl, by_ref, 3, ctx);
	    }
	  install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_USE_DEVICE_PTR:
	case OMP_CLAUSE_USE_DEVICE_ADDR:
	  decl = OMP_CLAUSE_DECL (c);

	  /* Fortran array descriptors.  */
	  if (lang_hooks.decls.omp_array_data (decl, true))
	    install_var_field (decl, false, 19, ctx);
	  else if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
		    && !omp_privatize_by_reference (decl)
		    && !omp_is_allocatable_or_ptr (decl))
		   || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	    install_var_field (decl, true, 11, ctx);
	  else
	    install_var_field (decl, false, 11, ctx);
	  if (DECL_SIZE (decl)
	      && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
	    {
	      tree decl2 = DECL_VALUE_EXPR (decl);
	      gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
	      decl2 = TREE_OPERAND (decl2, 0);
	      gcc_assert (DECL_P (decl2));
	      install_var_local (decl2, ctx);
	    }
	  install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_IS_DEVICE_PTR:
	  decl = OMP_CLAUSE_DECL (c);
	  goto do_private;
	case OMP_CLAUSE__LOOPTEMP_:
	case OMP_CLAUSE__REDUCTEMP_:
	  gcc_assert (is_taskreg_ctx (ctx));
	  decl = OMP_CLAUSE_DECL (c);
	  install_var_field (decl, false, 3, ctx);
	  install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_COPYIN:
	  decl = OMP_CLAUSE_DECL (c);
	  by_ref = use_pointer_for_field (decl, NULL);
	  install_var_field (decl, by_ref, 3, ctx);
	  break;

	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_DIST_SCHEDULE:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE_PRIORITY:
	case OMP_CLAUSE_GRAINSIZE:
	case OMP_CLAUSE_NUM_TASKS:
	case OMP_CLAUSE_NUM_GANGS:
	case OMP_CLAUSE_NUM_WORKERS:
	case OMP_CLAUSE_VECTOR_LENGTH:
	case OMP_CLAUSE_DETACH:
	case OMP_CLAUSE_FILTER:
	  if (ctx->outer)
	    scan_omp_op (&OMP_CLAUSE_OPERAND (c, 0), ctx->outer);
	  break;

	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	case OMP_CLAUSE_MAP:
	  if (ctx->outer)
	    scan_omp_op (&OMP_CLAUSE_SIZE (c), ctx->outer);
	  decl = OMP_CLAUSE_DECL (c);
	  /* Global variables with "omp declare target" attribute
	     don't need to be copied, the receiver side will use them
	     directly.  However, global variables with "omp declare target link"
	     attribute need to be copied.  Or when ALWAYS modifier is used.  */
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && DECL_P (decl)
	      && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
		   && (OMP_CLAUSE_MAP_KIND (c)
		       != GOMP_MAP_FIRSTPRIVATE_REFERENCE)
		   && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ATTACH
		   && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_DETACH)
		  || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	      && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TO
	      && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_FROM
	      && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TOFROM
	      && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_TO_PSET
	      && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
	      && varpool_node::get_create (decl)->offloadable
	      && !lookup_attribute ("omp declare target link",
				    DECL_ATTRIBUTES (decl)))
	    break;
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER)
	    {
	      /* Ignore GOMP_MAP_POINTER kind for arrays in regions that are
		 not offloaded; there is nothing to map for those.  */
	      if (!is_gimple_omp_offloaded (ctx->stmt)
		  && !POINTER_TYPE_P (TREE_TYPE (decl))
		  && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
		break;
	    }
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && DECL_P (decl)
	      && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
		  || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)
	      && is_omp_target (ctx->stmt))
	    {
	      /* If this is an offloaded region, an attach operation should
		 only exist when the pointer variable is mapped in a prior
		 clause.  */
	      if (is_gimple_omp_offloaded (ctx->stmt))
		gcc_assert
		  (maybe_lookup_decl (decl, ctx)
		   || (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
		       && lookup_attribute ("omp declare target",
					    DECL_ATTRIBUTES (decl))));

	      /* By itself, attach/detach is generated as part of pointer
		 variable mapping and should not create new variables in the
		 offloaded region, however sender refs for it must be created
		 for its address to be passed to the runtime.  */
	      tree field
		= build_decl (OMP_CLAUSE_LOCATION (c),
			      FIELD_DECL, NULL_TREE, ptr_type_node);
	      SET_DECL_ALIGN (field, TYPE_ALIGN (ptr_type_node));
	      insert_field_into_struct (ctx->record_type, field);
	      /* To not clash with a map of the pointer variable itself,
		 attach/detach maps have their field looked up by the *clause*
		 tree expression, not the decl.  */
	      gcc_assert (!splay_tree_lookup (ctx->field_map,
					      (splay_tree_key) c));
	      splay_tree_insert (ctx->field_map, (splay_tree_key) c,
				 (splay_tree_value) field);
	      break;
	    }
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
		  || (OMP_CLAUSE_MAP_KIND (c)
		      == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
	    {
	      if (TREE_CODE (decl) == COMPONENT_REF
		  || (TREE_CODE (decl) == INDIRECT_REF
		      && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
		      && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
			  == REFERENCE_TYPE)))
		break;
	      if (DECL_SIZE (decl)
		  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  install_var_local (decl2, ctx);
		}
	      install_var_local (decl, ctx);
	      break;
	    }
	  if (DECL_P (decl))
	    {
	      if (DECL_SIZE (decl)
		  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  install_var_field (decl2, true, 3, ctx);
		  install_var_local (decl2, ctx);
		  install_var_local (decl, ctx);
		}
	      else
		{
		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
		      && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
		      && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
		      && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
		    install_var_field (decl, true, 7, ctx);
		  else
		    install_var_field (decl, true, 3, ctx);
		  if (is_gimple_omp_offloaded (ctx->stmt)
		      && !(is_gimple_omp_oacc (ctx->stmt)
			   && OMP_CLAUSE_MAP_IN_REDUCTION (c)))
		    install_var_local (decl, ctx);
		}
	    }
	  else
	    {
	      tree base = get_base_address (decl);
	      tree nc = OMP_CLAUSE_CHAIN (c);
	      if (DECL_P (base)
		  && nc != NULL_TREE
		  && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
		  && OMP_CLAUSE_DECL (nc) == base
		  && OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_POINTER
		  && integer_zerop (OMP_CLAUSE_SIZE (nc)))
		{
		  OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c) = 1;
		  OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (nc) = 1;
		}
	      else
		{
		  if (ctx->outer)
		    {
		      scan_omp_op (&OMP_CLAUSE_DECL (c), ctx->outer);
		      decl = OMP_CLAUSE_DECL (c);
		    }
		  gcc_assert (!splay_tree_lookup (ctx->field_map,
						  (splay_tree_key) decl));
		  tree field
		    = build_decl (OMP_CLAUSE_LOCATION (c),
				  FIELD_DECL, NULL_TREE, ptr_type_node);
		  SET_DECL_ALIGN (field, TYPE_ALIGN (ptr_type_node));
		  insert_field_into_struct (ctx->record_type, field);
		  splay_tree_insert (ctx->field_map, (splay_tree_key) decl,
				     (splay_tree_value) field);
		}
	    }
	  break;
	case OMP_CLAUSE_ORDER:
	  ctx->order_concurrent = true;
	  break;

	case OMP_CLAUSE_BIND:
	  ctx->loop_p = true;
	  break;

	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE_SIMDLEN:
	case OMP_CLAUSE_THREADS:
	case OMP_CLAUSE_SIMD:
	case OMP_CLAUSE_NOGROUP:
	case OMP_CLAUSE_DEFAULTMAP:
	case OMP_CLAUSE_ASYNC:
	case OMP_CLAUSE_WAIT:
	case OMP_CLAUSE_GANG:
	case OMP_CLAUSE_WORKER:
	case OMP_CLAUSE_VECTOR:
	case OMP_CLAUSE_INDEPENDENT:
	case OMP_CLAUSE_AUTO:
	case OMP_CLAUSE_SEQ:
	case OMP_CLAUSE_TILE:
	case OMP_CLAUSE__SIMT_:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_NONTEMPORAL:
	case OMP_CLAUSE_IF_PRESENT:
	case OMP_CLAUSE_FINALIZE:
	case OMP_CLAUSE_TASK_REDUCTION:
	case OMP_CLAUSE_ALLOCATE:
	  break;

	case OMP_CLAUSE_ALIGNED:
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_global_var (decl)
	      && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	    install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE__CONDTEMP_:
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_parallel_ctx (ctx))
	    {
	      install_var_field (decl, false, 3, ctx);
	      install_var_local (decl, ctx);
	    }
	  else if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
		   && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD
		   && !OMP_CLAUSE__CONDTEMP__ITER (c))
	    install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE__CACHE_:
	case OMP_CLAUSE_NOHOST:
	default:
	  gcc_unreachable ();
	}
    }

  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_LASTPRIVATE:
	  /* Let the corresponding firstprivate clause create
	     the variable.  */
	  if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
	    scan_array_reductions = true;
	  if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
	    break;
	  /* FALLTHRU */

	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_LINEAR:
	case OMP_CLAUSE_IS_DEVICE_PTR:
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_variable_sized (decl))
	    {
	      if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
		   || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
		  && is_gimple_omp_offloaded (ctx->stmt))
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  install_var_local (decl2, ctx);
		  fixup_remapped_decl (decl2, ctx, false);
		}
	      install_var_local (decl, ctx);
	    }
	  fixup_remapped_decl (decl, ctx,
			       OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
			       && OMP_CLAUSE_PRIVATE_DEBUG (c));
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
	      && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
	    scan_array_reductions = true;
	  break;

	case OMP_CLAUSE_REDUCTION:
	case OMP_CLAUSE_IN_REDUCTION:
	  decl = OMP_CLAUSE_DECL (c);
	  if (TREE_CODE (decl) != MEM_REF && !is_omp_target (ctx->stmt))
	    {
	      if (is_variable_sized (decl))
		install_var_local (decl, ctx);
	      fixup_remapped_decl (decl, ctx, false);
	    }
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	    scan_array_reductions = true;
	  break;

	case OMP_CLAUSE_TASK_REDUCTION:
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	    scan_array_reductions = true;
	  break;

	case OMP_CLAUSE_SHARED:
	  /* Ignore shared directives in teams construct inside of
	     target construct.  */
	  if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
	      && !is_host_teams_ctx (ctx))
	    break;
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
	    break;
	  if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	    {
	      if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl,
								 ctx->outer)))
		break;
	      bool by_ref = use_pointer_for_field (decl, ctx);
	      install_var_field (decl, by_ref, 11, ctx);
	      break;
	    }
	  fixup_remapped_decl (decl, ctx, false);
	  break;
	case OMP_CLAUSE_MAP:
	  if (!is_gimple_omp_offloaded (ctx->stmt))
	    break;
	  decl = OMP_CLAUSE_DECL (c);
	  if (DECL_P (decl)
	      && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
		   && (OMP_CLAUSE_MAP_KIND (c)
		       != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
		  || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	      && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
	      && varpool_node::get_create (decl)->offloadable)
	    break;
	  if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
	       || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)
	      && is_omp_target (ctx->stmt)
	      && !is_gimple_omp_offloaded (ctx->stmt))
	    break;
	  if (DECL_P (decl))
	    {
	      if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
		   || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
		  && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
		  && !COMPLETE_TYPE_P (TREE_TYPE (decl)))
		{
		  tree new_decl = lookup_decl (decl, ctx);
		  TREE_TYPE (new_decl)
		    = remap_type (TREE_TYPE (decl), &ctx->cb);
		}
	      else if (DECL_SIZE (decl)
		       && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  fixup_remapped_decl (decl2, ctx, false);
		  fixup_remapped_decl (decl, ctx, true);
		}
	      else
		fixup_remapped_decl (decl, ctx, false);
	    }
	  break;

	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_DIST_SCHEDULE:
	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE_SIMDLEN:
	case OMP_CLAUSE_ALIGNED:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE_DETACH:
	case OMP_CLAUSE_ALLOCATE:
	case OMP_CLAUSE__LOOPTEMP_:
	case OMP_CLAUSE__REDUCTEMP_:
	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	case OMP_CLAUSE_PRIORITY:
	case OMP_CLAUSE_GRAINSIZE:
	case OMP_CLAUSE_NUM_TASKS:
	case OMP_CLAUSE_THREADS:
	case OMP_CLAUSE_SIMD:
	case OMP_CLAUSE_NOGROUP:
	case OMP_CLAUSE_DEFAULTMAP:
	case OMP_CLAUSE_ORDER:
	case OMP_CLAUSE_BIND:
	case OMP_CLAUSE_USE_DEVICE_PTR:
	case OMP_CLAUSE_USE_DEVICE_ADDR:
	case OMP_CLAUSE_NONTEMPORAL:
	case OMP_CLAUSE_ASYNC:
	case OMP_CLAUSE_WAIT:
	case OMP_CLAUSE_NUM_GANGS:
	case OMP_CLAUSE_NUM_WORKERS:
	case OMP_CLAUSE_VECTOR_LENGTH:
	case OMP_CLAUSE_GANG:
	case OMP_CLAUSE_WORKER:
	case OMP_CLAUSE_VECTOR:
	case OMP_CLAUSE_INDEPENDENT:
	case OMP_CLAUSE_AUTO:
	case OMP_CLAUSE_SEQ:
	case OMP_CLAUSE_TILE:
	case OMP_CLAUSE__SIMT_:
	case OMP_CLAUSE_IF_PRESENT:
	case OMP_CLAUSE_FINALIZE:
	case OMP_CLAUSE_FILTER:
	case OMP_CLAUSE__CONDTEMP_:
	  break;

	case OMP_CLAUSE__CACHE_:
	case OMP_CLAUSE_NOHOST:
	default:
	  gcc_unreachable ();
	}
    }

1904 gcc_checking_assert (!scan_array_reductions
1905 || !is_gimple_omp_oacc (ctx->stmt));
1906 if (scan_array_reductions)
1908 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1909 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
1910 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
1911 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
1912 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
1914 omp_context *rctx = ctx;
1915 if (is_omp_target (ctx->stmt))
1916 rctx = ctx->outer;
1917 scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c), rctx);
1918 scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), rctx);
1920 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
1921 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
1922 scan_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
1923 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
1924 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
1925 scan_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
1929 /* Create a new name for omp child function. Returns an identifier. */
1931 static tree
1932 create_omp_child_function_name (bool task_copy)
1934 return clone_function_name_numbered (current_function_decl,
1935 task_copy ? "_omp_cpyfn" : "_omp_fn");
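/* As an illustrative note (example names, not guaranteed output): for a
   function 'foo' containing two parallel regions, successive calls here
   would typically produce identifiers such as "foo._omp_fn.0" and
   "foo._omp_fn.1" ("foo._omp_cpyfn.N" for task copy functions); the
   exact numbering is chosen by clone_function_name_numbered.  */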
1938 /* Return true if CTX may belong to offloaded code: either if the current
1939 function is offloaded, or if any enclosing context corresponds to a target region. */
1941 static bool
1942 omp_maybe_offloaded_ctx (omp_context *ctx)
1944 if (cgraph_node::get (current_function_decl)->offloadable)
1945 return true;
1946 for (; ctx; ctx = ctx->outer)
1947 if (is_gimple_omp_offloaded (ctx->stmt))
1948 return true;
1949 return false;
1952 /* Build a decl for the omp child function. It will not contain a body
1953 yet, just the bare decl. */
1955 static void
1956 create_omp_child_function (omp_context *ctx, bool task_copy)
1958 tree decl, type, name, t;
1960 name = create_omp_child_function_name (task_copy);
1961 if (task_copy)
1962 type = build_function_type_list (void_type_node, ptr_type_node,
1963 ptr_type_node, NULL_TREE);
1964 else
1965 type = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
1967 decl = build_decl (gimple_location (ctx->stmt), FUNCTION_DECL, name, type);
1969 gcc_checking_assert (!is_gimple_omp_oacc (ctx->stmt)
1970 || !task_copy);
1971 if (!task_copy)
1972 ctx->cb.dst_fn = decl;
1973 else
1974 gimple_omp_task_set_copy_fn (ctx->stmt, decl);
1976 TREE_STATIC (decl) = 1;
1977 TREE_USED (decl) = 1;
1978 DECL_ARTIFICIAL (decl) = 1;
1979 DECL_IGNORED_P (decl) = 0;
1980 TREE_PUBLIC (decl) = 0;
1981 DECL_UNINLINABLE (decl) = 1;
1982 DECL_EXTERNAL (decl) = 0;
1983 DECL_CONTEXT (decl) = NULL_TREE;
1984 DECL_INITIAL (decl) = make_node (BLOCK);
1985 BLOCK_SUPERCONTEXT (DECL_INITIAL (decl)) = decl;
1986 DECL_ATTRIBUTES (decl) = DECL_ATTRIBUTES (current_function_decl);
1987 /* Remove omp declare simd attribute from the new attributes. */
1988 if (tree a = lookup_attribute ("omp declare simd", DECL_ATTRIBUTES (decl)))
1990 while (tree a2 = lookup_attribute ("omp declare simd", TREE_CHAIN (a)))
1991 a = a2;
1992 a = TREE_CHAIN (a);
1993 for (tree *p = &DECL_ATTRIBUTES (decl); *p != a;)
1994 if (is_attribute_p ("omp declare simd", get_attribute_name (*p)))
1995 *p = TREE_CHAIN (*p);
1996 else
1998 tree chain = TREE_CHAIN (*p);
1999 *p = copy_node (*p);
2000 p = &TREE_CHAIN (*p);
2001 *p = chain;
2004 DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
2005 = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (current_function_decl);
2006 DECL_FUNCTION_SPECIFIC_TARGET (decl)
2007 = DECL_FUNCTION_SPECIFIC_TARGET (current_function_decl);
2008 DECL_FUNCTION_VERSIONED (decl)
2009 = DECL_FUNCTION_VERSIONED (current_function_decl);
2011 if (omp_maybe_offloaded_ctx (ctx))
2013 cgraph_node::get_create (decl)->offloadable = 1;
2014 if (ENABLE_OFFLOADING)
2015 g->have_offload = true;
2018 if (cgraph_node::get_create (decl)->offloadable)
2020 const char *target_attr = (is_gimple_omp_offloaded (ctx->stmt)
2021 ? "omp target entrypoint"
2022 : "omp declare target");
2023 if (lookup_attribute ("omp declare target",
2024 DECL_ATTRIBUTES (current_function_decl)))
2026 if (is_gimple_omp_offloaded (ctx->stmt))
2027 DECL_ATTRIBUTES (decl)
2028 = remove_attribute ("omp declare target",
2029 copy_list (DECL_ATTRIBUTES (decl)));
2030 else
2031 target_attr = NULL;
2033 if (target_attr)
2034 DECL_ATTRIBUTES (decl)
2035 = tree_cons (get_identifier (target_attr),
2036 NULL_TREE, DECL_ATTRIBUTES (decl));
2039 t = build_decl (DECL_SOURCE_LOCATION (decl),
2040 RESULT_DECL, NULL_TREE, void_type_node);
2041 DECL_ARTIFICIAL (t) = 1;
2042 DECL_IGNORED_P (t) = 1;
2043 DECL_CONTEXT (t) = decl;
2044 DECL_RESULT (decl) = t;
2046 tree data_name = get_identifier (".omp_data_i");
2047 t = build_decl (DECL_SOURCE_LOCATION (decl), PARM_DECL, data_name,
2048 ptr_type_node);
2049 DECL_ARTIFICIAL (t) = 1;
2050 DECL_NAMELESS (t) = 1;
2051 DECL_ARG_TYPE (t) = ptr_type_node;
2052 DECL_CONTEXT (t) = current_function_decl;
2053 TREE_USED (t) = 1;
2054 TREE_READONLY (t) = 1;
2055 DECL_ARGUMENTS (decl) = t;
2056 if (!task_copy)
2057 ctx->receiver_decl = t;
2058 else
2060 t = build_decl (DECL_SOURCE_LOCATION (decl),
2061 PARM_DECL, get_identifier (".omp_data_o"),
2062 ptr_type_node);
2063 DECL_ARTIFICIAL (t) = 1;
2064 DECL_NAMELESS (t) = 1;
2065 DECL_ARG_TYPE (t) = ptr_type_node;
2066 DECL_CONTEXT (t) = current_function_decl;
2067 TREE_USED (t) = 1;
2068 TREE_ADDRESSABLE (t) = 1;
2069 DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
2070 DECL_ARGUMENTS (decl) = t;
2073 /* Allocate memory for the function structure. The call to
2074 allocate_struct_function clobbers CFUN, so we need to restore
2075 it afterward. */
2076 push_struct_function (decl);
2077 cfun->function_end_locus = gimple_location (ctx->stmt);
2078 init_tree_ssa (cfun);
2079 pop_cfun ();
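/* Schematically, the decl built above corresponds to

     void foo._omp_fn.0 (void *.omp_data_i);

   for parallel/task/target child functions, or

     void foo._omp_cpyfn.1 (void *.omp_data_o, void *.omp_data_i);

   for task copy functions, where .omp_data_o is chained in front of
   .omp_data_i as done above.  (Names shown are examples only.)  */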
2082 /* Callback for walk_gimple_seq. Check if a combined parallel
2083 contains an OMP_FOR for which gimple_omp_for_combined_into_p is true. */
2085 tree
2086 omp_find_combined_for (gimple_stmt_iterator *gsi_p,
2087 bool *handled_ops_p,
2088 struct walk_stmt_info *wi)
2090 gimple *stmt = gsi_stmt (*gsi_p);
2092 *handled_ops_p = true;
2093 switch (gimple_code (stmt))
2095 WALK_SUBSTMTS;
2097 case GIMPLE_OMP_FOR:
2098 if (gimple_omp_for_combined_into_p (stmt)
2099 && gimple_omp_for_kind (stmt)
2100 == *(const enum gf_mask *) (wi->info))
2102 wi->info = stmt;
2103 return integer_zero_node;
2105 break;
2106 default:
2107 break;
2109 return NULL;
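/* Note: returning the non-NULL integer_zero_node makes the enclosing
   walk_gimple_seq stop at the first match; the matched statement itself
   is handed back to the caller through WI->INFO.  */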
2112 /* Add _LOOPTEMP_/_REDUCTEMP_ clauses on OpenMP parallel or task. */
2114 static void
2115 add_taskreg_looptemp_clauses (enum gf_mask msk, gimple *stmt,
2116 omp_context *outer_ctx)
2118 struct walk_stmt_info wi;
2120 memset (&wi, 0, sizeof (wi));
2121 wi.val_only = true;
2122 wi.info = (void *) &msk;
2123 walk_gimple_seq (gimple_omp_body (stmt), omp_find_combined_for, NULL, &wi);
2124 if (wi.info != (void *) &msk)
2126 gomp_for *for_stmt = as_a <gomp_for *> ((gimple *) wi.info);
2127 struct omp_for_data fd;
2128 omp_extract_for_data (for_stmt, &fd, NULL);
2129 /* We need two temporaries of the type of fd.loop.v (istart/iend)
2130 and then (fd.collapse - 1) temporaries of the same
2131 type for the count2 ... countN-1 vars if they are not constant. */
2132 size_t count = 2, i;
2133 tree type = fd.iter_type;
2134 if (fd.collapse > 1
2135 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
2137 count += fd.collapse - 1;
2138 /* If there are lastprivate clauses on the inner
2139 GIMPLE_OMP_FOR, add one more temporary for the total number
2140 of iterations (the product of count1 ... countN-1). */
2141 if (omp_find_clause (gimple_omp_for_clauses (for_stmt),
2142 OMP_CLAUSE_LASTPRIVATE)
2143 || (msk == GF_OMP_FOR_KIND_FOR
2144 && omp_find_clause (gimple_omp_parallel_clauses (stmt),
2145 OMP_CLAUSE_LASTPRIVATE)))
2147 tree temp = create_tmp_var (type);
2148 tree c = build_omp_clause (UNKNOWN_LOCATION,
2149 OMP_CLAUSE__LOOPTEMP_);
2150 insert_decl_map (&outer_ctx->cb, temp, temp);
2151 OMP_CLAUSE_DECL (c) = temp;
2152 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
2153 gimple_omp_taskreg_set_clauses (stmt, c);
2155 if (fd.non_rect
2156 && fd.last_nonrect == fd.first_nonrect + 1)
2157 if (tree v = gimple_omp_for_index (for_stmt, fd.last_nonrect))
2158 if (!TYPE_UNSIGNED (TREE_TYPE (v)))
2160 v = gimple_omp_for_index (for_stmt, fd.first_nonrect);
2161 tree type2 = TREE_TYPE (v);
2162 count++;
2163 for (i = 0; i < 3; i++)
2165 tree temp = create_tmp_var (type2);
2166 tree c = build_omp_clause (UNKNOWN_LOCATION,
2167 OMP_CLAUSE__LOOPTEMP_);
2168 insert_decl_map (&outer_ctx->cb, temp, temp);
2169 OMP_CLAUSE_DECL (c) = temp;
2170 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
2171 gimple_omp_taskreg_set_clauses (stmt, c);
2175 for (i = 0; i < count; i++)
2177 tree temp = create_tmp_var (type);
2178 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
2179 insert_decl_map (&outer_ctx->cb, temp, temp);
2180 OMP_CLAUSE_DECL (c) = temp;
2181 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
2182 gimple_omp_taskreg_set_clauses (stmt, c);
2185 if (msk == GF_OMP_FOR_KIND_TASKLOOP
2186 && omp_find_clause (gimple_omp_task_clauses (stmt),
2187 OMP_CLAUSE_REDUCTION))
2189 tree type = build_pointer_type (pointer_sized_int_node);
2190 tree temp = create_tmp_var (type);
2191 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
2192 insert_decl_map (&outer_ctx->cb, temp, temp);
2193 OMP_CLAUSE_DECL (c) = temp;
2194 OMP_CLAUSE_CHAIN (c) = gimple_omp_task_clauses (stmt);
2195 gimple_omp_task_set_clauses (stmt, c);
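/* As a sketch (the clause decls shown are illustrative, not compiler
   output): for

     #pragma omp parallel for collapse(2) lastprivate(x)

   with non-constant loop bounds, the code above first adds one
   _looptemp_ for the total iteration count (because of the
   lastprivate) and then COUNT == 3 more for istart, iend and count2,
   so the parallel ends up with

     _looptemp_ (D.1) _looptemp_ (D.2) _looptemp_ (D.3) _looptemp_ (D.4)

   prepended to its clause list.  */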
2199 /* Scan an OpenMP parallel directive. */
2201 static void
2202 scan_omp_parallel (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
2204 omp_context *ctx;
2205 tree name;
2206 gomp_parallel *stmt = as_a <gomp_parallel *> (gsi_stmt (*gsi));
2208 /* Ignore parallel directives with empty bodies, unless there
2209 are copyin clauses. */
2210 if (optimize > 0
2211 && empty_body_p (gimple_omp_body (stmt))
2212 && omp_find_clause (gimple_omp_parallel_clauses (stmt),
2213 OMP_CLAUSE_COPYIN) == NULL)
2215 gsi_replace (gsi, gimple_build_nop (), false);
2216 return;
2219 if (gimple_omp_parallel_combined_p (stmt))
2220 add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_FOR, stmt, outer_ctx);
2221 for (tree c = omp_find_clause (gimple_omp_parallel_clauses (stmt),
2222 OMP_CLAUSE_REDUCTION);
2223 c; c = omp_find_clause (OMP_CLAUSE_CHAIN (c), OMP_CLAUSE_REDUCTION))
2224 if (OMP_CLAUSE_REDUCTION_TASK (c))
2226 tree type = build_pointer_type (pointer_sized_int_node);
2227 tree temp = create_tmp_var (type);
2228 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
2229 if (outer_ctx)
2230 insert_decl_map (&outer_ctx->cb, temp, temp);
2231 OMP_CLAUSE_DECL (c) = temp;
2232 OMP_CLAUSE_CHAIN (c) = gimple_omp_parallel_clauses (stmt);
2233 gimple_omp_parallel_set_clauses (stmt, c);
2234 break;
2236 else if (OMP_CLAUSE_CHAIN (c) == NULL_TREE)
2237 break;
2239 ctx = new_omp_context (stmt, outer_ctx);
2240 taskreg_contexts.safe_push (ctx);
2241 if (taskreg_nesting_level > 1)
2242 ctx->is_nested = true;
2243 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2244 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2245 name = create_tmp_var_name (".omp_data_s");
2246 name = build_decl (gimple_location (stmt),
2247 TYPE_DECL, name, ctx->record_type);
2248 DECL_ARTIFICIAL (name) = 1;
2249 DECL_NAMELESS (name) = 1;
2250 TYPE_NAME (ctx->record_type) = name;
2251 TYPE_ARTIFICIAL (ctx->record_type) = 1;
2252 create_omp_child_function (ctx, false);
2253 gimple_omp_parallel_set_child_fn (stmt, ctx->cb.dst_fn);
2255 scan_sharing_clauses (gimple_omp_parallel_clauses (stmt), ctx);
2256 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2258 if (TYPE_FIELDS (ctx->record_type) == NULL)
2259 ctx->record_type = ctx->receiver_decl = NULL;
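/* E.g. for '#pragma omp parallel shared(a) firstprivate(b)' with int
   a, b, the record built here looks, schematically, like

     struct .omp_data_s { int *a; int b; };

   'a' going through a pointer because use_pointer_for_field returned
   true for the shared clause, 'b' being copied by value.  (Sketch
   only; the exact layout depends on the clauses and variables.)  */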
2262 /* Scan an OpenMP task directive. */
2264 static void
2265 scan_omp_task (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
2267 omp_context *ctx;
2268 tree name, t;
2269 gomp_task *stmt = as_a <gomp_task *> (gsi_stmt (*gsi));
2271 /* Ignore task directives with empty bodies, unless they have a depend
2272 clause. */
2273 if (optimize > 0
2274 && gimple_omp_body (stmt)
2275 && empty_body_p (gimple_omp_body (stmt))
2276 && !omp_find_clause (gimple_omp_task_clauses (stmt), OMP_CLAUSE_DEPEND))
2278 gsi_replace (gsi, gimple_build_nop (), false);
2279 return;
2282 if (gimple_omp_task_taskloop_p (stmt))
2283 add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_TASKLOOP, stmt, outer_ctx);
2285 ctx = new_omp_context (stmt, outer_ctx);
2287 if (gimple_omp_task_taskwait_p (stmt))
2289 scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);
2290 return;
2293 taskreg_contexts.safe_push (ctx);
2294 if (taskreg_nesting_level > 1)
2295 ctx->is_nested = true;
2296 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2297 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2298 name = create_tmp_var_name (".omp_data_s");
2299 name = build_decl (gimple_location (stmt),
2300 TYPE_DECL, name, ctx->record_type);
2301 DECL_ARTIFICIAL (name) = 1;
2302 DECL_NAMELESS (name) = 1;
2303 TYPE_NAME (ctx->record_type) = name;
2304 TYPE_ARTIFICIAL (ctx->record_type) = 1;
2305 create_omp_child_function (ctx, false);
2306 gimple_omp_task_set_child_fn (stmt, ctx->cb.dst_fn);
2308 scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);
2310 if (ctx->srecord_type)
2312 name = create_tmp_var_name (".omp_data_a");
2313 name = build_decl (gimple_location (stmt),
2314 TYPE_DECL, name, ctx->srecord_type);
2315 DECL_ARTIFICIAL (name) = 1;
2316 DECL_NAMELESS (name) = 1;
2317 TYPE_NAME (ctx->srecord_type) = name;
2318 TYPE_ARTIFICIAL (ctx->srecord_type) = 1;
2319 create_omp_child_function (ctx, true);
2322 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2324 if (TYPE_FIELDS (ctx->record_type) == NULL)
2326 ctx->record_type = ctx->receiver_decl = NULL;
2327 t = build_int_cst (long_integer_type_node, 0);
2328 gimple_omp_task_set_arg_size (stmt, t);
2329 t = build_int_cst (long_integer_type_node, 1);
2330 gimple_omp_task_set_arg_align (stmt, t);
2334 /* Helper function for finish_taskreg_scan, called through walk_tree.
2335 If maybe_lookup_decl_in_outer_ctx returns non-NULL for some
2336 tree, replace it in the expression. */
2338 static tree
2339 finish_taskreg_remap (tree *tp, int *walk_subtrees, void *data)
2341 if (VAR_P (*tp))
2343 omp_context *ctx = (omp_context *) data;
2344 tree t = maybe_lookup_decl_in_outer_ctx (*tp, ctx);
2345 if (t != *tp)
2347 if (DECL_HAS_VALUE_EXPR_P (t))
2348 t = unshare_expr (DECL_VALUE_EXPR (t));
2349 *tp = t;
2351 *walk_subtrees = 0;
2353 else if (IS_TYPE_OR_DECL_P (*tp))
2354 *walk_subtrees = 0;
2355 return NULL_TREE;
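/* This callback is used via walk_tree from finish_taskreg_scan below,
   on the non-constant TYPE_SIZE_UNIT expression of a task's record
   type, so that any privatized size vars are remapped to their
   outer-context copies (or to their DECL_VALUE_EXPRs).  */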
2358 /* If any decls have been made addressable during scan_omp,
2359 adjust their fields if needed, and lay out the record types
2360 of parallel/task constructs. */
2362 static void
2363 finish_taskreg_scan (omp_context *ctx)
2365 if (ctx->record_type == NULL_TREE)
2366 return;
2368 /* If any task_shared_vars were needed, verify for all
2369 OMP_CLAUSE_SHARED clauses on GIMPLE_OMP_{PARALLEL,TASK,TEAMS}
2370 statements whether use_pointer_for_field has changed
2371 because of that. If it did, update the field types now. */
2372 if (task_shared_vars)
2374 tree c;
2376 for (c = gimple_omp_taskreg_clauses (ctx->stmt);
2377 c; c = OMP_CLAUSE_CHAIN (c))
2378 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
2379 && !OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
2381 tree decl = OMP_CLAUSE_DECL (c);
2383 /* Global variables don't need to be copied,
2384 the receiver side will use them directly. */
2385 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
2386 continue;
2387 if (!bitmap_bit_p (task_shared_vars, DECL_UID (decl))
2388 || !use_pointer_for_field (decl, ctx))
2389 continue;
2390 tree field = lookup_field (decl, ctx);
2391 if (TREE_CODE (TREE_TYPE (field)) == POINTER_TYPE
2392 && TREE_TYPE (TREE_TYPE (field)) == TREE_TYPE (decl))
2393 continue;
2394 TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
2395 TREE_THIS_VOLATILE (field) = 0;
2396 DECL_USER_ALIGN (field) = 0;
2397 SET_DECL_ALIGN (field, TYPE_ALIGN (TREE_TYPE (field)));
2398 if (TYPE_ALIGN (ctx->record_type) < DECL_ALIGN (field))
2399 SET_TYPE_ALIGN (ctx->record_type, DECL_ALIGN (field));
2400 if (ctx->srecord_type)
2402 tree sfield = lookup_sfield (decl, ctx);
2403 TREE_TYPE (sfield) = TREE_TYPE (field);
2404 TREE_THIS_VOLATILE (sfield) = 0;
2405 DECL_USER_ALIGN (sfield) = 0;
2406 SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
2407 if (TYPE_ALIGN (ctx->srecord_type) < DECL_ALIGN (sfield))
2408 SET_TYPE_ALIGN (ctx->srecord_type, DECL_ALIGN (sfield));
2413 if (gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL)
2415 tree clauses = gimple_omp_parallel_clauses (ctx->stmt);
2416 tree c = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
2417 if (c)
2419 /* Move the _reductemp_ clause first. GOMP_parallel_reductions
2420 expects to find it at the start of data. */
2421 tree f = lookup_field (OMP_CLAUSE_DECL (c), ctx);
2422 tree *p = &TYPE_FIELDS (ctx->record_type);
2423 while (*p)
2424 if (*p == f)
2426 *p = DECL_CHAIN (*p);
2427 break;
2429 else
2430 p = &DECL_CHAIN (*p);
2431 DECL_CHAIN (f) = TYPE_FIELDS (ctx->record_type);
2432 TYPE_FIELDS (ctx->record_type) = f;
2434 layout_type (ctx->record_type);
2435 fixup_child_record_type (ctx);
2437 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
2439 layout_type (ctx->record_type);
2440 fixup_child_record_type (ctx);
2442 else
2444 location_t loc = gimple_location (ctx->stmt);
2445 tree *p, vla_fields = NULL_TREE, *q = &vla_fields;
2446 tree detach_clause
2447 = omp_find_clause (gimple_omp_task_clauses (ctx->stmt),
2448 OMP_CLAUSE_DETACH);
2449 /* Move VLA fields to the end. */
2450 p = &TYPE_FIELDS (ctx->record_type);
2451 while (*p)
2452 if (!TYPE_SIZE_UNIT (TREE_TYPE (*p))
2453 || ! TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (*p))))
2455 *q = *p;
2456 *p = TREE_CHAIN (*p);
2457 TREE_CHAIN (*q) = NULL_TREE;
2458 q = &TREE_CHAIN (*q);
2460 else
2461 p = &DECL_CHAIN (*p);
2462 *p = vla_fields;
2463 if (gimple_omp_task_taskloop_p (ctx->stmt))
2465 /* Move the fields corresponding to the first and second _looptemp_
2466 clauses first. These are filled by GOMP_taskloop
2467 and thus need to be in specific positions. */
2468 tree clauses = gimple_omp_task_clauses (ctx->stmt);
2469 tree c1 = omp_find_clause (clauses, OMP_CLAUSE__LOOPTEMP_);
2470 tree c2 = omp_find_clause (OMP_CLAUSE_CHAIN (c1),
2471 OMP_CLAUSE__LOOPTEMP_);
2472 tree c3 = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
2473 tree f1 = lookup_field (OMP_CLAUSE_DECL (c1), ctx);
2474 tree f2 = lookup_field (OMP_CLAUSE_DECL (c2), ctx);
2475 tree f3 = c3 ? lookup_field (OMP_CLAUSE_DECL (c3), ctx) : NULL_TREE;
2476 p = &TYPE_FIELDS (ctx->record_type);
2477 while (*p)
2478 if (*p == f1 || *p == f2 || *p == f3)
2479 *p = DECL_CHAIN (*p);
2480 else
2481 p = &DECL_CHAIN (*p);
2482 DECL_CHAIN (f1) = f2;
2483 if (c3)
2485 DECL_CHAIN (f2) = f3;
2486 DECL_CHAIN (f3) = TYPE_FIELDS (ctx->record_type);
2488 else
2489 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->record_type);
2490 TYPE_FIELDS (ctx->record_type) = f1;
2491 if (ctx->srecord_type)
2493 f1 = lookup_sfield (OMP_CLAUSE_DECL (c1), ctx);
2494 f2 = lookup_sfield (OMP_CLAUSE_DECL (c2), ctx);
2495 if (c3)
2496 f3 = lookup_sfield (OMP_CLAUSE_DECL (c3), ctx);
2497 p = &TYPE_FIELDS (ctx->srecord_type);
2498 while (*p)
2499 if (*p == f1 || *p == f2 || *p == f3)
2500 *p = DECL_CHAIN (*p);
2501 else
2502 p = &DECL_CHAIN (*p);
2503 DECL_CHAIN (f1) = f2;
2504 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
2505 if (c3)
2507 DECL_CHAIN (f2) = f3;
2508 DECL_CHAIN (f3) = TYPE_FIELDS (ctx->srecord_type);
2510 else
2511 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
2512 TYPE_FIELDS (ctx->srecord_type) = f1;
2515 if (detach_clause)
2517 tree c, field;
2519 /* Look for a firstprivate clause with the detach event handle. */
2520 for (c = gimple_omp_taskreg_clauses (ctx->stmt);
2521 c; c = OMP_CLAUSE_CHAIN (c))
2523 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
2524 continue;
2525 if (maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_DECL (c), ctx)
2526 == OMP_CLAUSE_DECL (detach_clause))
2527 break;
2530 gcc_assert (c);
2531 field = lookup_field (OMP_CLAUSE_DECL (c), ctx);
2533 /* Move field corresponding to the detach clause first.
2534 This is filled by GOMP_task and needs to be in a
2535 specific position. */
2536 p = &TYPE_FIELDS (ctx->record_type);
2537 while (*p)
2538 if (*p == field)
2539 *p = DECL_CHAIN (*p);
2540 else
2541 p = &DECL_CHAIN (*p);
2542 DECL_CHAIN (field) = TYPE_FIELDS (ctx->record_type);
2543 TYPE_FIELDS (ctx->record_type) = field;
2544 if (ctx->srecord_type)
2546 field = lookup_sfield (OMP_CLAUSE_DECL (c), ctx);
2547 p = &TYPE_FIELDS (ctx->srecord_type);
2548 while (*p)
2549 if (*p == field)
2550 *p = DECL_CHAIN (*p);
2551 else
2552 p = &DECL_CHAIN (*p);
2553 DECL_CHAIN (field) = TYPE_FIELDS (ctx->srecord_type);
2554 TYPE_FIELDS (ctx->srecord_type) = field;
2557 layout_type (ctx->record_type);
2558 fixup_child_record_type (ctx);
2559 if (ctx->srecord_type)
2560 layout_type (ctx->srecord_type);
2561 tree t = fold_convert_loc (loc, long_integer_type_node,
2562 TYPE_SIZE_UNIT (ctx->record_type));
2563 if (TREE_CODE (t) != INTEGER_CST)
2565 t = unshare_expr (t);
2566 walk_tree (&t, finish_taskreg_remap, ctx, NULL);
2568 gimple_omp_task_set_arg_size (ctx->stmt, t);
2569 t = build_int_cst (long_integer_type_node,
2570 TYPE_ALIGN_UNIT (ctx->record_type));
2571 gimple_omp_task_set_arg_align (ctx->stmt, t);
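/* For a taskloop with a reduction, the record type laid out above
   therefore looks, schematically, like

     struct .omp_data_s { T t1; T t2; uintptr_t *reductemp;
			  ... other fields ...; ... VLA-sized fields ... };

   GOMP_taskloop fills t1/t2, so they (and the reduction temporary, if
   any) must sit at fixed leading positions, while variable-sized
   fields were moved to the end above.  (Sketch only.)  */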
2575 /* Find the enclosing offload context. */
2577 static omp_context *
2578 enclosing_target_ctx (omp_context *ctx)
2580 for (; ctx; ctx = ctx->outer)
2581 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET)
2582 break;
2584 return ctx;
2587 /* Return whether CTX's parent compute construct is an OpenACC 'kernels'
2588 construct.
2589 (This doesn't include OpenACC 'kernels' decomposed parts.) */
2591 static bool
2592 ctx_in_oacc_kernels_region (omp_context *ctx)
2594 for (;ctx != NULL; ctx = ctx->outer)
2596 gimple *stmt = ctx->stmt;
2597 if (gimple_code (stmt) == GIMPLE_OMP_TARGET
2598 && gimple_omp_target_kind (stmt) == GF_OMP_TARGET_KIND_OACC_KERNELS)
2599 return true;
2602 return false;
2605 /* Check the parallelism clauses inside an OpenACC 'kernels' region.
2606 (This doesn't include OpenACC 'kernels' decomposed parts.)
2607 Until kernels handling moves to use the same loop indirection
2608 scheme as parallel, we need to do this checking early. */
2610 static unsigned
2611 check_oacc_kernel_gwv (gomp_for *stmt, omp_context *ctx)
2613 bool checking = true;
2614 unsigned outer_mask = 0;
2615 unsigned this_mask = 0;
2616 bool has_seq = false, has_auto = false;
2618 if (ctx->outer)
2619 outer_mask = check_oacc_kernel_gwv (NULL, ctx->outer);
2620 if (!stmt)
2622 checking = false;
2623 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR)
2624 return outer_mask;
2625 stmt = as_a <gomp_for *> (ctx->stmt);
2628 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2630 switch (OMP_CLAUSE_CODE (c))
2632 case OMP_CLAUSE_GANG:
2633 this_mask |= GOMP_DIM_MASK (GOMP_DIM_GANG);
2634 break;
2635 case OMP_CLAUSE_WORKER:
2636 this_mask |= GOMP_DIM_MASK (GOMP_DIM_WORKER);
2637 break;
2638 case OMP_CLAUSE_VECTOR:
2639 this_mask |= GOMP_DIM_MASK (GOMP_DIM_VECTOR);
2640 break;
2641 case OMP_CLAUSE_SEQ:
2642 has_seq = true;
2643 break;
2644 case OMP_CLAUSE_AUTO:
2645 has_auto = true;
2646 break;
2647 default:
2648 break;
2652 if (checking)
2654 if (has_seq && (this_mask || has_auto))
2655 error_at (gimple_location (stmt), "%<seq%> overrides other"
2656 " OpenACC loop specifiers");
2657 else if (has_auto && this_mask)
2658 error_at (gimple_location (stmt), "%<auto%> conflicts with other"
2659 " OpenACC loop specifiers");
2661 if (this_mask & outer_mask)
2662 error_at (gimple_location (stmt), "inner loop uses same"
2663 " OpenACC parallelism as containing loop");
2666 return outer_mask | this_mask;
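/* For example (sketch):

     #pragma acc loop gang
       ...
       #pragma acc loop gang	// error: inner loop uses same OpenACC
				// parallelism as containing loop

   the outer loop contributes GOMP_DIM_MASK (GOMP_DIM_GANG) to
   OUTER_MASK, so the inner GANG bit trips the THIS_MASK & OUTER_MASK
   check above.  */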
2669 /* Scan a GIMPLE_OMP_FOR. */
2671 static omp_context *
2672 scan_omp_for (gomp_for *stmt, omp_context *outer_ctx)
2674 omp_context *ctx;
2675 size_t i;
2676 tree clauses = gimple_omp_for_clauses (stmt);
2678 ctx = new_omp_context (stmt, outer_ctx);
2680 if (is_gimple_omp_oacc (stmt))
2682 omp_context *tgt = enclosing_target_ctx (outer_ctx);
2684 if (!(tgt && is_oacc_kernels (tgt)))
2685 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
2687 tree c_op0;
2688 switch (OMP_CLAUSE_CODE (c))
2690 case OMP_CLAUSE_GANG:
2691 c_op0 = OMP_CLAUSE_GANG_EXPR (c);
2692 break;
2694 case OMP_CLAUSE_WORKER:
2695 c_op0 = OMP_CLAUSE_WORKER_EXPR (c);
2696 break;
2698 case OMP_CLAUSE_VECTOR:
2699 c_op0 = OMP_CLAUSE_VECTOR_EXPR (c);
2700 break;
2702 default:
2703 continue;
2706 if (c_op0)
2708 /* By construction, this is impossible for OpenACC 'kernels'
2709 decomposed parts. */
2710 gcc_assert (!(tgt && is_oacc_kernels_decomposed_part (tgt)));
2712 error_at (OMP_CLAUSE_LOCATION (c),
2713 "argument not permitted on %qs clause",
2714 omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
2715 if (tgt)
2716 inform (gimple_location (tgt->stmt),
2717 "enclosing parent compute construct");
2718 else if (oacc_get_fn_attrib (current_function_decl))
2719 inform (DECL_SOURCE_LOCATION (current_function_decl),
2720 "enclosing routine");
2721 else
2722 gcc_unreachable ();
2726 if (tgt && is_oacc_kernels (tgt))
2727 check_oacc_kernel_gwv (stmt, ctx);
2729 /* Collect all variables named in reductions on this loop. Ensure
2730 that, if this loop has a reduction on some variable v, and there is
2731 a reduction on v somewhere in an outer context, then there is a
2732 reduction on v on all intervening loops as well. */
2733 tree local_reduction_clauses = NULL;
2734 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2736 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
2737 local_reduction_clauses
2738 = tree_cons (NULL, c, local_reduction_clauses);
2740 if (ctx->outer_reduction_clauses == NULL && ctx->outer != NULL)
2741 ctx->outer_reduction_clauses
2742 = chainon (unshare_expr (ctx->outer->local_reduction_clauses),
2743 ctx->outer->outer_reduction_clauses);
2744 tree outer_reduction_clauses = ctx->outer_reduction_clauses;
2745 tree local_iter = local_reduction_clauses;
2746 for (; local_iter; local_iter = TREE_CHAIN (local_iter))
2748 tree local_clause = TREE_VALUE (local_iter);
2749 tree local_var = OMP_CLAUSE_DECL (local_clause);
2750 tree_code local_op = OMP_CLAUSE_REDUCTION_CODE (local_clause);
2751 bool have_outer_reduction = false;
2752 tree ctx_iter = outer_reduction_clauses;
2753 for (; ctx_iter; ctx_iter = TREE_CHAIN (ctx_iter))
2755 tree outer_clause = TREE_VALUE (ctx_iter);
2756 tree outer_var = OMP_CLAUSE_DECL (outer_clause);
2757 tree_code outer_op = OMP_CLAUSE_REDUCTION_CODE (outer_clause);
2758 if (outer_var == local_var && outer_op != local_op)
2760 warning_at (OMP_CLAUSE_LOCATION (local_clause), 0,
2761 "conflicting reduction operations for %qE",
2762 local_var);
2763 inform (OMP_CLAUSE_LOCATION (outer_clause),
2764 "location of the previous reduction for %qE",
2765 outer_var);
2767 if (outer_var == local_var)
2769 have_outer_reduction = true;
2770 break;
2773 if (have_outer_reduction)
2775 /* There is a reduction on outer_var both on this loop and on
2776 some enclosing loop. Walk up the context tree until such a
2777 loop with a reduction on outer_var is found, and complain
2778 about all intervening loops that do not have such a
2779 reduction. */
2780 struct omp_context *curr_loop = ctx->outer;
2781 bool found = false;
2782 while (curr_loop != NULL)
2784 tree curr_iter = curr_loop->local_reduction_clauses;
2785 for (; curr_iter; curr_iter = TREE_CHAIN (curr_iter))
2787 tree curr_clause = TREE_VALUE (curr_iter);
2788 tree curr_var = OMP_CLAUSE_DECL (curr_clause);
2789 if (curr_var == local_var)
2791 found = true;
2792 break;
2795 if (!found)
2796 warning_at (gimple_location (curr_loop->stmt), 0,
2797 "nested loop in reduction needs "
2798 "reduction clause for %qE",
2799 local_var);
2800 else
2801 break;
2802 curr_loop = curr_loop->outer;
2806 ctx->local_reduction_clauses = local_reduction_clauses;
2807 ctx->outer_reduction_clauses
2808 = chainon (unshare_expr (ctx->local_reduction_clauses),
2809 ctx->outer_reduction_clauses);
2811 if (tgt && is_oacc_kernels (tgt))
2813 /* Strip out reductions, as they are not handled yet. */
2814 tree *prev_ptr = &clauses;
2816 while (tree probe = *prev_ptr)
2818 tree *next_ptr = &OMP_CLAUSE_CHAIN (probe);
2820 if (OMP_CLAUSE_CODE (probe) == OMP_CLAUSE_REDUCTION)
2821 *prev_ptr = *next_ptr;
2822 else
2823 prev_ptr = next_ptr;
2826 gimple_omp_for_set_clauses (stmt, clauses);
2830 scan_sharing_clauses (clauses, ctx);
2832 scan_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
2833 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
2835 scan_omp_op (gimple_omp_for_index_ptr (stmt, i), ctx);
2836 scan_omp_op (gimple_omp_for_initial_ptr (stmt, i), ctx);
2837 scan_omp_op (gimple_omp_for_final_ptr (stmt, i), ctx);
2838 scan_omp_op (gimple_omp_for_incr_ptr (stmt, i), ctx);
2840 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2841 return ctx;
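/* A case the reduction-nesting check above warns about (sketch):

     #pragma acc loop reduction(+:v)
       #pragma acc loop		// warning: nested loop in reduction
				// needs reduction clause for 'v'
	 #pragma acc loop reduction(+:v)

   the middle loop lacks a reduction clause for 'v' even though both
   the loop inside it and a loop enclosing it reduce into 'v'.  */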
2844 /* Duplicate #pragma omp simd, one for SIMT, another one for SIMD. */
2846 static void
2847 scan_omp_simd (gimple_stmt_iterator *gsi, gomp_for *stmt,
2848 omp_context *outer_ctx)
2850 gbind *bind = gimple_build_bind (NULL, NULL, NULL);
2851 gsi_replace (gsi, bind, false);
2852 gimple_seq seq = NULL;
2853 gimple *g = gimple_build_call_internal (IFN_GOMP_USE_SIMT, 0);
2854 tree cond = create_tmp_var_raw (integer_type_node);
2855 DECL_CONTEXT (cond) = current_function_decl;
2856 DECL_SEEN_IN_BIND_EXPR_P (cond) = 1;
2857 gimple_bind_set_vars (bind, cond);
2858 gimple_call_set_lhs (g, cond);
2859 gimple_seq_add_stmt (&seq, g);
2860 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
2861 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
2862 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
2863 g = gimple_build_cond (NE_EXPR, cond, integer_zero_node, lab1, lab2);
2864 gimple_seq_add_stmt (&seq, g);
2865 g = gimple_build_label (lab1);
2866 gimple_seq_add_stmt (&seq, g);
2867 gimple_seq new_seq = copy_gimple_seq_and_replace_locals (stmt);
2868 gomp_for *new_stmt = as_a <gomp_for *> (new_seq);
2869 tree clause = build_omp_clause (gimple_location (stmt), OMP_CLAUSE__SIMT_);
2870 OMP_CLAUSE_CHAIN (clause) = gimple_omp_for_clauses (new_stmt);
2871 gimple_omp_for_set_clauses (new_stmt, clause);
2872 gimple_seq_add_stmt (&seq, new_stmt);
2873 g = gimple_build_goto (lab3);
2874 gimple_seq_add_stmt (&seq, g);
2875 g = gimple_build_label (lab2);
2876 gimple_seq_add_stmt (&seq, g);
2877 gimple_seq_add_stmt (&seq, stmt);
2878 g = gimple_build_label (lab3);
2879 gimple_seq_add_stmt (&seq, g);
2880 gimple_bind_set_body (bind, seq);
2881 update_stmt (bind);
2882 scan_omp_for (new_stmt, outer_ctx);
2883 scan_omp_for (stmt, outer_ctx)->simt_stmt = new_stmt;
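/* The GIMPLE built above has, schematically, the shape

     cond = .GOMP_USE_SIMT ();
     if (cond != 0) goto lab1; else goto lab2;
     lab1: <copy of the simd loop, with an added _simt_ clause>
	   goto lab3;
     lab2: <original simd loop>
     lab3:

   so that a single source loop yields both a SIMT and a SIMD lowering,
   one of which is selected later per target.  */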
2886 static tree omp_find_scan (gimple_stmt_iterator *, bool *,
2887 struct walk_stmt_info *);
2888 static omp_context *maybe_lookup_ctx (gimple *);
2890 /* Duplicate #pragma omp simd, one for the scan input phase loop and one
2891 for the scan phase loop. */
2893 static void
2894 scan_omp_simd_scan (gimple_stmt_iterator *gsi, gomp_for *stmt,
2895 omp_context *outer_ctx)
2897 /* The only change between inclusive and exclusive scan will be
2898 within the first simd loop, so just use inclusive in the
2899 worksharing loop. */
2900 outer_ctx->scan_inclusive = true;
2901 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_INCLUSIVE);
2902 OMP_CLAUSE_DECL (c) = integer_zero_node;
2904 gomp_scan *input_stmt = gimple_build_omp_scan (NULL, NULL_TREE);
2905 gomp_scan *scan_stmt = gimple_build_omp_scan (NULL, c);
2906 gsi_replace (gsi, input_stmt, false);
2907 gimple_seq input_body = NULL;
2908 gimple_seq_add_stmt (&input_body, stmt);
2909 gsi_insert_after (gsi, scan_stmt, GSI_NEW_STMT);
2911 gimple_stmt_iterator input1_gsi = gsi_none ();
2912 struct walk_stmt_info wi;
2913 memset (&wi, 0, sizeof (wi));
2914 wi.val_only = true;
2915 wi.info = (void *) &input1_gsi;
2916 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), omp_find_scan, NULL, &wi);
2917 gcc_assert (!gsi_end_p (input1_gsi));
2919 gimple *input_stmt1 = gsi_stmt (input1_gsi);
2920 gsi_next (&input1_gsi);
2921 gimple *scan_stmt1 = gsi_stmt (input1_gsi);
2922 gcc_assert (scan_stmt1 && gimple_code (scan_stmt1) == GIMPLE_OMP_SCAN);
2923 c = gimple_omp_scan_clauses (as_a <gomp_scan *> (scan_stmt1));
2924 if (c && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_EXCLUSIVE)
2925 std::swap (input_stmt1, scan_stmt1);
2927 gimple_seq input_body1 = gimple_omp_body (input_stmt1);
2928 gimple_omp_set_body (input_stmt1, NULL);
2930 gimple_seq scan_body = copy_gimple_seq_and_replace_locals (stmt);
2931 gomp_for *new_stmt = as_a <gomp_for *> (scan_body);
2933 gimple_omp_set_body (input_stmt1, input_body1);
2934 gimple_omp_set_body (scan_stmt1, NULL);
2936 gimple_stmt_iterator input2_gsi = gsi_none ();
2937 memset (&wi, 0, sizeof (wi));
2938 wi.val_only = true;
2939 wi.info = (void *) &input2_gsi;
2940 walk_gimple_seq_mod (gimple_omp_body_ptr (new_stmt), omp_find_scan,
2941 NULL, &wi);
2942 gcc_assert (!gsi_end_p (input2_gsi));
2944 gimple *input_stmt2 = gsi_stmt (input2_gsi);
2945 gsi_next (&input2_gsi);
2946 gimple *scan_stmt2 = gsi_stmt (input2_gsi);
2947 gcc_assert (scan_stmt2 && gimple_code (scan_stmt2) == GIMPLE_OMP_SCAN);
2948 if (c && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_EXCLUSIVE)
2949 std::swap (input_stmt2, scan_stmt2);
2951 gimple_omp_set_body (input_stmt2, NULL);
2953 gimple_omp_set_body (input_stmt, input_body);
2954 gimple_omp_set_body (scan_stmt, scan_body);
2956 omp_context *ctx = new_omp_context (input_stmt, outer_ctx);
2957 scan_omp (gimple_omp_body_ptr (input_stmt), ctx);
2959 ctx = new_omp_context (scan_stmt, outer_ctx);
2960 scan_omp (gimple_omp_body_ptr (scan_stmt), ctx);
2962 maybe_lookup_ctx (new_stmt)->for_simd_scan_phase = true;
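/* At this point the original loop (the input phase) lives inside
   INPUT_STMT's GIMPLE_OMP_SCAN and its copy NEW_STMT (the scan phase)
   inside SCAN_STMT's, with for_simd_scan_phase set on the copy's
   context.  */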
2965 /* Scan an OpenMP sections directive. */
2967 static void
2968 scan_omp_sections (gomp_sections *stmt, omp_context *outer_ctx)
2970 omp_context *ctx;
2972 ctx = new_omp_context (stmt, outer_ctx);
2973 scan_sharing_clauses (gimple_omp_sections_clauses (stmt), ctx);
2974 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2977 /* Scan an OpenMP single directive. */
2979 static void
2980 scan_omp_single (gomp_single *stmt, omp_context *outer_ctx)
2982 omp_context *ctx;
2983 tree name;
2985 ctx = new_omp_context (stmt, outer_ctx);
2986 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2987 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2988 name = create_tmp_var_name (".omp_copy_s");
2989 name = build_decl (gimple_location (stmt),
2990 TYPE_DECL, name, ctx->record_type);
2991 TYPE_NAME (ctx->record_type) = name;
2993 scan_sharing_clauses (gimple_omp_single_clauses (stmt), ctx);
2994 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2996 if (TYPE_FIELDS (ctx->record_type) == NULL)
2997 ctx->record_type = NULL;
2998 else
2999 layout_type (ctx->record_type);
3002 /* Scan a GIMPLE_OMP_TARGET. */
3004 static void
3005 scan_omp_target (gomp_target *stmt, omp_context *outer_ctx)
3007 omp_context *ctx;
3008 tree name;
3009 bool offloaded = is_gimple_omp_offloaded (stmt);
3010 tree clauses = gimple_omp_target_clauses (stmt);
3012 ctx = new_omp_context (stmt, outer_ctx);
3013 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
3014 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
3015 name = create_tmp_var_name (".omp_data_t");
3016 name = build_decl (gimple_location (stmt),
3017 TYPE_DECL, name, ctx->record_type);
3018 DECL_ARTIFICIAL (name) = 1;
3019 DECL_NAMELESS (name) = 1;
3020 TYPE_NAME (ctx->record_type) = name;
3021 TYPE_ARTIFICIAL (ctx->record_type) = 1;
3023 if (offloaded)
3025 create_omp_child_function (ctx, false);
3026 gimple_omp_target_set_child_fn (stmt, ctx->cb.dst_fn);
3029 scan_sharing_clauses (clauses, ctx);
3030 scan_omp (gimple_omp_body_ptr (stmt), ctx);
3032 if (TYPE_FIELDS (ctx->record_type) == NULL)
3033 ctx->record_type = ctx->receiver_decl = NULL;
3034 else
3036 TYPE_FIELDS (ctx->record_type)
3037 = nreverse (TYPE_FIELDS (ctx->record_type));
3038 if (flag_checking)
3040 unsigned int align = DECL_ALIGN (TYPE_FIELDS (ctx->record_type));
3041 for (tree field = TYPE_FIELDS (ctx->record_type);
3042 field;
3043 field = DECL_CHAIN (field))
3044 gcc_assert (DECL_ALIGN (field) == align);
3046 layout_type (ctx->record_type);
3047 if (offloaded)
3048 fixup_child_record_type (ctx);
3051 if (ctx->teams_nested_p && ctx->nonteams_nested_p)
3053 error_at (gimple_location (stmt),
3054 "%<target%> construct with nested %<teams%> construct "
3055 "contains directives outside of the %<teams%> construct");
3056 gimple_omp_set_body (stmt, gimple_build_bind (NULL, NULL, NULL));
3060 /* Scan an OpenMP teams directive. */
3062 static void
3063 scan_omp_teams (gomp_teams *stmt, omp_context *outer_ctx)
3065 omp_context *ctx = new_omp_context (stmt, outer_ctx);
3067 if (!gimple_omp_teams_host (stmt))
3069 scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
3070 scan_omp (gimple_omp_body_ptr (stmt), ctx);
3071 return;
3073 taskreg_contexts.safe_push (ctx);
3074 gcc_assert (taskreg_nesting_level == 1);
3075 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
3076 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
3077 tree name = create_tmp_var_name (".omp_data_s");
3078 name = build_decl (gimple_location (stmt),
3079 TYPE_DECL, name, ctx->record_type);
3080 DECL_ARTIFICIAL (name) = 1;
3081 DECL_NAMELESS (name) = 1;
3082 TYPE_NAME (ctx->record_type) = name;
3083 TYPE_ARTIFICIAL (ctx->record_type) = 1;
3084 create_omp_child_function (ctx, false);
3085 gimple_omp_teams_set_child_fn (stmt, ctx->cb.dst_fn);
3087 scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
3088 scan_omp (gimple_omp_body_ptr (stmt), ctx);
3090 if (TYPE_FIELDS (ctx->record_type) == NULL)
3091 ctx->record_type = ctx->receiver_decl = NULL;
3094 /* Check nesting restrictions. */
3095 static bool
3096 check_omp_nesting_restrictions (gimple *stmt, omp_context *ctx)
3098 tree c;
3100 /* No nesting of non-OpenACC STMT (that is, an OpenMP one, or a GOMP builtin)
3101 inside an OpenACC CTX. */
3102 if (gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
3103 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE)
3104 /* ..., except for the atomic codes that OpenACC shares with OpenMP. */
3106 else if (!(is_gimple_omp (stmt)
3107 && is_gimple_omp_oacc (stmt)))
3109 if (oacc_get_fn_attrib (cfun->decl) != NULL)
3111 error_at (gimple_location (stmt),
3112 "non-OpenACC construct inside of OpenACC routine");
3113 return false;
3115 else
3116 for (omp_context *octx = ctx; octx != NULL; octx = octx->outer)
3117 if (is_gimple_omp (octx->stmt)
3118 && is_gimple_omp_oacc (octx->stmt))
3120 error_at (gimple_location (stmt),
3121 "non-OpenACC construct inside of OpenACC region");
3122 return false;
3126 if (ctx != NULL)
3128 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET
3129 && gimple_omp_target_kind (ctx->stmt) == GF_OMP_TARGET_KIND_REGION)
3131 c = omp_find_clause (gimple_omp_target_clauses (ctx->stmt),
3132 OMP_CLAUSE_DEVICE);
3133 if (c && OMP_CLAUSE_DEVICE_ANCESTOR (c))
3135 error_at (gimple_location (stmt),
3136 "OpenMP constructs are not allowed in target region "
3137 "with %<ancestor%>");
3138 return false;
3141 if (gimple_code (stmt) == GIMPLE_OMP_TEAMS && !ctx->teams_nested_p)
3142 ctx->teams_nested_p = true;
3143 else
3144 ctx->nonteams_nested_p = true;
3146 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SCAN
3147 && ctx->outer
3148 && gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
3149 ctx = ctx->outer;
3150 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
3151 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD
3152 && !ctx->loop_p)
3154 c = NULL_TREE;
3155 if (ctx->order_concurrent
3156 && (gimple_code (stmt) == GIMPLE_OMP_ORDERED
3157 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
3158 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE))
3160 error_at (gimple_location (stmt),
3161 "OpenMP constructs other than %<parallel%>, %<loop%>"
3162 " or %<simd%> may not be nested inside a region with"
3163 " the %<order(concurrent)%> clause");
3164 return false;
3166 if (gimple_code (stmt) == GIMPLE_OMP_ORDERED)
3168 c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
3169 if (omp_find_clause (c, OMP_CLAUSE_SIMD))
3171 if (omp_find_clause (c, OMP_CLAUSE_THREADS)
3172 && (ctx->outer == NULL
3173 || !gimple_omp_for_combined_into_p (ctx->stmt)
3174 || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR
3175 || (gimple_omp_for_kind (ctx->outer->stmt)
3176 != GF_OMP_FOR_KIND_FOR)
3177 || !gimple_omp_for_combined_p (ctx->outer->stmt)))
3179 error_at (gimple_location (stmt),
3180 "%<ordered simd threads%> must be closely "
3181 "nested inside of %<%s simd%> region",
3182 lang_GNU_Fortran () ? "do" : "for");
3183 return false;
3185 return true;
3188 else if (gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
3189 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE
3190 || gimple_code (stmt) == GIMPLE_OMP_SCAN)
3191 return true;
3192 else if (gimple_code (stmt) == GIMPLE_OMP_FOR
3193 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
3194 return true;
3195 error_at (gimple_location (stmt),
3196 "OpenMP constructs other than "
3197 "%<ordered simd%>, %<simd%>, %<loop%> or %<atomic%> may "
3198 "not be nested inside %<simd%> region");
3199 return false;
3201 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
3203 if ((gimple_code (stmt) != GIMPLE_OMP_FOR
3204 || (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_DISTRIBUTE
3205 && omp_find_clause (gimple_omp_for_clauses (stmt),
3206 OMP_CLAUSE_BIND) == NULL_TREE))
3207 && gimple_code (stmt) != GIMPLE_OMP_PARALLEL)
3209 error_at (gimple_location (stmt),
3210 "only %<distribute%>, %<parallel%> or %<loop%> "
3211 "regions are allowed to be strictly nested inside "
3212 "%<teams%> region");
3213 return false;
3216 else if (ctx->order_concurrent
3217 && gimple_code (stmt) != GIMPLE_OMP_PARALLEL
3218 && (gimple_code (stmt) != GIMPLE_OMP_FOR
3219 || gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_SIMD)
3220 && gimple_code (stmt) != GIMPLE_OMP_SCAN)
3222 if (ctx->loop_p)
3223 error_at (gimple_location (stmt),
3224 "OpenMP constructs other than %<parallel%>, %<loop%> or "
3225 "%<simd%> may not be nested inside a %<loop%> region");
3226 else
3227 error_at (gimple_location (stmt),
3228 "OpenMP constructs other than %<parallel%>, %<loop%> or "
3229 "%<simd%> may not be nested inside a region with "
3230 "the %<order(concurrent)%> clause");
3231 return false;
3234 switch (gimple_code (stmt))
3236 case GIMPLE_OMP_FOR:
3237 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_SIMD)
3238 return true;
3239 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_DISTRIBUTE)
3241 if (ctx != NULL && gimple_code (ctx->stmt) != GIMPLE_OMP_TEAMS)
3243 error_at (gimple_location (stmt),
3244 "%<distribute%> region must be strictly nested "
3245 "inside %<teams%> construct");
3246 return false;
3248 return true;
3250 /* We split a taskloop into a task with a taskloop nested in it. */
3251 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP)
3252 return true;
3253 /* For now, hope this will change so that loop bind(parallel) will not
3254 be allowed in lots of contexts. */
3255 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR
3256 && omp_find_clause (gimple_omp_for_clauses (stmt), OMP_CLAUSE_BIND))
3257 return true;
3258 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_OACC_LOOP)
3260 bool ok = false;
3262 if (ctx)
3263 switch (gimple_code (ctx->stmt))
3265 case GIMPLE_OMP_FOR:
3266 ok = (gimple_omp_for_kind (ctx->stmt)
3267 == GF_OMP_FOR_KIND_OACC_LOOP);
3268 break;
3270 case GIMPLE_OMP_TARGET:
3271 switch (gimple_omp_target_kind (ctx->stmt))
3273 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
3274 case GF_OMP_TARGET_KIND_OACC_KERNELS:
3275 case GF_OMP_TARGET_KIND_OACC_SERIAL:
3276 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED:
3277 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE:
3278 ok = true;
3279 break;
3281 default:
3282 break;
3285 default:
3286 break;
3288 else if (oacc_get_fn_attrib (current_function_decl))
3289 ok = true;
3290 if (!ok)
3292 error_at (gimple_location (stmt),
3293 "OpenACC loop directive must be associated with"
3294 " an OpenACC compute region");
3295 return false;
3298 /* FALLTHRU */
3299 case GIMPLE_CALL:
3300 if (is_gimple_call (stmt)
3301 && (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3302 == BUILT_IN_GOMP_CANCEL
3303 || DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3304 == BUILT_IN_GOMP_CANCELLATION_POINT))
3306 const char *bad = NULL;
3307 const char *kind = NULL;
3308 const char *construct
3309 = (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3310 == BUILT_IN_GOMP_CANCEL)
3311 ? "cancel"
3312 : "cancellation point";
3313 if (ctx == NULL)
3315 error_at (gimple_location (stmt), "orphaned %qs construct",
3316 construct);
3317 return false;
3319 switch (tree_fits_shwi_p (gimple_call_arg (stmt, 0))
3320 ? tree_to_shwi (gimple_call_arg (stmt, 0))
3321 : 0)
3323 case 1:
3324 if (gimple_code (ctx->stmt) != GIMPLE_OMP_PARALLEL)
3325 bad = "parallel";
3326 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3327 == BUILT_IN_GOMP_CANCEL
3328 && !integer_zerop (gimple_call_arg (stmt, 1)))
3329 ctx->cancellable = true;
3330 kind = "parallel";
3331 break;
3332 case 2:
3333 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
3334 || gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR)
3335 bad = "for";
3336 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3337 == BUILT_IN_GOMP_CANCEL
3338 && !integer_zerop (gimple_call_arg (stmt, 1)))
3340 ctx->cancellable = true;
3341 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3342 OMP_CLAUSE_NOWAIT))
3343 warning_at (gimple_location (stmt), 0,
3344 "%<cancel for%> inside "
3345 "%<nowait%> for construct");
3346 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3347 OMP_CLAUSE_ORDERED))
3348 warning_at (gimple_location (stmt), 0,
3349 "%<cancel for%> inside "
3350 "%<ordered%> for construct");
3352 kind = "for";
3353 break;
3354 case 4:
3355 if (gimple_code (ctx->stmt) != GIMPLE_OMP_SECTIONS
3356 && gimple_code (ctx->stmt) != GIMPLE_OMP_SECTION)
3357 bad = "sections";
3358 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3359 == BUILT_IN_GOMP_CANCEL
3360 && !integer_zerop (gimple_call_arg (stmt, 1)))
3362 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
3364 ctx->cancellable = true;
3365 if (omp_find_clause (gimple_omp_sections_clauses
3366 (ctx->stmt),
3367 OMP_CLAUSE_NOWAIT))
3368 warning_at (gimple_location (stmt), 0,
3369 "%<cancel sections%> inside "
3370 "%<nowait%> sections construct");
3372 else
3374 gcc_assert (ctx->outer
3375 && gimple_code (ctx->outer->stmt)
3376 == GIMPLE_OMP_SECTIONS);
3377 ctx->outer->cancellable = true;
3378 if (omp_find_clause (gimple_omp_sections_clauses
3379 (ctx->outer->stmt),
3380 OMP_CLAUSE_NOWAIT))
3381 warning_at (gimple_location (stmt), 0,
3382 "%<cancel sections%> inside "
3383 "%<nowait%> sections construct");
3386 kind = "sections";
3387 break;
3388 case 8:
3389 if (!is_task_ctx (ctx)
3390 && (!is_taskloop_ctx (ctx)
3391 || ctx->outer == NULL
3392 || !is_task_ctx (ctx->outer)))
3393 bad = "task";
3394 else
3396 for (omp_context *octx = ctx->outer;
3397 octx; octx = octx->outer)
3399 switch (gimple_code (octx->stmt))
3401 case GIMPLE_OMP_TASKGROUP:
3402 break;
3403 case GIMPLE_OMP_TARGET:
3404 if (gimple_omp_target_kind (octx->stmt)
3405 != GF_OMP_TARGET_KIND_REGION)
3406 continue;
3407 /* FALLTHRU */
3408 case GIMPLE_OMP_PARALLEL:
3409 case GIMPLE_OMP_TEAMS:
3410 error_at (gimple_location (stmt),
3411 "%<%s taskgroup%> construct not closely "
3412 "nested inside of %<taskgroup%> region",
3413 construct);
3414 return false;
3415 case GIMPLE_OMP_TASK:
3416 if (gimple_omp_task_taskloop_p (octx->stmt)
3417 && octx->outer
3418 && is_taskloop_ctx (octx->outer))
3420 tree clauses
3421 = gimple_omp_for_clauses (octx->outer->stmt);
3422 if (!omp_find_clause (clauses, OMP_CLAUSE_NOGROUP))
3423 break;
3425 continue;
3426 default:
3427 continue;
3429 break;
3431 ctx->cancellable = true;
3433 kind = "taskgroup";
3434 break;
3435 default:
3436 error_at (gimple_location (stmt), "invalid arguments");
3437 return false;
3439 if (bad)
3441 error_at (gimple_location (stmt),
3442 "%<%s %s%> construct not closely nested inside of %qs",
3443 construct, kind, bad);
3444 return false;
3447 /* FALLTHRU */
3448 case GIMPLE_OMP_SECTIONS:
3449 case GIMPLE_OMP_SINGLE:
3450 for (; ctx != NULL; ctx = ctx->outer)
3451 switch (gimple_code (ctx->stmt))
3453 case GIMPLE_OMP_FOR:
3454 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
3455 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
3456 break;
3457 /* FALLTHRU */
3458 case GIMPLE_OMP_SECTIONS:
3459 case GIMPLE_OMP_SINGLE:
3460 case GIMPLE_OMP_ORDERED:
3461 case GIMPLE_OMP_MASTER:
3462 case GIMPLE_OMP_MASKED:
3463 case GIMPLE_OMP_TASK:
3464 case GIMPLE_OMP_CRITICAL:
3465 if (is_gimple_call (stmt))
3467 if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3468 != BUILT_IN_GOMP_BARRIER)
3469 return true;
3470 error_at (gimple_location (stmt),
3471 "barrier region may not be closely nested inside "
3472 "of work-sharing, %<loop%>, %<critical%>, "
3473 "%<ordered%>, %<master%>, %<masked%>, explicit "
3474 "%<task%> or %<taskloop%> region");
3475 return false;
3477 error_at (gimple_location (stmt),
3478 "work-sharing region may not be closely nested inside "
3479 "of work-sharing, %<loop%>, %<critical%>, %<ordered%>, "
3480 "%<master%>, %<masked%>, explicit %<task%> or "
3481 "%<taskloop%> region");
3482 return false;
3483 case GIMPLE_OMP_PARALLEL:
3484 case GIMPLE_OMP_TEAMS:
3485 return true;
3486 case GIMPLE_OMP_TARGET:
3487 if (gimple_omp_target_kind (ctx->stmt)
3488 == GF_OMP_TARGET_KIND_REGION)
3489 return true;
3490 break;
3491 default:
3492 break;
3494 break;
3495 case GIMPLE_OMP_MASTER:
3496 case GIMPLE_OMP_MASKED:
3497 for (; ctx != NULL; ctx = ctx->outer)
3498 switch (gimple_code (ctx->stmt))
3500 case GIMPLE_OMP_FOR:
3501 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
3502 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
3503 break;
3504 /* FALLTHRU */
3505 case GIMPLE_OMP_SECTIONS:
3506 case GIMPLE_OMP_SINGLE:
3507 case GIMPLE_OMP_TASK:
3508 error_at (gimple_location (stmt),
3509 "%qs region may not be closely nested inside "
3510 "of work-sharing, %<loop%>, explicit %<task%> or "
3511 "%<taskloop%> region",
3512 gimple_code (stmt) == GIMPLE_OMP_MASTER
3513 ? "master" : "masked");
3514 return false;
3515 case GIMPLE_OMP_PARALLEL:
3516 case GIMPLE_OMP_TEAMS:
3517 return true;
3518 case GIMPLE_OMP_TARGET:
3519 if (gimple_omp_target_kind (ctx->stmt)
3520 == GF_OMP_TARGET_KIND_REGION)
3521 return true;
3522 break;
3523 default:
3524 break;
3526 break;
3527 case GIMPLE_OMP_SCOPE:
3528 for (; ctx != NULL; ctx = ctx->outer)
3529 switch (gimple_code (ctx->stmt))
3531 case GIMPLE_OMP_FOR:
3532 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
3533 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
3534 break;
3535 /* FALLTHRU */
3536 case GIMPLE_OMP_SECTIONS:
3537 case GIMPLE_OMP_SINGLE:
3538 case GIMPLE_OMP_TASK:
3539 case GIMPLE_OMP_CRITICAL:
3540 case GIMPLE_OMP_ORDERED:
3541 case GIMPLE_OMP_MASTER:
3542 case GIMPLE_OMP_MASKED:
3543 error_at (gimple_location (stmt),
3544 "%<scope%> region may not be closely nested inside "
3545 "of work-sharing, %<loop%>, explicit %<task%>, "
3546 "%<taskloop%>, %<critical%>, %<ordered%>, %<master%>, "
3547 "or %<masked%> region");
3548 return false;
3549 case GIMPLE_OMP_PARALLEL:
3550 case GIMPLE_OMP_TEAMS:
3551 return true;
3552 case GIMPLE_OMP_TARGET:
3553 if (gimple_omp_target_kind (ctx->stmt)
3554 == GF_OMP_TARGET_KIND_REGION)
3555 return true;
3556 break;
3557 default:
3558 break;
3560 break;
3561 case GIMPLE_OMP_TASK:
3562 for (c = gimple_omp_task_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
3563 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
3564 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
3565 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
3567 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
3568 error_at (OMP_CLAUSE_LOCATION (c),
3569 "%<depend(%s)%> is only allowed in %<omp ordered%>",
3570 kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
3571 return false;
3573 break;
3574 case GIMPLE_OMP_ORDERED:
3575 for (c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
3576 c; c = OMP_CLAUSE_CHAIN (c))
3578 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
3580 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREADS
3581 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SIMD);
3582 continue;
3584 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
3585 if (kind == OMP_CLAUSE_DEPEND_SOURCE
3586 || kind == OMP_CLAUSE_DEPEND_SINK)
3588 tree oclause;
3589 /* Look for a containing ordered(N) loop. */
3590 if (ctx == NULL
3591 || gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
3592 || (oclause
3593 = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3594 OMP_CLAUSE_ORDERED)) == NULL_TREE)
3596 error_at (OMP_CLAUSE_LOCATION (c),
3597 "%<ordered%> construct with %<depend%> clause "
3598 "must be closely nested inside an %<ordered%> "
3599 "loop");
3600 return false;
3602 else if (OMP_CLAUSE_ORDERED_EXPR (oclause) == NULL_TREE)
3604 error_at (OMP_CLAUSE_LOCATION (c),
3605 "%<ordered%> construct with %<depend%> clause "
3606 "must be closely nested inside a loop with "
3607 "%<ordered%> clause with a parameter");
3608 return false;
3611 else
3613 error_at (OMP_CLAUSE_LOCATION (c),
3614 "invalid depend kind in omp %<ordered%> %<depend%>");
3615 return false;
3618 c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
3619 if (omp_find_clause (c, OMP_CLAUSE_SIMD))
3621 /* ordered simd must be closely nested inside of a simd region,
3622 and a simd region must not encounter constructs other than
3623 ordered simd, therefore ordered simd may be either orphaned,
3624 or ctx->stmt must be simd. The latter case was already
3625 handled earlier. */
3626 if (ctx != NULL)
3628 error_at (gimple_location (stmt),
3629 "%<ordered%> %<simd%> must be closely nested inside "
3630 "%<simd%> region");
3631 return false;
3634 for (; ctx != NULL; ctx = ctx->outer)
3635 switch (gimple_code (ctx->stmt))
3637 case GIMPLE_OMP_CRITICAL:
3638 case GIMPLE_OMP_TASK:
3639 case GIMPLE_OMP_ORDERED:
3640 ordered_in_taskloop:
3641 error_at (gimple_location (stmt),
3642 "%<ordered%> region may not be closely nested inside "
3643 "of %<critical%>, %<ordered%>, explicit %<task%> or "
3644 "%<taskloop%> region");
3645 return false;
3646 case GIMPLE_OMP_FOR:
3647 if (gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP)
3648 goto ordered_in_taskloop;
3649 tree o;
3650 o = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3651 OMP_CLAUSE_ORDERED);
3652 if (o == NULL)
3654 error_at (gimple_location (stmt),
3655 "%<ordered%> region must be closely nested inside "
3656 "a loop region with an %<ordered%> clause");
3657 return false;
3659 if (OMP_CLAUSE_ORDERED_EXPR (o) != NULL_TREE
3660 && omp_find_clause (c, OMP_CLAUSE_DEPEND) == NULL_TREE)
3662 error_at (gimple_location (stmt),
3663 "%<ordered%> region without %<depend%> clause may "
3664 "not be closely nested inside a loop region with "
3665 "an %<ordered%> clause with a parameter");
3666 return false;
3668 return true;
3669 case GIMPLE_OMP_TARGET:
3670 if (gimple_omp_target_kind (ctx->stmt)
3671 != GF_OMP_TARGET_KIND_REGION)
3672 break;
3673 /* FALLTHRU */
3674 case GIMPLE_OMP_PARALLEL:
3675 case GIMPLE_OMP_TEAMS:
3676 error_at (gimple_location (stmt),
3677 "%<ordered%> region must be closely nested inside "
3678 "a loop region with an %<ordered%> clause");
3679 return false;
3680 default:
3681 break;
3683 break;
3684 case GIMPLE_OMP_CRITICAL:
3686 tree this_stmt_name
3687 = gimple_omp_critical_name (as_a <gomp_critical *> (stmt));
3688 for (; ctx != NULL; ctx = ctx->outer)
3689 if (gomp_critical *other_crit
3690 = dyn_cast <gomp_critical *> (ctx->stmt))
3691 if (this_stmt_name == gimple_omp_critical_name (other_crit))
3693 error_at (gimple_location (stmt),
3694 "%<critical%> region may not be nested inside "
3695 "a %<critical%> region with the same name");
3696 return false;
3699 break;
3700 case GIMPLE_OMP_TEAMS:
3701 if (ctx == NULL)
3702 break;
3703 else if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET
3704 || (gimple_omp_target_kind (ctx->stmt)
3705 != GF_OMP_TARGET_KIND_REGION))
3707 /* A teams construct can appear either strictly nested inside of a
3708 target construct with no intervening stmts, or can be encountered
3709 only by an initial task (so it must not appear inside any OpenMP
3710 construct). */
3711 error_at (gimple_location (stmt),
3712 "%<teams%> construct must be closely nested inside of "
3713 "%<target%> construct or not nested in any OpenMP "
3714 "construct");
3715 return false;
3717 break;
3718 case GIMPLE_OMP_TARGET:
3719 for (c = gimple_omp_target_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
3720 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
3721 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
3722 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
3724 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
3725 error_at (OMP_CLAUSE_LOCATION (c),
3726 "%<depend(%s)%> is only allowed in %<omp ordered%>",
3727 kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
3728 return false;
3730 if (is_gimple_omp_offloaded (stmt)
3731 && oacc_get_fn_attrib (cfun->decl) != NULL)
3733 error_at (gimple_location (stmt),
3734 "OpenACC region inside of OpenACC routine, nested "
3735 "parallelism not supported yet");
3736 return false;
3738 for (; ctx != NULL; ctx = ctx->outer)
3740 if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET)
3742 if (is_gimple_omp (stmt)
3743 && is_gimple_omp_oacc (stmt)
3744 && is_gimple_omp (ctx->stmt))
3746 error_at (gimple_location (stmt),
3747 "OpenACC construct inside of non-OpenACC region");
3748 return false;
3750 continue;
3753 const char *stmt_name, *ctx_stmt_name;
3754 switch (gimple_omp_target_kind (stmt))
3756 case GF_OMP_TARGET_KIND_REGION: stmt_name = "target"; break;
3757 case GF_OMP_TARGET_KIND_DATA: stmt_name = "target data"; break;
3758 case GF_OMP_TARGET_KIND_UPDATE: stmt_name = "target update"; break;
3759 case GF_OMP_TARGET_KIND_ENTER_DATA:
3760 stmt_name = "target enter data"; break;
3761 case GF_OMP_TARGET_KIND_EXIT_DATA:
3762 stmt_name = "target exit data"; break;
3763 case GF_OMP_TARGET_KIND_OACC_PARALLEL: stmt_name = "parallel"; break;
3764 case GF_OMP_TARGET_KIND_OACC_KERNELS: stmt_name = "kernels"; break;
3765 case GF_OMP_TARGET_KIND_OACC_SERIAL: stmt_name = "serial"; break;
3766 case GF_OMP_TARGET_KIND_OACC_DATA: stmt_name = "data"; break;
3767 case GF_OMP_TARGET_KIND_OACC_UPDATE: stmt_name = "update"; break;
3768 case GF_OMP_TARGET_KIND_OACC_ENTER_DATA:
3769 stmt_name = "enter data"; break;
3770 case GF_OMP_TARGET_KIND_OACC_EXIT_DATA:
3771 stmt_name = "exit data"; break;
3772 case GF_OMP_TARGET_KIND_OACC_DECLARE: stmt_name = "declare"; break;
3773 case GF_OMP_TARGET_KIND_OACC_HOST_DATA: stmt_name = "host_data";
3774 break;
3775 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED:
3776 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE:
3777 case GF_OMP_TARGET_KIND_OACC_DATA_KERNELS:
3778 /* OpenACC 'kernels' decomposed parts. */
3779 stmt_name = "kernels"; break;
3780 default: gcc_unreachable ();
3782 switch (gimple_omp_target_kind (ctx->stmt))
3784 case GF_OMP_TARGET_KIND_REGION: ctx_stmt_name = "target"; break;
3785 case GF_OMP_TARGET_KIND_DATA: ctx_stmt_name = "target data"; break;
3786 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
3787 ctx_stmt_name = "parallel"; break;
3788 case GF_OMP_TARGET_KIND_OACC_KERNELS:
3789 ctx_stmt_name = "kernels"; break;
3790 case GF_OMP_TARGET_KIND_OACC_SERIAL:
3791 ctx_stmt_name = "serial"; break;
3792 case GF_OMP_TARGET_KIND_OACC_DATA: ctx_stmt_name = "data"; break;
3793 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
3794 ctx_stmt_name = "host_data"; break;
3795 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED:
3796 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE:
3797 case GF_OMP_TARGET_KIND_OACC_DATA_KERNELS:
3798 /* OpenACC 'kernels' decomposed parts. */
3799 ctx_stmt_name = "kernels"; break;
3800 default: gcc_unreachable ();
3803 /* OpenACC/OpenMP mismatch? */
3804 if (is_gimple_omp_oacc (stmt)
3805 != is_gimple_omp_oacc (ctx->stmt))
3807 error_at (gimple_location (stmt),
3808 "%s %qs construct inside of %s %qs region",
3809 (is_gimple_omp_oacc (stmt)
3810 ? "OpenACC" : "OpenMP"), stmt_name,
3811 (is_gimple_omp_oacc (ctx->stmt)
3812 ? "OpenACC" : "OpenMP"), ctx_stmt_name);
3813 return false;
3815 if (is_gimple_omp_offloaded (ctx->stmt))
3817 /* No GIMPLE_OMP_TARGET inside offloaded OpenACC CTX. */
3818 if (is_gimple_omp_oacc (ctx->stmt))
3820 error_at (gimple_location (stmt),
3821 "%qs construct inside of %qs region",
3822 stmt_name, ctx_stmt_name);
3823 return false;
3825 else
3827 warning_at (gimple_location (stmt), 0,
3828 "%qs construct inside of %qs region",
3829 stmt_name, ctx_stmt_name);
3833 break;
3834 default:
3835 break;
3837 return true;
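/* An illustrative source-level sketch (not part of this file) of one
   violation the checks above reject: an ordered region whose enclosing
   worksharing loop lacks an ordered clause.

     #pragma omp for              // no ordered clause
     for (i = 0; i < n; i++)
       {
     #pragma omp ordered          // error: "'ordered' region must be
         f (i);                   // closely nested inside a loop region
       }                          // with an 'ordered' clause"
*/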
3841 /* Helper function for scan_omp.
3843 Callback for walk_tree or for operands in walk_gimple_stmt, used to
3844 scan for OMP directives in TP. */
3846 static tree
3847 scan_omp_1_op (tree *tp, int *walk_subtrees, void *data)
3849 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
3850 omp_context *ctx = (omp_context *) wi->info;
3851 tree t = *tp;
3853 switch (TREE_CODE (t))
3855 case VAR_DECL:
3856 case PARM_DECL:
3857 case LABEL_DECL:
3858 case RESULT_DECL:
3859 if (ctx)
3861 tree repl = remap_decl (t, &ctx->cb);
3862 gcc_checking_assert (TREE_CODE (repl) != ERROR_MARK);
3863 *tp = repl;
3865 break;
3867 default:
3868 if (ctx && TYPE_P (t))
3869 *tp = remap_type (t, &ctx->cb);
3870 else if (!DECL_P (t))
3872 *walk_subtrees = 1;
3873 if (ctx)
3875 tree tem = remap_type (TREE_TYPE (t), &ctx->cb);
3876 if (tem != TREE_TYPE (t))
3878 if (TREE_CODE (t) == INTEGER_CST)
3879 *tp = wide_int_to_tree (tem, wi::to_wide (t));
3880 else
3881 TREE_TYPE (t) = tem;
3885 break;
3888 return NULL_TREE;
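/* For illustration: INTEGER_CST nodes may be shared between users, so
   when the remapped type differs, the code above builds a fresh
   constant rather than overwriting TREE_TYPE in place, roughly

     if (TREE_CODE (t) == INTEGER_CST)
       *tp = wide_int_to_tree (tem, wi::to_wide (t));  // new node
     else
       TREE_TYPE (t) = tem;                            // safe to mutate

   e.g. a shared (sizetype) 8 used as a VLA bound is copied into a new
   constant of the remapped type instead of being changed for all its
   other users.  */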
3891 /* Return true if FNDECL is a setjmp or a longjmp. */
3893 static bool
3894 setjmp_or_longjmp_p (const_tree fndecl)
3896 if (fndecl_built_in_p (fndecl, BUILT_IN_SETJMP)
3897 || fndecl_built_in_p (fndecl, BUILT_IN_LONGJMP))
3898 return true;
3900 tree declname = DECL_NAME (fndecl);
3901 if (!declname
3902 || (DECL_CONTEXT (fndecl) != NULL_TREE
3903 && TREE_CODE (DECL_CONTEXT (fndecl)) != TRANSLATION_UNIT_DECL)
3904 || !TREE_PUBLIC (fndecl))
3905 return false;
3907 const char *name = IDENTIFIER_POINTER (declname);
3908 return !strcmp (name, "setjmp") || !strcmp (name, "longjmp");
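/* For illustration: besides the BUILT_IN_SETJMP/BUILT_IN_LONGJMP
   builtins, the name-based check above matches only public, file-scope
   declarations with exactly those names, e.g.

     extern int setjmp (jmp_buf);     // matched
     static int setjmp (jmp_buf);     // not matched: !TREE_PUBLIC
     int my_longjmp (jmp_buf, int);   // not matched: different name
*/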
3911 /* Return true if FNDECL is an omp_* runtime API call. */
3913 static bool
3914 omp_runtime_api_call (const_tree fndecl)
3916 tree declname = DECL_NAME (fndecl);
3917 if (!declname
3918 || (DECL_CONTEXT (fndecl) != NULL_TREE
3919 && TREE_CODE (DECL_CONTEXT (fndecl)) != TRANSLATION_UNIT_DECL)
3920 || !TREE_PUBLIC (fndecl))
3921 return false;
3923 const char *name = IDENTIFIER_POINTER (declname);
3924 if (!startswith (name, "omp_"))
3925 return false;
3927 static const char *omp_runtime_apis[] =
3929 /* This array has 3 sections. First, omp_* calls that don't
3930 have any suffixes. */
3931 "aligned_alloc",
3932 "aligned_calloc",
3933 "alloc",
3934 "calloc",
3935 "free",
3936 "realloc",
3937 "target_alloc",
3938 "target_associate_ptr",
3939 "target_disassociate_ptr",
3940 "target_free",
3941 "target_is_present",
3942 "target_memcpy",
3943 "target_memcpy_rect",
3944 NULL,
3945 /* Now omp_* calls that are available as omp_* and omp_*_. */
3946 "capture_affinity",
3947 "destroy_allocator",
3948 "destroy_lock",
3949 "destroy_nest_lock",
3950 "display_affinity",
3951 "fulfill_event",
3952 "get_active_level",
3953 "get_affinity_format",
3954 "get_cancellation",
3955 "get_default_allocator",
3956 "get_default_device",
3957 "get_device_num",
3958 "get_dynamic",
3959 "get_initial_device",
3960 "get_level",
3961 "get_max_active_levels",
3962 "get_max_task_priority",
3963 "get_max_teams",
3964 "get_max_threads",
3965 "get_nested",
3966 "get_num_devices",
3967 "get_num_places",
3968 "get_num_procs",
3969 "get_num_teams",
3970 "get_num_threads",
3971 "get_partition_num_places",
3972 "get_place_num",
3973 "get_proc_bind",
3974 "get_supported_active_levels",
3975 "get_team_num",
3976 "get_teams_thread_limit",
3977 "get_thread_limit",
3978 "get_thread_num",
3979 "get_wtick",
3980 "get_wtime",
3981 "in_final",
3982 "in_parallel",
3983 "init_lock",
3984 "init_nest_lock",
3985 "is_initial_device",
3986 "pause_resource",
3987 "pause_resource_all",
3988 "set_affinity_format",
3989 "set_default_allocator",
3990 "set_lock",
3991 "set_nest_lock",
3992 "test_lock",
3993 "test_nest_lock",
3994 "unset_lock",
3995 "unset_nest_lock",
3996 NULL,
3997 /* And finally calls available as omp_*, omp_*_ and omp_*_8_. */
3998 "display_env",
3999 "get_ancestor_thread_num",
4000 "init_allocator",
4001 "get_partition_place_nums",
4002 "get_place_num_procs",
4003 "get_place_proc_ids",
4004 "get_schedule",
4005 "get_team_size",
4006 "set_default_device",
4007 "set_dynamic",
4008 "set_max_active_levels",
4009 "set_nested",
4010 "set_num_teams",
4011 "set_num_threads",
4012 "set_schedule",
4013 "set_teams_thread_limit"
4016 int mode = 0;
4017 for (unsigned i = 0; i < ARRAY_SIZE (omp_runtime_apis); i++)
4019 if (omp_runtime_apis[i] == NULL)
4021 mode++;
4022 continue;
4024 size_t len = strlen (omp_runtime_apis[i]);
4025 if (strncmp (name + 4, omp_runtime_apis[i], len) == 0
4026 && (name[4 + len] == '\0'
4027 || (mode > 0
4028 && name[4 + len] == '_'
4029 && (name[4 + len + 1] == '\0'
4030 || (mode > 1
4031 && strcmp (name + 4 + len + 1, "8_") == 0)))))
4032 return true;
4034 return false;
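/* A worked example of the suffix matching above, where MODE counts the
   NULL separators seen so far: "omp_target_alloc" matches
   "target_alloc" in section one (exact name only); "omp_get_wtime" and
   the Fortran variant "omp_get_wtime_" both match "get_wtime" in
   section two (MODE == 1 also allows one trailing underscore); and
   "omp_get_team_size", "omp_get_team_size_" and "omp_get_team_size_8_"
   all match "get_team_size" in section three (MODE == 2 additionally
   allows the "_8_" suffix).  */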
4037 /* Helper function for scan_omp.
4039 Callback for walk_gimple_stmt used to scan for OMP directives in
4040 the current statement in GSI. */
4042 static tree
4043 scan_omp_1_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
4044 struct walk_stmt_info *wi)
4046 gimple *stmt = gsi_stmt (*gsi);
4047 omp_context *ctx = (omp_context *) wi->info;
4049 if (gimple_has_location (stmt))
4050 input_location = gimple_location (stmt);
4052 /* Check the nesting restrictions. */
4053 bool remove = false;
4054 if (is_gimple_omp (stmt))
4055 remove = !check_omp_nesting_restrictions (stmt, ctx);
4056 else if (is_gimple_call (stmt))
4058 tree fndecl = gimple_call_fndecl (stmt);
4059 if (fndecl)
4061 if (ctx
4062 && gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
4063 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD
4064 && setjmp_or_longjmp_p (fndecl)
4065 && !ctx->loop_p)
4067 remove = true;
4068 error_at (gimple_location (stmt),
4069 "setjmp/longjmp inside %<simd%> construct");
4071 else if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
4072 switch (DECL_FUNCTION_CODE (fndecl))
4074 case BUILT_IN_GOMP_BARRIER:
4075 case BUILT_IN_GOMP_CANCEL:
4076 case BUILT_IN_GOMP_CANCELLATION_POINT:
4077 case BUILT_IN_GOMP_TASKYIELD:
4078 case BUILT_IN_GOMP_TASKWAIT:
4079 case BUILT_IN_GOMP_TASKGROUP_START:
4080 case BUILT_IN_GOMP_TASKGROUP_END:
4081 remove = !check_omp_nesting_restrictions (stmt, ctx);
4082 break;
4083 default:
4084 break;
4086 else if (ctx)
4088 omp_context *octx = ctx;
4089 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SCAN && ctx->outer)
4090 octx = ctx->outer;
4091 if (octx->order_concurrent && omp_runtime_api_call (fndecl))
4093 remove = true;
4094 error_at (gimple_location (stmt),
4095 "OpenMP runtime API call %qD in a region with "
4096 "%<order(concurrent)%> clause", fndecl);
4098 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET
4099 && (gimple_omp_target_kind (ctx->stmt)
4100 == GF_OMP_TARGET_KIND_REGION))
4102 tree tgt_clauses = gimple_omp_target_clauses (ctx->stmt);
4103 tree c = omp_find_clause (tgt_clauses, OMP_CLAUSE_DEVICE);
4104 if (c && OMP_CLAUSE_DEVICE_ANCESTOR (c))
4105 error_at (gimple_location (stmt),
4106 "OpenMP runtime API call %qD in a region with "
4107 "%<device(ancestor)%> clause", fndecl);
4112 if (remove)
4114 stmt = gimple_build_nop ();
4115 gsi_replace (gsi, stmt, false);
4118 *handled_ops_p = true;
4120 switch (gimple_code (stmt))
4122 case GIMPLE_OMP_PARALLEL:
4123 taskreg_nesting_level++;
4124 scan_omp_parallel (gsi, ctx);
4125 taskreg_nesting_level--;
4126 break;
4128 case GIMPLE_OMP_TASK:
4129 taskreg_nesting_level++;
4130 scan_omp_task (gsi, ctx);
4131 taskreg_nesting_level--;
4132 break;
4134 case GIMPLE_OMP_FOR:
4135 if ((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
4136 == GF_OMP_FOR_KIND_SIMD)
4137 && gimple_omp_for_combined_into_p (stmt)
4138 && gimple_code (ctx->stmt) != GIMPLE_OMP_SCAN)
4140 tree clauses = gimple_omp_for_clauses (as_a <gomp_for *> (stmt));
4141 tree c = omp_find_clause (clauses, OMP_CLAUSE_REDUCTION);
4142 if (c && OMP_CLAUSE_REDUCTION_INSCAN (c) && !seen_error ())
4144 scan_omp_simd_scan (gsi, as_a <gomp_for *> (stmt), ctx);
4145 break;
4148 if ((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
4149 == GF_OMP_FOR_KIND_SIMD)
4150 && omp_maybe_offloaded_ctx (ctx)
4151 && omp_max_simt_vf ()
4152 && gimple_omp_for_collapse (stmt) == 1)
4153 scan_omp_simd (gsi, as_a <gomp_for *> (stmt), ctx);
4154 else
4155 scan_omp_for (as_a <gomp_for *> (stmt), ctx);
4156 break;
4158 case GIMPLE_OMP_SCOPE:
4159 ctx = new_omp_context (stmt, ctx);
4160 scan_sharing_clauses (gimple_omp_scope_clauses (stmt), ctx);
4161 scan_omp (gimple_omp_body_ptr (stmt), ctx);
4162 break;
4164 case GIMPLE_OMP_SECTIONS:
4165 scan_omp_sections (as_a <gomp_sections *> (stmt), ctx);
4166 break;
4168 case GIMPLE_OMP_SINGLE:
4169 scan_omp_single (as_a <gomp_single *> (stmt), ctx);
4170 break;
4172 case GIMPLE_OMP_SCAN:
4173 if (tree clauses = gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt)))
4175 if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_INCLUSIVE)
4176 ctx->scan_inclusive = true;
4177 else if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_EXCLUSIVE)
4178 ctx->scan_exclusive = true;
4180 /* FALLTHRU */
4181 case GIMPLE_OMP_SECTION:
4182 case GIMPLE_OMP_MASTER:
4183 case GIMPLE_OMP_ORDERED:
4184 case GIMPLE_OMP_CRITICAL:
4185 ctx = new_omp_context (stmt, ctx);
4186 scan_omp (gimple_omp_body_ptr (stmt), ctx);
4187 break;
4189 case GIMPLE_OMP_MASKED:
4190 ctx = new_omp_context (stmt, ctx);
4191 scan_sharing_clauses (gimple_omp_masked_clauses (stmt), ctx);
4192 scan_omp (gimple_omp_body_ptr (stmt), ctx);
4193 break;
4195 case GIMPLE_OMP_TASKGROUP:
4196 ctx = new_omp_context (stmt, ctx);
4197 scan_sharing_clauses (gimple_omp_taskgroup_clauses (stmt), ctx);
4198 scan_omp (gimple_omp_body_ptr (stmt), ctx);
4199 break;
4201 case GIMPLE_OMP_TARGET:
4202 if (is_gimple_omp_offloaded (stmt))
4204 taskreg_nesting_level++;
4205 scan_omp_target (as_a <gomp_target *> (stmt), ctx);
4206 taskreg_nesting_level--;
4208 else
4209 scan_omp_target (as_a <gomp_target *> (stmt), ctx);
4210 break;
4212 case GIMPLE_OMP_TEAMS:
4213 if (gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
4215 taskreg_nesting_level++;
4216 scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
4217 taskreg_nesting_level--;
4219 else
4220 scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
4221 break;
4223 case GIMPLE_BIND:
4225 tree var;
4227 *handled_ops_p = false;
4228 if (ctx)
4229 for (var = gimple_bind_vars (as_a <gbind *> (stmt));
4230 var ;
4231 var = DECL_CHAIN (var))
4232 insert_decl_map (&ctx->cb, var, var);
4234 break;
4235 default:
4236 *handled_ops_p = false;
4237 break;
4240 return NULL_TREE;
4244 /* Scan all the statements starting at the current statement. CTX
4245 contains context information about the OMP directives and
4246 clauses found during the scan. */
4248 static void
4249 scan_omp (gimple_seq *body_p, omp_context *ctx)
4251 location_t saved_location;
4252 struct walk_stmt_info wi;
4254 memset (&wi, 0, sizeof (wi));
4255 wi.info = ctx;
4256 wi.want_locations = true;
4258 saved_location = input_location;
4259 walk_gimple_seq_mod (body_p, scan_omp_1_stmt, scan_omp_1_op, &wi);
4260 input_location = saved_location;
4263 /* Re-gimplification and code generation routines. */
4265 /* Remove omp_member_access_dummy_var variables from gimple_bind_vars
4266 of BIND if in a method. */
4268 static void
4269 maybe_remove_omp_member_access_dummy_vars (gbind *bind)
4271 if (DECL_ARGUMENTS (current_function_decl)
4272 && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
4273 && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
4274 == POINTER_TYPE))
4276 tree vars = gimple_bind_vars (bind);
4277 for (tree *pvar = &vars; *pvar; )
4278 if (omp_member_access_dummy_var (*pvar))
4279 *pvar = DECL_CHAIN (*pvar);
4280 else
4281 pvar = &DECL_CHAIN (*pvar);
4282 gimple_bind_set_vars (bind, vars);
4286 /* Remove omp_member_access_dummy_var variables from BLOCK_VARS of
4287 BLOCK and its subblocks. */
4289 static void
4290 remove_member_access_dummy_vars (tree block)
4292 for (tree *pvar = &BLOCK_VARS (block); *pvar; )
4293 if (omp_member_access_dummy_var (*pvar))
4294 *pvar = DECL_CHAIN (*pvar);
4295 else
4296 pvar = &DECL_CHAIN (*pvar);
4298 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
4299 remove_member_access_dummy_vars (block);
4302 /* If a context was created for STMT when it was scanned, return it. */
4304 static omp_context *
4305 maybe_lookup_ctx (gimple *stmt)
4307 splay_tree_node n;
4308 n = splay_tree_lookup (all_contexts, (splay_tree_key) stmt);
4309 return n ? (omp_context *) n->value : NULL;
4313 /* Find the mapping for DECL in CTX or the immediately enclosing
4314 context that has a mapping for DECL.
4316 If CTX is a nested parallel directive, we may have to use the decl
4317 mappings created in CTX's parent context. Suppose that we have the
4318 following parallel nesting (variable UIDs shown for clarity):
4320 iD.1562 = 0;
4321 #omp parallel shared(iD.1562) -> outer parallel
4322 iD.1562 = iD.1562 + 1;
4324 #omp parallel shared (iD.1562) -> inner parallel
4325 iD.1562 = iD.1562 - 1;
4327 Each parallel structure will create a distinct .omp_data_s structure
4328 for copying iD.1562 in/out of the directive:
4330 outer parallel .omp_data_s.1.i -> iD.1562
4331 inner parallel .omp_data_s.2.i -> iD.1562
4333 A shared variable mapping will produce a copy-out operation before
4334 the parallel directive and a copy-in operation after it. So, in
4335 this case we would have:
4337 iD.1562 = 0;
4338 .omp_data_o.1.i = iD.1562;
4339 #omp parallel shared(iD.1562) -> outer parallel
4340 .omp_data_i.1 = &.omp_data_o.1
4341 .omp_data_i.1->i = .omp_data_i.1->i + 1;
4343 .omp_data_o.2.i = iD.1562; -> **
4344 #omp parallel shared(iD.1562) -> inner parallel
4345 .omp_data_i.2 = &.omp_data_o.2
4346 .omp_data_i.2->i = .omp_data_i.2->i - 1;
4349 ** This is a problem. The symbol iD.1562 cannot be referenced
4350 inside the body of the outer parallel region. But since we are
4351 emitting this copy operation while expanding the inner parallel
4352 directive, we need to access the CTX structure of the outer
4353 parallel directive to get the correct mapping:
4355 .omp_data_o.2.i = .omp_data_i.1->i
4357 Since there may be other workshare or parallel directives enclosing
4358 the parallel directive, it may be necessary to walk up the context
4359 parent chain. This is not a problem in general because nested
4360 parallelism happens only rarely. */
4362 static tree
4363 lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
4365 tree t;
4366 omp_context *up;
4368 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
4369 t = maybe_lookup_decl (decl, up);
4371 gcc_assert (!ctx->is_nested || t || is_global_var (decl));
4373 return t ? t : decl;
4377 /* Similar to lookup_decl_in_outer_ctx, but return DECL if not found
4378 in outer contexts. */
4380 static tree
4381 maybe_lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
4383 tree t = NULL;
4384 omp_context *up;
4386 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
4387 t = maybe_lookup_decl (decl, up);
4389 return t ? t : decl;
4393 /* Construct the initialization value for reduction operation OP. */
4395 tree
4396 omp_reduction_init_op (location_t loc, enum tree_code op, tree type)
4398 switch (op)
4400 case PLUS_EXPR:
4401 case MINUS_EXPR:
4402 case BIT_IOR_EXPR:
4403 case BIT_XOR_EXPR:
4404 case TRUTH_OR_EXPR:
4405 case TRUTH_ORIF_EXPR:
4406 case TRUTH_XOR_EXPR:
4407 case NE_EXPR:
4408 return build_zero_cst (type);
4410 case MULT_EXPR:
4411 case TRUTH_AND_EXPR:
4412 case TRUTH_ANDIF_EXPR:
4413 case EQ_EXPR:
4414 return fold_convert_loc (loc, type, integer_one_node);
4416 case BIT_AND_EXPR:
4417 return fold_convert_loc (loc, type, integer_minus_one_node);
4419 case MAX_EXPR:
4420 if (SCALAR_FLOAT_TYPE_P (type))
4422 REAL_VALUE_TYPE max, min;
4423 if (HONOR_INFINITIES (type))
4425 real_inf (&max);
4426 real_arithmetic (&min, NEGATE_EXPR, &max, NULL);
4428 else
4429 real_maxval (&min, 1, TYPE_MODE (type));
4430 return build_real (type, min);
4432 else if (POINTER_TYPE_P (type))
4434 wide_int min
4435 = wi::min_value (TYPE_PRECISION (type), TYPE_SIGN (type));
4436 return wide_int_to_tree (type, min);
4438 else
4440 gcc_assert (INTEGRAL_TYPE_P (type));
4441 return TYPE_MIN_VALUE (type);
4444 case MIN_EXPR:
4445 if (SCALAR_FLOAT_TYPE_P (type))
4447 REAL_VALUE_TYPE max;
4448 if (HONOR_INFINITIES (type))
4449 real_inf (&max);
4450 else
4451 real_maxval (&max, 0, TYPE_MODE (type));
4452 return build_real (type, max);
4454 else if (POINTER_TYPE_P (type))
4456 wide_int max
4457 = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
4458 return wide_int_to_tree (type, max);
4460 else
4462 gcc_assert (INTEGRAL_TYPE_P (type));
4463 return TYPE_MAX_VALUE (type);
4466 default:
4467 gcc_unreachable ();
4471 /* Construct the initialization value for reduction CLAUSE. */
4473 tree
4474 omp_reduction_init (tree clause, tree type)
4476 return omp_reduction_init_op (OMP_CLAUSE_LOCATION (clause),
4477 OMP_CLAUSE_REDUCTION_CODE (clause), type);
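/* For example, for an int reduction variable the private copy is
   initialized above with the operation's identity value:

     reduction(+:x)    ->  0
     reduction(*:x)    ->  1
     reduction(&:x)    ->  -1  (all bits set)
     reduction(max:x)  ->  INT_MIN
     reduction(min:x)  ->  INT_MAX

   reduction(-:x) gets 0 like reduction(+:x), since the partial results
   are summed.  */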
4480 /* Return the alignment to be assumed for the var in CLAUSE, which should be
4481 OMP_CLAUSE_ALIGNED. */
4483 static tree
4484 omp_clause_aligned_alignment (tree clause)
4486 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
4487 return OMP_CLAUSE_ALIGNED_ALIGNMENT (clause);
4489 /* Otherwise return an implementation-defined alignment. */
4490 unsigned int al = 1;
4491 opt_scalar_mode mode_iter;
4492 auto_vector_modes modes;
4493 targetm.vectorize.autovectorize_vector_modes (&modes, true);
4494 static enum mode_class classes[]
4495 = { MODE_INT, MODE_VECTOR_INT, MODE_FLOAT, MODE_VECTOR_FLOAT };
4496 for (int i = 0; i < 4; i += 2)
4497 /* The for loop above dictates that we only walk through scalar classes. */
4498 FOR_EACH_MODE_IN_CLASS (mode_iter, classes[i])
4500 scalar_mode mode = mode_iter.require ();
4501 machine_mode vmode = targetm.vectorize.preferred_simd_mode (mode);
4502 if (GET_MODE_CLASS (vmode) != classes[i + 1])
4503 continue;
4504 machine_mode alt_vmode;
4505 for (unsigned int j = 0; j < modes.length (); ++j)
4506 if (related_vector_mode (modes[j], mode).exists (&alt_vmode)
4507 && known_ge (GET_MODE_SIZE (alt_vmode), GET_MODE_SIZE (vmode)))
4508 vmode = alt_vmode;
4510 tree type = lang_hooks.types.type_for_mode (mode, 1);
4511 if (type == NULL_TREE || TYPE_MODE (type) != mode)
4512 continue;
4513 type = build_vector_type_for_mode (type, vmode);
4514 if (TYPE_MODE (type) != vmode)
4515 continue;
4516 if (TYPE_ALIGN_UNIT (type) > al)
4517 al = TYPE_ALIGN_UNIT (type);
4519 return build_int_cst (integer_type_node, al);
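/* A hypothetical example of the implementation-defined fallback: on an
   x86_64 target where the widest autovectorizable mode for int is
   V8SImode (32 bytes), a clause such as

     #pragma omp simd aligned (p)     // int *p, no explicit alignment

   would be assumed to guarantee 32-byte alignment; the exact value is
   entirely target dependent.  */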
4523 /* This structure is part of the interface between lower_rec_simd_input_clauses
4524 and lower_rec_input_clauses. */
4526 class omplow_simd_context {
4527 public:
4528 omplow_simd_context () { memset (this, 0, sizeof (*this)); }
4529 tree idx;
4530 tree lane;
4531 tree lastlane;
4532 vec<tree, va_heap> simt_eargs;
4533 gimple_seq simt_dlist;
4534 poly_uint64_pod max_vf;
4535 bool is_simt;
4538 /* Helper function of lower_rec_input_clauses, used for #pragma omp simd
4539 privatization. */
4541 static bool
4542 lower_rec_simd_input_clauses (tree new_var, omp_context *ctx,
4543 omplow_simd_context *sctx, tree &ivar,
4544 tree &lvar, tree *rvar = NULL,
4545 tree *rvar2 = NULL)
4547 if (known_eq (sctx->max_vf, 0U))
4549 sctx->max_vf = sctx->is_simt ? omp_max_simt_vf () : omp_max_vf ();
4550 if (maybe_gt (sctx->max_vf, 1U))
4552 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
4553 OMP_CLAUSE_SAFELEN);
4554 if (c)
4556 poly_uint64 safe_len;
4557 if (!poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
4558 || maybe_lt (safe_len, 1U))
4559 sctx->max_vf = 1;
4560 else
4561 sctx->max_vf = lower_bound (sctx->max_vf, safe_len);
4564 if (sctx->is_simt && !known_eq (sctx->max_vf, 1U))
4566 for (tree c = gimple_omp_for_clauses (ctx->stmt); c;
4567 c = OMP_CLAUSE_CHAIN (c))
4569 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
4570 continue;
4572 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4574 /* UDR reductions are not yet supported for SIMT;
4575 disable SIMT. */
4576 sctx->max_vf = 1;
4577 break;
4580 if (truth_value_p (OMP_CLAUSE_REDUCTION_CODE (c))
4581 && !INTEGRAL_TYPE_P (TREE_TYPE (new_var)))
4583 /* Doing boolean operations on non-integral types is
4584 for conformance only; it's not worth supporting this
4585 for SIMT. */
4586 sctx->max_vf = 1;
4587 break;
4591 if (maybe_gt (sctx->max_vf, 1U))
4593 sctx->idx = create_tmp_var (unsigned_type_node);
4594 sctx->lane = create_tmp_var (unsigned_type_node);
4597 if (known_eq (sctx->max_vf, 1U))
4598 return false;
4600 if (sctx->is_simt)
4602 if (is_gimple_reg (new_var))
4604 ivar = lvar = new_var;
4605 return true;
4607 tree type = TREE_TYPE (new_var), ptype = build_pointer_type (type);
4608 ivar = lvar = create_tmp_var (type);
4609 TREE_ADDRESSABLE (ivar) = 1;
4610 DECL_ATTRIBUTES (ivar) = tree_cons (get_identifier ("omp simt private"),
4611 NULL, DECL_ATTRIBUTES (ivar));
4612 sctx->simt_eargs.safe_push (build1 (ADDR_EXPR, ptype, ivar));
4613 tree clobber = build_clobber (type);
4614 gimple *g = gimple_build_assign (ivar, clobber);
4615 gimple_seq_add_stmt (&sctx->simt_dlist, g);
4617 else
4619 tree atype = build_array_type_nelts (TREE_TYPE (new_var), sctx->max_vf);
4620 tree avar = create_tmp_var_raw (atype);
4621 if (TREE_ADDRESSABLE (new_var))
4622 TREE_ADDRESSABLE (avar) = 1;
4623 DECL_ATTRIBUTES (avar)
4624 = tree_cons (get_identifier ("omp simd array"), NULL,
4625 DECL_ATTRIBUTES (avar));
4626 gimple_add_tmp_var (avar);
4627 tree iavar = avar;
4628 if (rvar && !ctx->for_simd_scan_phase)
4630 /* For inscan reductions, create another array temporary,
4631 which will hold the reduced value. */
4632 iavar = create_tmp_var_raw (atype);
4633 if (TREE_ADDRESSABLE (new_var))
4634 TREE_ADDRESSABLE (iavar) = 1;
4635 DECL_ATTRIBUTES (iavar)
4636 = tree_cons (get_identifier ("omp simd array"), NULL,
4637 tree_cons (get_identifier ("omp simd inscan"), NULL,
4638 DECL_ATTRIBUTES (iavar)));
4639 gimple_add_tmp_var (iavar);
4640 ctx->cb.decl_map->put (avar, iavar);
4641 if (sctx->lastlane == NULL_TREE)
4642 sctx->lastlane = create_tmp_var (unsigned_type_node);
4643 *rvar = build4 (ARRAY_REF, TREE_TYPE (new_var), iavar,
4644 sctx->lastlane, NULL_TREE, NULL_TREE);
4645 TREE_THIS_NOTRAP (*rvar) = 1;
4647 if (ctx->scan_exclusive)
4649 /* And for exclusive scan yet another one, which will
4650 hold the value during the scan phase. */
4651 tree savar = create_tmp_var_raw (atype);
4652 if (TREE_ADDRESSABLE (new_var))
4653 TREE_ADDRESSABLE (savar) = 1;
4654 DECL_ATTRIBUTES (savar)
4655 = tree_cons (get_identifier ("omp simd array"), NULL,
4656 tree_cons (get_identifier ("omp simd inscan "
4657 "exclusive"), NULL,
4658 DECL_ATTRIBUTES (savar)));
4659 gimple_add_tmp_var (savar);
4660 ctx->cb.decl_map->put (iavar, savar);
4661 *rvar2 = build4 (ARRAY_REF, TREE_TYPE (new_var), savar,
4662 sctx->idx, NULL_TREE, NULL_TREE);
4663 TREE_THIS_NOTRAP (*rvar2) = 1;
4666 ivar = build4 (ARRAY_REF, TREE_TYPE (new_var), iavar, sctx->idx,
4667 NULL_TREE, NULL_TREE);
4668 lvar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, sctx->lane,
4669 NULL_TREE, NULL_TREE);
4670 TREE_THIS_NOTRAP (ivar) = 1;
4671 TREE_THIS_NOTRAP (lvar) = 1;
4673 if (DECL_P (new_var))
4675 SET_DECL_VALUE_EXPR (new_var, lvar);
4676 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4678 return true;
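/* Roughly, for the non-SIMT path above, a privatized scalar D in
   #pragma omp simd private (D) becomes an "omp simd array"

     T D_arr[max_vf];   // one element per SIMD lane

   where IVAR = D_arr[idx] is used for per-lane accesses and
   LVAR = D_arr[lane] is installed as D's DECL_VALUE_EXPR, to be
   resolved to the actual vector lanes by the vectorizer later.  */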
4681 /* Helper function of lower_rec_input_clauses. For a reference
4682 in a simd reduction, add an underlying variable for it to reference. */
4684 static void
4685 handle_simd_reference (location_t loc, tree new_vard, gimple_seq *ilist)
4687 tree z = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_vard)));
4688 if (TREE_CONSTANT (z))
4690 z = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_vard)),
4691 get_name (new_vard));
4692 gimple_add_tmp_var (z);
4693 TREE_ADDRESSABLE (z) = 1;
4694 z = build_fold_addr_expr_loc (loc, z);
4695 gimplify_assign (new_vard, z, ilist);
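/* E.g. for a reduction on a C++ reference T &r with constant-size T,
   the helper above emits, in effect:

     T D.tmp;              // addressable underlying temporary
     new_vard = &D.tmp;    // the privatized reference now points to it
*/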
4699 /* Helper function for lower_rec_input_clauses. Emit into the ILIST
4700 sequence code that computes (type) (tskred_temp[idx]). */
4702 static tree
4703 task_reduction_read (gimple_seq *ilist, tree tskred_temp, tree type,
4704 unsigned idx)
4706 unsigned HOST_WIDE_INT sz
4707 = tree_to_uhwi (TYPE_SIZE_UNIT (pointer_sized_int_node));
4708 tree r = build2 (MEM_REF, pointer_sized_int_node,
4709 tskred_temp, build_int_cst (TREE_TYPE (tskred_temp),
4710 idx * sz));
4711 tree v = create_tmp_var (pointer_sized_int_node);
4712 gimple *g = gimple_build_assign (v, r);
4713 gimple_seq_add_stmt (ilist, g);
4714 if (!useless_type_conversion_p (type, pointer_sized_int_node))
4716 v = create_tmp_var (type);
4717 g = gimple_build_assign (v, NOP_EXPR, gimple_assign_lhs (g));
4718 gimple_seq_add_stmt (ilist, g);
4720 return v;
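/* The emitted sequence is, roughly (with uintptr_t standing in for
   pointer_sized_int_node):

     v = MEM[(uintptr_t *) (tskred_temp + idx * sizeof (uintptr_t))];
     v2 = (type) v;   // only when a conversion to TYPE is needed

   and the final temporary is returned.  */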
4723 /* Lower early initialization of privatized variable NEW_VAR
4724 if it needs an allocator (i.e. has an allocate clause). */
4726 static bool
4727 lower_private_allocate (tree var, tree new_var, tree &allocator,
4728 tree &allocate_ptr, gimple_seq *ilist,
4729 omp_context *ctx, bool is_ref, tree size)
4731 if (allocator)
4732 return false;
4733 gcc_assert (allocate_ptr == NULL_TREE);
4734 if (ctx->allocate_map
4735 && (DECL_P (new_var) || (TYPE_P (new_var) && size)))
4736 if (tree *allocatorp = ctx->allocate_map->get (var))
4737 allocator = *allocatorp;
4738 if (allocator == NULL_TREE)
4739 return false;
4740 if (!is_ref && omp_privatize_by_reference (var))
4742 allocator = NULL_TREE;
4743 return false;
4746 unsigned HOST_WIDE_INT ialign = 0;
4747 if (TREE_CODE (allocator) == TREE_LIST)
4749 ialign = tree_to_uhwi (TREE_VALUE (allocator));
4750 allocator = TREE_PURPOSE (allocator);
4752 if (TREE_CODE (allocator) != INTEGER_CST)
4753 allocator = build_outer_var_ref (allocator, ctx);
4754 allocator = fold_convert (pointer_sized_int_node, allocator);
4755 if (TREE_CODE (allocator) != INTEGER_CST)
4757 tree var = create_tmp_var (TREE_TYPE (allocator));
4758 gimplify_assign (var, allocator, ilist);
4759 allocator = var;
4762 tree ptr_type, align, sz = size;
4763 if (TYPE_P (new_var))
4765 ptr_type = build_pointer_type (new_var);
4766 ialign = MAX (ialign, TYPE_ALIGN_UNIT (new_var));
4768 else if (is_ref)
4770 ptr_type = build_pointer_type (TREE_TYPE (TREE_TYPE (new_var)));
4771 ialign = MAX (ialign, TYPE_ALIGN_UNIT (TREE_TYPE (ptr_type)));
4773 else
4775 ptr_type = build_pointer_type (TREE_TYPE (new_var));
4776 ialign = MAX (ialign, DECL_ALIGN_UNIT (new_var));
4777 if (sz == NULL_TREE)
4778 sz = fold_convert (size_type_node, DECL_SIZE_UNIT (new_var));
4780 align = build_int_cst (size_type_node, ialign);
4781 if (TREE_CODE (sz) != INTEGER_CST)
4783 tree szvar = create_tmp_var (size_type_node);
4784 gimplify_assign (szvar, sz, ilist);
4785 sz = szvar;
4787 allocate_ptr = create_tmp_var (ptr_type);
4788 tree a = builtin_decl_explicit (BUILT_IN_GOMP_ALLOC);
4789 gimple *g = gimple_build_call (a, 3, align, sz, allocator);
4790 gimple_call_set_lhs (g, allocate_ptr);
4791 gimple_seq_add_stmt (ilist, g);
4792 if (!is_ref)
4794 tree x = build_simple_mem_ref (allocate_ptr);
4795 TREE_THIS_NOTRAP (x) = 1;
4796 SET_DECL_VALUE_EXPR (new_var, x);
4797 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4799 return true;
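/* In effect, for a privatized VAR covered by allocate (al : var), the
   code emitted above is approximately:

     allocate_ptr = GOMP_alloc (align, size, al);
     // non-reference case: VAR's uses now go through *allocate_ptr
     // via DECL_VALUE_EXPR; the matching GOMP_free is emitted by the
     // callers when the construct ends.
*/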
4802 /* Generate code to implement the input clauses, FIRSTPRIVATE and COPYIN,
4803 from the receiver (aka child) side and initializers for REFERENCE_TYPE
4804 private variables. Initialization statements go in ILIST, while calls
4805 to destructors go in DLIST. */
4807 static void
4808 lower_rec_input_clauses (tree clauses, gimple_seq *ilist, gimple_seq *dlist,
4809 omp_context *ctx, struct omp_for_data *fd)
4811 tree c, copyin_seq, x, ptr;
4812 bool copyin_by_ref = false;
4813 bool lastprivate_firstprivate = false;
4814 bool reduction_omp_orig_ref = false;
4815 int pass;
4816 bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
4817 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD);
4818 omplow_simd_context sctx = omplow_simd_context ();
4819 tree simt_lane = NULL_TREE, simtrec = NULL_TREE;
4820 tree ivar = NULL_TREE, lvar = NULL_TREE, uid = NULL_TREE;
4821 gimple_seq llist[4] = { };
4822 tree nonconst_simd_if = NULL_TREE;
4824 copyin_seq = NULL;
4825 sctx.is_simt = is_simd && omp_find_clause (clauses, OMP_CLAUSE__SIMT_);
4827 /* Set max_vf=1 (which will later enforce safelen=1) in simd loops
4828 with data sharing clauses referencing variable sized vars. That
4829 is unnecessarily hard to support and very unlikely to result in
4830 vectorized code anyway. */
4831 if (is_simd)
4832 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
4833 switch (OMP_CLAUSE_CODE (c))
4835 case OMP_CLAUSE_LINEAR:
4836 if (OMP_CLAUSE_LINEAR_ARRAY (c))
4837 sctx.max_vf = 1;
4838 /* FALLTHRU */
4839 case OMP_CLAUSE_PRIVATE:
4840 case OMP_CLAUSE_FIRSTPRIVATE:
4841 case OMP_CLAUSE_LASTPRIVATE:
4842 if (is_variable_sized (OMP_CLAUSE_DECL (c)))
4843 sctx.max_vf = 1;
4844 else if (omp_privatize_by_reference (OMP_CLAUSE_DECL (c)))
4846 tree rtype = TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c)));
4847 if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype)))
4848 sctx.max_vf = 1;
4850 break;
4851 case OMP_CLAUSE_REDUCTION:
4852 case OMP_CLAUSE_IN_REDUCTION:
4853 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
4854 || is_variable_sized (OMP_CLAUSE_DECL (c)))
4855 sctx.max_vf = 1;
4856 else if (omp_privatize_by_reference (OMP_CLAUSE_DECL (c)))
4858 tree rtype = TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c)));
4859 if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype)))
4860 sctx.max_vf = 1;
4862 break;
4863 case OMP_CLAUSE_IF:
4864 if (integer_zerop (OMP_CLAUSE_IF_EXPR (c)))
4865 sctx.max_vf = 1;
4866 else if (TREE_CODE (OMP_CLAUSE_IF_EXPR (c)) != INTEGER_CST)
4867 nonconst_simd_if = OMP_CLAUSE_IF_EXPR (c);
4868 break;
4869 case OMP_CLAUSE_SIMDLEN:
4870 if (integer_onep (OMP_CLAUSE_SIMDLEN_EXPR (c)))
4871 sctx.max_vf = 1;
4872 break;
4873 case OMP_CLAUSE__CONDTEMP_:
4874 /* FIXME: lastprivate(conditional:) not handled for SIMT yet. */
4875 if (sctx.is_simt)
4876 sctx.max_vf = 1;
4877 break;
4878 default:
4879 continue;
4882 /* Add a placeholder for simduid. */
4883 if (sctx.is_simt && maybe_ne (sctx.max_vf, 1U))
4884 sctx.simt_eargs.safe_push (NULL_TREE);
4886 unsigned task_reduction_cnt = 0;
4887 unsigned task_reduction_cntorig = 0;
4888 unsigned task_reduction_cnt_full = 0;
4889 unsigned task_reduction_cntorig_full = 0;
4890 unsigned task_reduction_other_cnt = 0;
4891 tree tskred_atype = NULL_TREE, tskred_avar = NULL_TREE;
4892 tree tskred_base = NULL_TREE, tskred_temp = NULL_TREE;
4893 /* Do all the fixed sized types in the first pass, and the variable sized
4894 types in the second pass. This makes sure that the scalar arguments to
4895 the variable sized types are processed before we use them in the
4896 variable sized operations. For task reductions we use 4 passes: in the
4897 first two we ignore them, in the third we gather arguments for the
4898 GOMP_task_reduction_remap call, and in the last pass we actually handle
4899 the task reductions. */
4900 for (pass = 0; pass < ((task_reduction_cnt || task_reduction_other_cnt)
4901 ? 4 : 2); ++pass)
4903 if (pass == 2 && task_reduction_cnt)
4905 tskred_atype
4906 = build_array_type_nelts (ptr_type_node, task_reduction_cnt
4907 + task_reduction_cntorig);
4908 tskred_avar = create_tmp_var_raw (tskred_atype);
4909 gimple_add_tmp_var (tskred_avar);
4910 TREE_ADDRESSABLE (tskred_avar) = 1;
4911 task_reduction_cnt_full = task_reduction_cnt;
4912 task_reduction_cntorig_full = task_reduction_cntorig;
4914 else if (pass == 3 && task_reduction_cnt)
4916 x = builtin_decl_explicit (BUILT_IN_GOMP_TASK_REDUCTION_REMAP);
4917 gimple *g
4918 = gimple_build_call (x, 3, size_int (task_reduction_cnt),
4919 size_int (task_reduction_cntorig),
4920 build_fold_addr_expr (tskred_avar));
4921 gimple_seq_add_stmt (ilist, g);
4923 if (pass == 3 && task_reduction_other_cnt)
4925 /* For reduction clauses, build
4926 tskred_base = (void *) tskred_temp[2]
4927 + omp_get_thread_num () * tskred_temp[1]
4928 or, if tskred_temp[1] is known to be constant, use that constant
4929 directly. This is the start of the private reduction copy block
4930 for the current thread. */
4931 tree v = create_tmp_var (integer_type_node);
4932 x = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
4933 gimple *g = gimple_build_call (x, 0);
4934 gimple_call_set_lhs (g, v);
4935 gimple_seq_add_stmt (ilist, g);
4936 c = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
4937 tskred_temp = OMP_CLAUSE_DECL (c);
4938 if (is_taskreg_ctx (ctx))
4939 tskred_temp = lookup_decl (tskred_temp, ctx);
4940 tree v2 = create_tmp_var (sizetype);
4941 g = gimple_build_assign (v2, NOP_EXPR, v);
4942 gimple_seq_add_stmt (ilist, g);
4943 if (ctx->task_reductions[0])
4944 v = fold_convert (sizetype, ctx->task_reductions[0]);
4945 else
4946 v = task_reduction_read (ilist, tskred_temp, sizetype, 1);
4947 tree v3 = create_tmp_var (sizetype);
4948 g = gimple_build_assign (v3, MULT_EXPR, v2, v);
4949 gimple_seq_add_stmt (ilist, g);
4950 v = task_reduction_read (ilist, tskred_temp, ptr_type_node, 2);
4951 tskred_base = create_tmp_var (ptr_type_node);
4952 g = gimple_build_assign (tskred_base, POINTER_PLUS_EXPR, v, v3);
4953 gimple_seq_add_stmt (ilist, g);
4955 task_reduction_cnt = 0;
4956 task_reduction_cntorig = 0;
4957 task_reduction_other_cnt = 0;
4958 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
4960 enum omp_clause_code c_kind = OMP_CLAUSE_CODE (c);
4961 tree var, new_var;
4962 bool by_ref;
4963 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
4964 bool task_reduction_p = false;
4965 bool task_reduction_needs_orig_p = false;
4966 tree cond = NULL_TREE;
4967 tree allocator, allocate_ptr;
4969 switch (c_kind)
4971 case OMP_CLAUSE_PRIVATE:
4972 if (OMP_CLAUSE_PRIVATE_DEBUG (c))
4973 continue;
4974 break;
4975 case OMP_CLAUSE_SHARED:
4976 /* Ignore shared directives in teams construct inside
4977 of target construct. */
4978 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
4979 && !is_host_teams_ctx (ctx))
4980 continue;
4981 if (maybe_lookup_decl (OMP_CLAUSE_DECL (c), ctx) == NULL)
4983 gcc_assert (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c)
4984 || is_global_var (OMP_CLAUSE_DECL (c)));
4985 continue;
4987 case OMP_CLAUSE_FIRSTPRIVATE:
4988 case OMP_CLAUSE_COPYIN:
4989 break;
4990 case OMP_CLAUSE_LINEAR:
4991 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
4992 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
4993 lastprivate_firstprivate = true;
4994 break;
4995 case OMP_CLAUSE_REDUCTION:
4996 case OMP_CLAUSE_IN_REDUCTION:
4997 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
4998 || is_task_ctx (ctx)
4999 || OMP_CLAUSE_REDUCTION_TASK (c))
5001 task_reduction_p = true;
5002 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
5004 task_reduction_other_cnt++;
5005 if (pass == 2)
5006 continue;
5008 else
5009 task_reduction_cnt++;
5010 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
5012 var = OMP_CLAUSE_DECL (c);
5013 /* If var is a global variable that isn't privatized
5014 in outer contexts, we don't need to look up the
5015 original address; it is always the address of the
5016 global variable itself. */
5017 if (!DECL_P (var)
5018 || omp_privatize_by_reference (var)
5019 || !is_global_var
5020 (maybe_lookup_decl_in_outer_ctx (var, ctx)))
5022 task_reduction_needs_orig_p = true;
5023 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5024 task_reduction_cntorig++;
5028 else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
5029 reduction_omp_orig_ref = true;
5030 break;
5031 case OMP_CLAUSE__REDUCTEMP_:
5032 if (!is_taskreg_ctx (ctx))
5033 continue;
5034 /* FALLTHRU */
5035 case OMP_CLAUSE__LOOPTEMP_:
5036 /* Handle _looptemp_/_reductemp_ clauses only on
5037 parallel/task. */
5038 if (fd)
5039 continue;
5040 break;
5041 case OMP_CLAUSE_LASTPRIVATE:
5042 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
5044 lastprivate_firstprivate = true;
5045 if (pass != 0 || is_taskloop_ctx (ctx))
5046 continue;
5048 /* Even without a corresponding firstprivate, if the
5049 decl is a Fortran allocatable, it needs an outer var
5050 reference. */
5051 else if (pass == 0
5052 && lang_hooks.decls.omp_private_outer_ref
5053 (OMP_CLAUSE_DECL (c)))
5054 lastprivate_firstprivate = true;
5055 break;
5056 case OMP_CLAUSE_ALIGNED:
5057 if (pass != 1)
5058 continue;
5059 var = OMP_CLAUSE_DECL (c);
5060 if (TREE_CODE (TREE_TYPE (var)) == POINTER_TYPE
5061 && !is_global_var (var))
5063 new_var = maybe_lookup_decl (var, ctx);
5064 if (new_var == NULL_TREE)
5065 new_var = maybe_lookup_decl_in_outer_ctx (var, ctx);
5066 x = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
5067 tree alarg = omp_clause_aligned_alignment (c);
5068 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
5069 x = build_call_expr_loc (clause_loc, x, 2, new_var, alarg);
5070 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
5071 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
5072 gimplify_and_add (x, ilist);
5074 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
5075 && is_global_var (var))
5077 tree ptype = build_pointer_type (TREE_TYPE (var)), t, t2;
5078 new_var = lookup_decl (var, ctx);
5079 t = maybe_lookup_decl_in_outer_ctx (var, ctx);
5080 t = build_fold_addr_expr_loc (clause_loc, t);
5081 t2 = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
5082 tree alarg = omp_clause_aligned_alignment (c);
5083 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
5084 t = build_call_expr_loc (clause_loc, t2, 2, t, alarg);
5085 t = fold_convert_loc (clause_loc, ptype, t);
5086 x = create_tmp_var (ptype);
5087 t = build2 (MODIFY_EXPR, ptype, x, t);
5088 gimplify_and_add (t, ilist);
5089 t = build_simple_mem_ref_loc (clause_loc, x);
5090 SET_DECL_VALUE_EXPR (new_var, t);
5091 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5093 continue;
5094 case OMP_CLAUSE__CONDTEMP_:
5095 if (is_parallel_ctx (ctx)
5096 || (is_simd && !OMP_CLAUSE__CONDTEMP__ITER (c)))
5097 break;
5098 continue;
5099 default:
5100 continue;
5103 if (task_reduction_p != (pass >= 2))
5104 continue;
5106 allocator = NULL_TREE;
5107 allocate_ptr = NULL_TREE;
5108 new_var = var = OMP_CLAUSE_DECL (c);
5109 if ((c_kind == OMP_CLAUSE_REDUCTION
5110 || c_kind == OMP_CLAUSE_IN_REDUCTION)
5111 && TREE_CODE (var) == MEM_REF)
5113 var = TREE_OPERAND (var, 0);
5114 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
5115 var = TREE_OPERAND (var, 0);
5116 if (TREE_CODE (var) == INDIRECT_REF
5117 || TREE_CODE (var) == ADDR_EXPR)
5118 var = TREE_OPERAND (var, 0);
5119 if (is_variable_sized (var))
5121 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
5122 var = DECL_VALUE_EXPR (var);
5123 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
5124 var = TREE_OPERAND (var, 0);
5125 gcc_assert (DECL_P (var));
5127 new_var = var;
5129 if (c_kind == OMP_CLAUSE_IN_REDUCTION && is_omp_target (ctx->stmt))
5131 splay_tree_key key = (splay_tree_key) &DECL_CONTEXT (var);
5132 new_var = (tree) splay_tree_lookup (ctx->field_map, key)->value;
5134 else if (c_kind != OMP_CLAUSE_COPYIN)
5135 new_var = lookup_decl (var, ctx);
5137 if (c_kind == OMP_CLAUSE_SHARED || c_kind == OMP_CLAUSE_COPYIN)
5139 if (pass != 0)
5140 continue;
5142 /* C/C++ array section reductions. */
5143 else if ((c_kind == OMP_CLAUSE_REDUCTION
5144 || c_kind == OMP_CLAUSE_IN_REDUCTION)
5145 && var != OMP_CLAUSE_DECL (c))
5147 if (pass == 0)
5148 continue;
5150 tree bias = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
5151 tree orig_var = TREE_OPERAND (OMP_CLAUSE_DECL (c), 0);
5153 if (TREE_CODE (orig_var) == POINTER_PLUS_EXPR)
5155 tree b = TREE_OPERAND (orig_var, 1);
5156 if (is_omp_target (ctx->stmt))
5157 b = NULL_TREE;
5158 else
5159 b = maybe_lookup_decl (b, ctx);
5160 if (b == NULL)
5162 b = TREE_OPERAND (orig_var, 1);
5163 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
5165 if (integer_zerop (bias))
5166 bias = b;
5167 else
5169 bias = fold_convert_loc (clause_loc,
5170 TREE_TYPE (b), bias);
5171 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
5172 TREE_TYPE (b), b, bias);
5174 orig_var = TREE_OPERAND (orig_var, 0);
5176 if (pass == 2)
5178 tree out = maybe_lookup_decl_in_outer_ctx (var, ctx);
5179 if (is_global_var (out)
5180 && TREE_CODE (TREE_TYPE (out)) != POINTER_TYPE
5181 && (TREE_CODE (TREE_TYPE (out)) != REFERENCE_TYPE
5182 || (TREE_CODE (TREE_TYPE (TREE_TYPE (out)))
5183 != POINTER_TYPE)))
5184 x = var;
5185 else if (is_omp_target (ctx->stmt))
5186 x = out;
5187 else
5189 bool by_ref = use_pointer_for_field (var, NULL);
5190 x = build_receiver_ref (var, by_ref, ctx);
5191 if (TREE_CODE (TREE_TYPE (var)) == REFERENCE_TYPE
5192 && (TREE_CODE (TREE_TYPE (TREE_TYPE (var)))
5193 == POINTER_TYPE))
5194 x = build_fold_addr_expr (x);
5196 if (TREE_CODE (orig_var) == INDIRECT_REF)
5197 x = build_simple_mem_ref (x);
5198 else if (TREE_CODE (orig_var) == ADDR_EXPR)
5200 if (var == TREE_OPERAND (orig_var, 0))
5201 x = build_fold_addr_expr (x);
5203 bias = fold_convert (sizetype, bias);
5204 x = fold_convert (ptr_type_node, x);
5205 x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
5206 TREE_TYPE (x), x, bias);
5207 unsigned cnt = task_reduction_cnt - 1;
5208 if (!task_reduction_needs_orig_p)
5209 cnt += (task_reduction_cntorig_full
5210 - task_reduction_cntorig);
5211 else
5212 cnt = task_reduction_cntorig - 1;
5213 tree r = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5214 size_int (cnt), NULL_TREE, NULL_TREE);
5215 gimplify_assign (r, x, ilist);
5216 continue;
5219 if (TREE_CODE (orig_var) == INDIRECT_REF
5220 || TREE_CODE (orig_var) == ADDR_EXPR)
5221 orig_var = TREE_OPERAND (orig_var, 0);
5222 tree d = OMP_CLAUSE_DECL (c);
5223 tree type = TREE_TYPE (d);
5224 gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
5225 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
5226 tree sz = v;
5227 const char *name = get_name (orig_var);
5228 if (pass != 3 && !TREE_CONSTANT (v))
5230 tree t;
5231 if (is_omp_target (ctx->stmt))
5232 t = NULL_TREE;
5233 else
5234 t = maybe_lookup_decl (v, ctx);
5235 if (t)
5236 v = t;
5237 else
5238 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
5239 gimplify_expr (&v, ilist, NULL, is_gimple_val, fb_rvalue);
5240 t = fold_build2_loc (clause_loc, PLUS_EXPR,
5241 TREE_TYPE (v), v,
5242 build_int_cst (TREE_TYPE (v), 1));
5243 sz = fold_build2_loc (clause_loc, MULT_EXPR,
5244 TREE_TYPE (v), t,
5245 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5247 if (pass == 3)
5249 tree xv = create_tmp_var (ptr_type_node);
5250 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5252 unsigned cnt = task_reduction_cnt - 1;
5253 if (!task_reduction_needs_orig_p)
5254 cnt += (task_reduction_cntorig_full
5255 - task_reduction_cntorig);
5256 else
5257 cnt = task_reduction_cntorig - 1;
5258 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5259 size_int (cnt), NULL_TREE, NULL_TREE);
5261 gimple *g = gimple_build_assign (xv, x);
5262 gimple_seq_add_stmt (ilist, g);
5264 else
5266 unsigned int idx = *ctx->task_reduction_map->get (c);
5267 tree off;
5268 if (ctx->task_reductions[1 + idx])
5269 off = fold_convert (sizetype,
5270 ctx->task_reductions[1 + idx]);
5271 else
5272 off = task_reduction_read (ilist, tskred_temp, sizetype,
5273 7 + 3 * idx + 1);
5274 gimple *g = gimple_build_assign (xv, POINTER_PLUS_EXPR,
5275 tskred_base, off);
5276 gimple_seq_add_stmt (ilist, g);
5278 x = fold_convert (build_pointer_type (boolean_type_node),
5279 xv);
5280 if (TREE_CONSTANT (v))
5281 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (x), x,
5282 TYPE_SIZE_UNIT (type));
5283 else
5285 tree t;
5286 if (is_omp_target (ctx->stmt))
5287 t = NULL_TREE;
5288 else
5289 t = maybe_lookup_decl (v, ctx);
5290 if (t)
5291 v = t;
5292 else
5293 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
5294 gimplify_expr (&v, ilist, NULL, is_gimple_val,
5295 fb_rvalue);
5296 t = fold_build2_loc (clause_loc, PLUS_EXPR,
5297 TREE_TYPE (v), v,
5298 build_int_cst (TREE_TYPE (v), 1));
5299 t = fold_build2_loc (clause_loc, MULT_EXPR,
5300 TREE_TYPE (v), t,
5301 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5302 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (x), x, t);
5304 cond = create_tmp_var (TREE_TYPE (x));
5305 gimplify_assign (cond, x, ilist);
5306 x = xv;
5308 else if (lower_private_allocate (var, type, allocator,
5309 allocate_ptr, ilist, ctx,
5310 true,
5311 TREE_CONSTANT (v)
5312 ? TYPE_SIZE_UNIT (type)
5313 : sz))
5314 x = allocate_ptr;
5315 else if (TREE_CONSTANT (v))
5317 x = create_tmp_var_raw (type, name);
5318 gimple_add_tmp_var (x);
5319 TREE_ADDRESSABLE (x) = 1;
5320 x = build_fold_addr_expr_loc (clause_loc, x);
5322 else
5324 tree atmp
5325 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
5326 tree al = size_int (TYPE_ALIGN (TREE_TYPE (type)));
5327 x = build_call_expr_loc (clause_loc, atmp, 2, sz, al);
5330 tree ptype = build_pointer_type (TREE_TYPE (type));
5331 x = fold_convert_loc (clause_loc, ptype, x);
5332 tree y = create_tmp_var (ptype, name);
5333 gimplify_assign (y, x, ilist);
5334 x = y;
5335 tree yb = y;
5337 if (!integer_zerop (bias))
5339 bias = fold_convert_loc (clause_loc, pointer_sized_int_node,
5340 bias);
5341 yb = fold_convert_loc (clause_loc, pointer_sized_int_node, y);
5343 yb = fold_build2_loc (clause_loc, MINUS_EXPR,
5344 pointer_sized_int_node, yb, bias);
5345 x = fold_convert_loc (clause_loc, TREE_TYPE (x), yb);
5346 yb = create_tmp_var (ptype, name);
5347 gimplify_assign (yb, x, ilist);
5348 x = yb;
5351 d = TREE_OPERAND (d, 0);
5352 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
5353 d = TREE_OPERAND (d, 0);
5354 if (TREE_CODE (d) == ADDR_EXPR)
5356 if (orig_var != var)
5358 gcc_assert (is_variable_sized (orig_var));
5359 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
5361 gimplify_assign (new_var, x, ilist);
5362 tree new_orig_var = lookup_decl (orig_var, ctx);
5363 tree t = build_fold_indirect_ref (new_var);
5364 DECL_IGNORED_P (new_var) = 0;
5365 TREE_THIS_NOTRAP (t) = 1;
5366 SET_DECL_VALUE_EXPR (new_orig_var, t);
5367 DECL_HAS_VALUE_EXPR_P (new_orig_var) = 1;
5369 else
5371 x = build2 (MEM_REF, TREE_TYPE (new_var), x,
5372 build_int_cst (ptype, 0));
5373 SET_DECL_VALUE_EXPR (new_var, x);
5374 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5377 else
5379 gcc_assert (orig_var == var);
5380 if (TREE_CODE (d) == INDIRECT_REF)
5382 x = create_tmp_var (ptype, name);
5383 TREE_ADDRESSABLE (x) = 1;
5384 gimplify_assign (x, yb, ilist);
5385 x = build_fold_addr_expr_loc (clause_loc, x);
5387 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
5388 gimplify_assign (new_var, x, ilist);
5390 /* GOMP_taskgroup_reduction_register memsets the whole
5391 array to zero. If the initializer is zero, we don't
5392 need to initialize it again; just mark it as ever
5393 used unconditionally, i.e. cond = true. */
5394 if (cond
5395 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) == NULL_TREE
5396 && initializer_zerop (omp_reduction_init (c,
5397 TREE_TYPE (type))))
5399 gimple *g = gimple_build_assign (build_simple_mem_ref (cond),
5400 boolean_true_node);
5401 gimple_seq_add_stmt (ilist, g);
5402 continue;
5404 tree end = create_artificial_label (UNKNOWN_LOCATION);
5405 if (cond)
5407 gimple *g;
5408 if (!is_parallel_ctx (ctx))
5410 tree condv = create_tmp_var (boolean_type_node);
5411 g = gimple_build_assign (condv,
5412 build_simple_mem_ref (cond));
5413 gimple_seq_add_stmt (ilist, g);
5414 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
5415 g = gimple_build_cond (NE_EXPR, condv,
5416 boolean_false_node, end, lab1);
5417 gimple_seq_add_stmt (ilist, g);
5418 gimple_seq_add_stmt (ilist, gimple_build_label (lab1));
5420 g = gimple_build_assign (build_simple_mem_ref (cond),
5421 boolean_true_node);
5422 gimple_seq_add_stmt (ilist, g);
5425 tree y1 = create_tmp_var (ptype);
5426 gimplify_assign (y1, y, ilist);
5427 tree i2 = NULL_TREE, y2 = NULL_TREE;
5428 tree body2 = NULL_TREE, end2 = NULL_TREE;
5429 tree y3 = NULL_TREE, y4 = NULL_TREE;
5430 if (task_reduction_needs_orig_p)
5432 y3 = create_tmp_var (ptype);
5433 tree ref;
5434 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5435 ref = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5436 size_int (task_reduction_cnt_full
5437 + task_reduction_cntorig - 1),
5438 NULL_TREE, NULL_TREE);
5439 else
5441 unsigned int idx = *ctx->task_reduction_map->get (c);
5442 ref = task_reduction_read (ilist, tskred_temp, ptype,
5443 7 + 3 * idx);
5445 gimplify_assign (y3, ref, ilist);
5447 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) || is_simd)
5449 if (pass != 3)
5451 y2 = create_tmp_var (ptype);
5452 gimplify_assign (y2, y, ilist);
5454 if (is_simd || OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
5456 tree ref = build_outer_var_ref (var, ctx);
5457 /* For references, build_outer_var_ref already performs this. */
5458 if (TREE_CODE (d) == INDIRECT_REF)
5459 gcc_assert (omp_privatize_by_reference (var));
5460 else if (TREE_CODE (d) == ADDR_EXPR)
5461 ref = build_fold_addr_expr (ref);
5462 else if (omp_privatize_by_reference (var))
5463 ref = build_fold_addr_expr (ref);
5464 ref = fold_convert_loc (clause_loc, ptype, ref);
5465 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
5466 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
5468 y3 = create_tmp_var (ptype);
5469 gimplify_assign (y3, unshare_expr (ref), ilist);
5471 if (is_simd)
5473 y4 = create_tmp_var (ptype);
5474 gimplify_assign (y4, ref, dlist);
5478 tree i = create_tmp_var (TREE_TYPE (v));
5479 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), ilist);
5480 tree body = create_artificial_label (UNKNOWN_LOCATION);
5481 gimple_seq_add_stmt (ilist, gimple_build_label (body));
5482 if (y2)
5484 i2 = create_tmp_var (TREE_TYPE (v));
5485 gimplify_assign (i2, build_int_cst (TREE_TYPE (v), 0), dlist);
5486 body2 = create_artificial_label (UNKNOWN_LOCATION);
5487 end2 = create_artificial_label (UNKNOWN_LOCATION);
5488 gimple_seq_add_stmt (dlist, gimple_build_label (body2));
5490 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5492 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
5493 tree decl_placeholder
5494 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
5495 SET_DECL_VALUE_EXPR (decl_placeholder,
5496 build_simple_mem_ref (y1));
5497 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
5498 SET_DECL_VALUE_EXPR (placeholder,
5499 y3 ? build_simple_mem_ref (y3)
5500 : error_mark_node);
5501 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
5502 x = lang_hooks.decls.omp_clause_default_ctor
5503 (c, build_simple_mem_ref (y1),
5504 y3 ? build_simple_mem_ref (y3) : NULL_TREE);
5505 if (x)
5506 gimplify_and_add (x, ilist);
5507 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
5509 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
5510 lower_omp (&tseq, ctx);
5511 gimple_seq_add_seq (ilist, tseq);
5513 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
5514 if (is_simd)
5516 SET_DECL_VALUE_EXPR (decl_placeholder,
5517 build_simple_mem_ref (y2));
5518 SET_DECL_VALUE_EXPR (placeholder,
5519 build_simple_mem_ref (y4));
5520 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
5521 lower_omp (&tseq, ctx);
5522 gimple_seq_add_seq (dlist, tseq);
5523 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5525 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
5526 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 0;
5527 if (y2)
5529 x = lang_hooks.decls.omp_clause_dtor
5530 (c, build_simple_mem_ref (y2));
5531 if (x)
5532 gimplify_and_add (x, dlist);
5535 else
5537 x = omp_reduction_init (c, TREE_TYPE (type));
5538 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
5540 /* reduction(-:var) sums up the partial results, so it
5541 acts identically to reduction(+:var). */
5542 if (code == MINUS_EXPR)
5543 code = PLUS_EXPR;
5545 gimplify_assign (build_simple_mem_ref (y1), x, ilist);
5546 if (is_simd)
5548 x = build2 (code, TREE_TYPE (type),
5549 build_simple_mem_ref (y4),
5550 build_simple_mem_ref (y2));
5551 gimplify_assign (build_simple_mem_ref (y4), x, dlist);
5554 gimple *g
5555 = gimple_build_assign (y1, POINTER_PLUS_EXPR, y1,
5556 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5557 gimple_seq_add_stmt (ilist, g);
5558 if (y3)
5560 g = gimple_build_assign (y3, POINTER_PLUS_EXPR, y3,
5561 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5562 gimple_seq_add_stmt (ilist, g);
5564 g = gimple_build_assign (i, PLUS_EXPR, i,
5565 build_int_cst (TREE_TYPE (i), 1));
5566 gimple_seq_add_stmt (ilist, g);
5567 g = gimple_build_cond (LE_EXPR, i, v, body, end);
5568 gimple_seq_add_stmt (ilist, g);
5569 gimple_seq_add_stmt (ilist, gimple_build_label (end));
5570 if (y2)
5572 g = gimple_build_assign (y2, POINTER_PLUS_EXPR, y2,
5573 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5574 gimple_seq_add_stmt (dlist, g);
5575 if (y4)
5577 g = gimple_build_assign
5578 (y4, POINTER_PLUS_EXPR, y4,
5579 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5580 gimple_seq_add_stmt (dlist, g);
5582 g = gimple_build_assign (i2, PLUS_EXPR, i2,
5583 build_int_cst (TREE_TYPE (i2), 1));
5584 gimple_seq_add_stmt (dlist, g);
5585 g = gimple_build_cond (LE_EXPR, i2, v, body2, end2);
5586 gimple_seq_add_stmt (dlist, g);
5587 gimple_seq_add_stmt (dlist, gimple_build_label (end2));
5589 if (allocator)
5591 tree f = builtin_decl_explicit (BUILT_IN_GOMP_FREE);
5592 g = gimple_build_call (f, 2, allocate_ptr, allocator);
5593 gimple_seq_add_stmt (dlist, g);
5595 continue;
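/* In effect, pass 2 stores the address of the original list item
   into the TSKRED_AVAR array at the slot computed from the
   reduction counts below.  */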
5597 else if (pass == 2)
5599 tree out = maybe_lookup_decl_in_outer_ctx (var, ctx);
5600 if (is_global_var (out))
5601 x = var;
5602 else if (is_omp_target (ctx->stmt))
5603 x = out;
5604 else
5606 bool by_ref = use_pointer_for_field (var, ctx);
5607 x = build_receiver_ref (var, by_ref, ctx);
5609 if (!omp_privatize_by_reference (var))
5610 x = build_fold_addr_expr (x);
5611 x = fold_convert (ptr_type_node, x);
5612 unsigned cnt = task_reduction_cnt - 1;
5613 if (!task_reduction_needs_orig_p)
5614 cnt += task_reduction_cntorig_full - task_reduction_cntorig;
5615 else
5616 cnt = task_reduction_cntorig - 1;
5617 tree r = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5618 size_int (cnt), NULL_TREE, NULL_TREE);
5619 gimplify_assign (r, x, ilist);
5620 continue;
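/* Pass 3, in effect, fetches the address of this variable's
   privatized copy from the task reduction data (either the local
   array or TSKRED_BASE plus an offset), rewires NEW_VAR to
   dereference it, and points COND at the bool flag placed right
   after the copy.  */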
5622 else if (pass == 3)
5624 tree type = TREE_TYPE (new_var);
5625 if (!omp_privatize_by_reference (var))
5626 type = build_pointer_type (type);
5627 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5629 unsigned cnt = task_reduction_cnt - 1;
5630 if (!task_reduction_needs_orig_p)
5631 cnt += (task_reduction_cntorig_full
5632 - task_reduction_cntorig);
5633 else
5634 cnt = task_reduction_cntorig - 1;
5635 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5636 size_int (cnt), NULL_TREE, NULL_TREE);
5638 else
5640 unsigned int idx = *ctx->task_reduction_map->get (c);
5641 tree off;
5642 if (ctx->task_reductions[1 + idx])
5643 off = fold_convert (sizetype,
5644 ctx->task_reductions[1 + idx]);
5645 else
5646 off = task_reduction_read (ilist, tskred_temp, sizetype,
5647 7 + 3 * idx + 1);
5648 x = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node,
5649 tskred_base, off);
5651 x = fold_convert (type, x);
5652 tree t;
5653 if (omp_privatize_by_reference (var))
5655 gimplify_assign (new_var, x, ilist);
5656 t = new_var;
5657 new_var = build_simple_mem_ref (new_var);
5659 else
5661 t = create_tmp_var (type);
5662 gimplify_assign (t, x, ilist);
5663 SET_DECL_VALUE_EXPR (new_var, build_simple_mem_ref (t));
5664 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5666 t = fold_convert (build_pointer_type (boolean_type_node), t);
5667 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t,
5668 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5669 cond = create_tmp_var (TREE_TYPE (t));
5670 gimplify_assign (cond, t, ilist);
5672 else if (is_variable_sized (var))
5674 /* For variable sized types, we need to allocate the
5675 actual storage here. Call alloca and store the
5676 result in the pointer decl that we created elsewhere. */
5677 if (pass == 0)
5678 continue;
5680 if (c_kind != OMP_CLAUSE_FIRSTPRIVATE || !is_task_ctx (ctx))
5682 tree tmp;
5684 ptr = DECL_VALUE_EXPR (new_var);
5685 gcc_assert (TREE_CODE (ptr) == INDIRECT_REF);
5686 ptr = TREE_OPERAND (ptr, 0);
5687 gcc_assert (DECL_P (ptr));
5688 x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
5690 if (lower_private_allocate (var, new_var, allocator,
5691 allocate_ptr, ilist, ctx,
5692 false, x))
5693 tmp = allocate_ptr;
5694 else
5696 /* void *tmp = __builtin_alloca_with_align (size, align); */
5697 tree atmp
5698 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
5699 gcall *stmt
5700 = gimple_build_call (atmp, 2, x,
5701 size_int (DECL_ALIGN (var)));
5702 cfun->calls_alloca = 1;
5703 tmp = create_tmp_var_raw (ptr_type_node);
5704 gimple_add_tmp_var (tmp);
5705 gimple_call_set_lhs (stmt, tmp);
5707 gimple_seq_add_stmt (ilist, stmt);
5710 x = fold_convert_loc (clause_loc, TREE_TYPE (ptr), tmp);
5711 gimplify_assign (ptr, x, ilist);
5714 else if (omp_privatize_by_reference (var)
5715 && (c_kind != OMP_CLAUSE_FIRSTPRIVATE
5716 || !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c)))
5718 /* For references that are being privatized for Fortran,
5719 allocate new backing storage for the new pointer
5720 variable. This lets us avoid changing all the
5721 code that expects a pointer into code that expects
5722 a direct variable. */
5723 if (pass == 0)
5724 continue;
5726 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
5727 if (c_kind == OMP_CLAUSE_FIRSTPRIVATE && is_task_ctx (ctx))
5729 x = build_receiver_ref (var, false, ctx);
5730 if (ctx->allocate_map)
5731 if (tree *allocatep = ctx->allocate_map->get (var))
5733 allocator = *allocatep;
5734 if (TREE_CODE (allocator) == TREE_LIST)
5735 allocator = TREE_PURPOSE (allocator);
5736 if (TREE_CODE (allocator) != INTEGER_CST)
5737 allocator = build_outer_var_ref (allocator, ctx);
5738 allocator = fold_convert (pointer_sized_int_node,
5739 allocator);
5740 allocate_ptr = unshare_expr (x);
5742 if (allocator == NULL_TREE)
5743 x = build_fold_addr_expr_loc (clause_loc, x);
5745 else if (lower_private_allocate (var, new_var, allocator,
5746 allocate_ptr,
5747 ilist, ctx, true, x))
5748 x = allocate_ptr;
5749 else if (TREE_CONSTANT (x))
5751 /* For a reduction in a SIMD loop, defer adding the
5752 initialization of the reference, because if we decide
5753 to use a SIMD array for it, the initialization could cause
5754 an expansion ICE. Ditto for other privatization clauses. */
5755 if (is_simd)
5756 x = NULL_TREE;
5757 else
5759 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
5760 get_name (var));
5761 gimple_add_tmp_var (x);
5762 TREE_ADDRESSABLE (x) = 1;
5763 x = build_fold_addr_expr_loc (clause_loc, x);
5766 else
5768 tree atmp
5769 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
5770 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
5771 tree al = size_int (TYPE_ALIGN (rtype));
5772 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
5775 if (x)
5777 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
5778 gimplify_assign (new_var, x, ilist);
5781 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
5783 else if ((c_kind == OMP_CLAUSE_REDUCTION
5784 || c_kind == OMP_CLAUSE_IN_REDUCTION)
5785 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5787 if (pass == 0)
5788 continue;
5790 else if (pass != 0)
5791 continue;
5793 switch (OMP_CLAUSE_CODE (c))
5795 case OMP_CLAUSE_SHARED:
5796 /* Ignore shared directives in a teams construct inside
5797 a target construct. */
5798 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
5799 && !is_host_teams_ctx (ctx))
5800 continue;
5801 /* Shared global vars are just accessed directly. */
5802 if (is_global_var (new_var))
5803 break;
5804 /* For taskloop firstprivate/lastprivate, represented
5805 as a firstprivate and shared clause on the task, new_var
5806 is the firstprivate var. */
5807 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
5808 break;
5809 /* Set up the DECL_VALUE_EXPR for shared variables now. This
5810 needs to be delayed until after fixup_child_record_type so
5811 that we get the correct type during the dereference. */
5812 by_ref = use_pointer_for_field (var, ctx);
5813 x = build_receiver_ref (var, by_ref, ctx);
5814 SET_DECL_VALUE_EXPR (new_var, x);
5815 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5817 /* ??? If VAR is not passed by reference, and the variable
5818 hasn't been initialized yet, then we'll get a warning for
5819 the store into the omp_data_s structure. Ideally, we'd be
5820 able to notice this and not store anything at all, but
5821 we're generating code too early. Suppress the warning. */
5822 if (!by_ref)
5823 suppress_warning (var, OPT_Wuninitialized);
5824 break;
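/* _condtemp_ clauses carry the helper temporaries used by the
   lastprivate(conditional:) lowering; see
   lower_lastprivate_conditional_clauses further below.  */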
5826 case OMP_CLAUSE__CONDTEMP_:
5827 if (is_parallel_ctx (ctx))
5829 x = build_receiver_ref (var, false, ctx);
5830 SET_DECL_VALUE_EXPR (new_var, x);
5831 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5833 else if (is_simd && !OMP_CLAUSE__CONDTEMP__ITER (c))
5835 x = build_zero_cst (TREE_TYPE (var));
5836 goto do_private;
5838 break;
5840 case OMP_CLAUSE_LASTPRIVATE:
5841 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
5842 break;
5843 /* FALLTHRU */
5845 case OMP_CLAUSE_PRIVATE:
5846 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE)
5847 x = build_outer_var_ref (var, ctx);
5848 else if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
5850 if (is_task_ctx (ctx))
5851 x = build_receiver_ref (var, false, ctx);
5852 else
5853 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_PRIVATE);
5855 else
5856 x = NULL;
5857 do_private:
5858 tree nx;
5859 bool copy_ctor;
5860 copy_ctor = false;
5861 lower_private_allocate (var, new_var, allocator, allocate_ptr,
5862 ilist, ctx, false, NULL_TREE);
5863 nx = unshare_expr (new_var);
5864 if (is_simd
5865 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5866 && OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c))
5867 copy_ctor = true;
5868 if (copy_ctor)
5869 nx = lang_hooks.decls.omp_clause_copy_ctor (c, nx, x);
5870 else
5871 nx = lang_hooks.decls.omp_clause_default_ctor (c, nx, x);
5872 if (is_simd)
5874 tree y = lang_hooks.decls.omp_clause_dtor (c, new_var);
5875 if ((TREE_ADDRESSABLE (new_var) || nx || y
5876 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5877 && (gimple_omp_for_collapse (ctx->stmt) != 1
5878 || (gimple_omp_for_index (ctx->stmt, 0)
5879 != new_var)))
5880 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE__CONDTEMP_
5881 || omp_privatize_by_reference (var))
5882 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
5883 ivar, lvar))
5885 if (omp_privatize_by_reference (var))
5887 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5888 tree new_vard = TREE_OPERAND (new_var, 0);
5889 gcc_assert (DECL_P (new_vard));
5890 SET_DECL_VALUE_EXPR (new_vard,
5891 build_fold_addr_expr (lvar));
5892 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
5895 if (nx)
5897 tree iv = unshare_expr (ivar);
5898 if (copy_ctor)
5899 x = lang_hooks.decls.omp_clause_copy_ctor (c, iv, x);
5901 else
5902 x = lang_hooks.decls.omp_clause_default_ctor (c, iv, x);
5906 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE__CONDTEMP_)
5908 x = build2 (MODIFY_EXPR, TREE_TYPE (ivar),
5909 unshare_expr (ivar), x);
5910 nx = x;
5912 if (nx && x)
5913 gimplify_and_add (x, &llist[0]);
5914 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5915 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
5917 tree v = new_var;
5918 if (!DECL_P (v))
5920 gcc_assert (TREE_CODE (v) == MEM_REF);
5921 v = TREE_OPERAND (v, 0);
5922 gcc_assert (DECL_P (v));
5924 v = *ctx->lastprivate_conditional_map->get (v);
5925 tree t = create_tmp_var (TREE_TYPE (v));
5926 tree z = build_zero_cst (TREE_TYPE (v));
5927 tree orig_v
5928 = build_outer_var_ref (var, ctx,
5929 OMP_CLAUSE_LASTPRIVATE);
5930 gimple_seq_add_stmt (dlist,
5931 gimple_build_assign (t, z));
5932 gcc_assert (DECL_HAS_VALUE_EXPR_P (v));
5933 tree civar = DECL_VALUE_EXPR (v);
5934 gcc_assert (TREE_CODE (civar) == ARRAY_REF);
5935 civar = unshare_expr (civar);
5936 TREE_OPERAND (civar, 1) = sctx.idx;
5937 x = build2 (MODIFY_EXPR, TREE_TYPE (t), t,
5938 unshare_expr (civar));
5939 x = build2 (COMPOUND_EXPR, TREE_TYPE (orig_v), x,
5940 build2 (MODIFY_EXPR, TREE_TYPE (orig_v),
5941 orig_v, unshare_expr (ivar)));
5942 tree cond = build2 (LT_EXPR, boolean_type_node, t,
5943 civar);
5944 x = build3 (COND_EXPR, void_type_node, cond, x,
5945 void_node);
5946 gimple_seq tseq = NULL;
5947 gimplify_and_add (x, &tseq);
5948 if (ctx->outer)
5949 lower_omp (&tseq, ctx->outer);
5950 gimple_seq_add_seq (&llist[1], tseq);
5952 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5953 && ctx->for_simd_scan_phase)
5955 x = unshare_expr (ivar);
5956 tree orig_v
5957 = build_outer_var_ref (var, ctx,
5958 OMP_CLAUSE_LASTPRIVATE);
5959 x = lang_hooks.decls.omp_clause_assign_op (c, x,
5960 orig_v);
5961 gimplify_and_add (x, &llist[0]);
5963 if (y)
5965 y = lang_hooks.decls.omp_clause_dtor (c, ivar);
5966 if (y)
5967 gimplify_and_add (y, &llist[1]);
5969 break;
5971 if (omp_privatize_by_reference (var))
5973 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5974 tree new_vard = TREE_OPERAND (new_var, 0);
5975 gcc_assert (DECL_P (new_vard));
5976 tree type = TREE_TYPE (TREE_TYPE (new_vard));
5977 x = TYPE_SIZE_UNIT (type);
5978 if (TREE_CONSTANT (x))
5980 x = create_tmp_var_raw (type, get_name (var));
5981 gimple_add_tmp_var (x);
5982 TREE_ADDRESSABLE (x) = 1;
5983 x = build_fold_addr_expr_loc (clause_loc, x);
5984 x = fold_convert_loc (clause_loc,
5985 TREE_TYPE (new_vard), x);
5986 gimplify_assign (new_vard, x, ilist);
5990 if (nx)
5991 gimplify_and_add (nx, ilist);
5992 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5993 && is_simd
5994 && ctx->for_simd_scan_phase)
5996 tree orig_v = build_outer_var_ref (var, ctx,
5997 OMP_CLAUSE_LASTPRIVATE);
5998 x = lang_hooks.decls.omp_clause_assign_op (c, new_var,
5999 orig_v);
6000 gimplify_and_add (x, ilist);
6002 /* FALLTHRU */
6004 do_dtor:
6005 x = lang_hooks.decls.omp_clause_dtor (c, new_var);
6006 if (x)
6007 gimplify_and_add (x, dlist);
6008 if (allocator)
6010 if (!is_gimple_val (allocator))
6012 tree avar = create_tmp_var (TREE_TYPE (allocator));
6013 gimplify_assign (avar, allocator, dlist);
6014 allocator = avar;
6016 if (!is_gimple_val (allocate_ptr))
6018 tree apvar = create_tmp_var (TREE_TYPE (allocate_ptr));
6019 gimplify_assign (apvar, allocate_ptr, dlist);
6020 allocate_ptr = apvar;
6022 tree f = builtin_decl_explicit (BUILT_IN_GOMP_FREE);
6023 gimple *g
6024 = gimple_build_call (f, 2, allocate_ptr, allocator);
6025 gimple_seq_add_stmt (dlist, g);
6027 break;
6029 case OMP_CLAUSE_LINEAR:
6030 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
6031 goto do_firstprivate;
6032 if (OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
6033 x = NULL;
6034 else
6035 x = build_outer_var_ref (var, ctx);
6036 goto do_private;
6038 case OMP_CLAUSE_FIRSTPRIVATE:
6039 if (is_task_ctx (ctx))
6041 if ((omp_privatize_by_reference (var)
6042 && !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c))
6043 || is_variable_sized (var))
6044 goto do_dtor;
6045 else if (is_global_var (maybe_lookup_decl_in_outer_ctx (var,
6046 ctx))
6047 || use_pointer_for_field (var, NULL))
6049 x = build_receiver_ref (var, false, ctx);
6050 if (ctx->allocate_map)
6051 if (tree *allocatep = ctx->allocate_map->get (var))
6053 allocator = *allocatep;
6054 if (TREE_CODE (allocator) == TREE_LIST)
6055 allocator = TREE_PURPOSE (allocator);
6056 if (TREE_CODE (allocator) != INTEGER_CST)
6057 allocator = build_outer_var_ref (allocator, ctx);
6058 allocator = fold_convert (pointer_sized_int_node,
6059 allocator);
6060 allocate_ptr = unshare_expr (x);
6061 x = build_simple_mem_ref (x);
6062 TREE_THIS_NOTRAP (x) = 1;
6064 SET_DECL_VALUE_EXPR (new_var, x);
6065 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
6066 goto do_dtor;
6069 if (OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c)
6070 && omp_privatize_by_reference (var))
6072 x = build_outer_var_ref (var, ctx);
6073 gcc_assert (TREE_CODE (x) == MEM_REF
6074 && integer_zerop (TREE_OPERAND (x, 1)));
6075 x = TREE_OPERAND (x, 0);
6076 x = lang_hooks.decls.omp_clause_copy_ctor
6077 (c, unshare_expr (new_var), x);
6078 gimplify_and_add (x, ilist);
6079 goto do_dtor;
6081 do_firstprivate:
6082 lower_private_allocate (var, new_var, allocator, allocate_ptr,
6083 ilist, ctx, false, NULL_TREE);
6084 x = build_outer_var_ref (var, ctx);
6085 if (is_simd)
6087 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
6088 && gimple_omp_for_combined_into_p (ctx->stmt))
6090 tree t = OMP_CLAUSE_LINEAR_STEP (c);
6091 tree stept = TREE_TYPE (t);
6092 tree ct = omp_find_clause (clauses,
6093 OMP_CLAUSE__LOOPTEMP_);
6094 gcc_assert (ct);
6095 tree l = OMP_CLAUSE_DECL (ct);
6096 tree n1 = fd->loop.n1;
6097 tree step = fd->loop.step;
6098 tree itype = TREE_TYPE (l);
6099 if (POINTER_TYPE_P (itype))
6100 itype = signed_type_for (itype);
6101 l = fold_build2 (MINUS_EXPR, itype, l, n1);
6102 if (TYPE_UNSIGNED (itype)
6103 && fd->loop.cond_code == GT_EXPR)
6104 l = fold_build2 (TRUNC_DIV_EXPR, itype,
6105 fold_build1 (NEGATE_EXPR, itype, l),
6106 fold_build1 (NEGATE_EXPR,
6107 itype, step));
6108 else
6109 l = fold_build2 (TRUNC_DIV_EXPR, itype, l, step);
6110 t = fold_build2 (MULT_EXPR, stept,
6111 fold_convert (stept, l), t);
6113 if (OMP_CLAUSE_LINEAR_ARRAY (c))
6115 if (omp_privatize_by_reference (var))
6117 gcc_assert (TREE_CODE (new_var) == MEM_REF);
6118 tree new_vard = TREE_OPERAND (new_var, 0);
6119 gcc_assert (DECL_P (new_vard));
6120 tree type = TREE_TYPE (TREE_TYPE (new_vard));
6121 nx = TYPE_SIZE_UNIT (type);
6122 if (TREE_CONSTANT (nx))
6124 nx = create_tmp_var_raw (type,
6125 get_name (var));
6126 gimple_add_tmp_var (nx);
6127 TREE_ADDRESSABLE (nx) = 1;
6128 nx = build_fold_addr_expr_loc (clause_loc,
6129 nx);
6130 nx = fold_convert_loc (clause_loc,
6131 TREE_TYPE (new_vard),
6132 nx);
6133 gimplify_assign (new_vard, nx, ilist);
6137 x = lang_hooks.decls.omp_clause_linear_ctor
6138 (c, new_var, x, t);
6139 gimplify_and_add (x, ilist);
6140 goto do_dtor;
6143 if (POINTER_TYPE_P (TREE_TYPE (x)))
6144 x = fold_build2 (POINTER_PLUS_EXPR,
6145 TREE_TYPE (x), x, t);
6146 else
6147 x = fold_build2 (PLUS_EXPR, TREE_TYPE (x), x, t);
6150 if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LINEAR
6151 || TREE_ADDRESSABLE (new_var)
6152 || omp_privatize_by_reference (var))
6153 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
6154 ivar, lvar))
6156 if (omp_privatize_by_reference (var))
6158 gcc_assert (TREE_CODE (new_var) == MEM_REF);
6159 tree new_vard = TREE_OPERAND (new_var, 0);
6160 gcc_assert (DECL_P (new_vard));
6161 SET_DECL_VALUE_EXPR (new_vard,
6162 build_fold_addr_expr (lvar));
6163 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
6165 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR)
6167 tree iv = create_tmp_var (TREE_TYPE (new_var));
6168 x = lang_hooks.decls.omp_clause_copy_ctor (c, iv, x);
6169 gimplify_and_add (x, ilist);
6170 gimple_stmt_iterator gsi
6171 = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
6172 gassign *g
6173 = gimple_build_assign (unshare_expr (lvar), iv);
6174 gsi_insert_before_without_update (&gsi, g,
6175 GSI_SAME_STMT);
6176 tree t = OMP_CLAUSE_LINEAR_STEP (c);
6177 enum tree_code code = PLUS_EXPR;
6178 if (POINTER_TYPE_P (TREE_TYPE (new_var)))
6179 code = POINTER_PLUS_EXPR;
6180 g = gimple_build_assign (iv, code, iv, t);
6181 gsi_insert_before_without_update (&gsi, g,
6182 GSI_SAME_STMT);
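/* I.e., at the top of the loop body each iteration performs,
   roughly, lvar = iv; iv += step; so successive iterations see
   successive values of the linear variable.  */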
6183 break;
6185 x = lang_hooks.decls.omp_clause_copy_ctor
6186 (c, unshare_expr (ivar), x);
6187 gimplify_and_add (x, &llist[0]);
6188 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
6189 if (x)
6190 gimplify_and_add (x, &llist[1]);
6191 break;
6193 if (omp_privatize_by_reference (var))
6195 gcc_assert (TREE_CODE (new_var) == MEM_REF);
6196 tree new_vard = TREE_OPERAND (new_var, 0);
6197 gcc_assert (DECL_P (new_vard));
6198 tree type = TREE_TYPE (TREE_TYPE (new_vard));
6199 nx = TYPE_SIZE_UNIT (type);
6200 if (TREE_CONSTANT (nx))
6202 nx = create_tmp_var_raw (type, get_name (var));
6203 gimple_add_tmp_var (nx);
6204 TREE_ADDRESSABLE (nx) = 1;
6205 nx = build_fold_addr_expr_loc (clause_loc, nx);
6206 nx = fold_convert_loc (clause_loc,
6207 TREE_TYPE (new_vard), nx);
6208 gimplify_assign (new_vard, nx, ilist);
6212 x = lang_hooks.decls.omp_clause_copy_ctor
6213 (c, unshare_expr (new_var), x);
6214 gimplify_and_add (x, ilist);
6215 goto do_dtor;
6217 case OMP_CLAUSE__LOOPTEMP_:
6218 case OMP_CLAUSE__REDUCTEMP_:
6219 gcc_assert (is_taskreg_ctx (ctx));
6220 x = build_outer_var_ref (var, ctx);
6221 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
6222 gimplify_and_add (x, ilist);
6223 break;
6225 case OMP_CLAUSE_COPYIN:
6226 by_ref = use_pointer_for_field (var, NULL);
6227 x = build_receiver_ref (var, by_ref, ctx);
6228 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, x);
6229 append_to_statement_list (x, &copyin_seq);
6230 copyin_by_ref |= by_ref;
6231 break;
6233 case OMP_CLAUSE_REDUCTION:
6234 case OMP_CLAUSE_IN_REDUCTION:
6235 /* OpenACC reductions are initialized using the
6236 GOACC_REDUCTION internal function. */
6237 if (is_gimple_omp_oacc (ctx->stmt))
6238 break;
6239 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
6241 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
6242 gimple *tseq;
6243 tree ptype = TREE_TYPE (placeholder);
6244 if (cond)
6246 x = error_mark_node;
6247 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c)
6248 && !task_reduction_needs_orig_p)
6249 x = var;
6250 else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
6252 tree pptype = build_pointer_type (ptype);
6253 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
6254 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
6255 size_int (task_reduction_cnt_full
6256 + task_reduction_cntorig - 1),
6257 NULL_TREE, NULL_TREE);
6258 else
6260 unsigned int idx
6261 = *ctx->task_reduction_map->get (c);
6262 x = task_reduction_read (ilist, tskred_temp,
6263 pptype, 7 + 3 * idx);
6265 x = fold_convert (pptype, x);
6266 x = build_simple_mem_ref (x);
6269 else
6271 lower_private_allocate (var, new_var, allocator,
6272 allocate_ptr, ilist, ctx, false,
6273 NULL_TREE);
6274 x = build_outer_var_ref (var, ctx);
6276 if (omp_privatize_by_reference (var)
6277 && !useless_type_conversion_p (ptype, TREE_TYPE (x)))
6278 x = build_fold_addr_expr_loc (clause_loc, x);
6280 SET_DECL_VALUE_EXPR (placeholder, x);
6281 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
6282 tree new_vard = new_var;
6283 if (omp_privatize_by_reference (var))
6285 gcc_assert (TREE_CODE (new_var) == MEM_REF);
6286 new_vard = TREE_OPERAND (new_var, 0);
6287 gcc_assert (DECL_P (new_vard));
6289 tree rvar = NULL_TREE, *rvarp = NULL, rvar2 = NULL_TREE;
6290 if (is_simd
6291 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6292 && OMP_CLAUSE_REDUCTION_INSCAN (c))
6293 rvarp = &rvar;
6294 if (is_simd
6295 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
6296 ivar, lvar, rvarp,
6297 &rvar2))
6299 if (new_vard == new_var)
6301 gcc_assert (DECL_VALUE_EXPR (new_var) == lvar);
6302 SET_DECL_VALUE_EXPR (new_var, ivar);
6304 else
6306 SET_DECL_VALUE_EXPR (new_vard,
6307 build_fold_addr_expr (ivar));
6308 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
6310 x = lang_hooks.decls.omp_clause_default_ctor
6311 (c, unshare_expr (ivar),
6312 build_outer_var_ref (var, ctx));
6313 if (rvarp && ctx->for_simd_scan_phase)
6315 if (x)
6316 gimplify_and_add (x, &llist[0]);
6317 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
6318 if (x)
6319 gimplify_and_add (x, &llist[1]);
6320 break;
6322 else if (rvarp)
6324 if (x)
6326 gimplify_and_add (x, &llist[0]);
6328 tree ivar2 = unshare_expr (lvar);
6329 TREE_OPERAND (ivar2, 1) = sctx.idx;
6330 x = lang_hooks.decls.omp_clause_default_ctor
6331 (c, ivar2, build_outer_var_ref (var, ctx));
6332 gimplify_and_add (x, &llist[0]);
6334 if (rvar2)
6336 x = lang_hooks.decls.omp_clause_default_ctor
6337 (c, unshare_expr (rvar2),
6338 build_outer_var_ref (var, ctx));
6339 gimplify_and_add (x, &llist[0]);
6342 /* For types that need construction, add another
6343 private var which will be default constructed
6344 and optionally initialized with
6345 OMP_CLAUSE_REDUCTION_GIMPLE_INIT; in the loop
6346 we want to assign this value instead of
6347 constructing and destructing it in each
6348 iteration. */
6349 tree nv = create_tmp_var_raw (TREE_TYPE (ivar));
6350 gimple_add_tmp_var (nv);
6351 ctx->cb.decl_map->put (TREE_OPERAND (rvar2
6352 ? rvar2
6353 : ivar, 0),
6354 nv);
6355 x = lang_hooks.decls.omp_clause_default_ctor
6356 (c, nv, build_outer_var_ref (var, ctx));
6357 gimplify_and_add (x, ilist);
6359 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
6361 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
6362 x = DECL_VALUE_EXPR (new_vard);
6363 tree vexpr = nv;
6364 if (new_vard != new_var)
6365 vexpr = build_fold_addr_expr (nv);
6366 SET_DECL_VALUE_EXPR (new_vard, vexpr);
6367 lower_omp (&tseq, ctx);
6368 SET_DECL_VALUE_EXPR (new_vard, x);
6369 gimple_seq_add_seq (ilist, tseq);
6370 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
6373 x = lang_hooks.decls.omp_clause_dtor (c, nv);
6374 if (x)
6375 gimplify_and_add (x, dlist);
6378 tree ref = build_outer_var_ref (var, ctx);
6379 x = unshare_expr (ivar);
6380 x = lang_hooks.decls.omp_clause_assign_op (c, x,
6381 ref);
6382 gimplify_and_add (x, &llist[0]);
6384 ref = build_outer_var_ref (var, ctx);
6385 x = lang_hooks.decls.omp_clause_assign_op (c, ref,
6386 rvar);
6387 gimplify_and_add (x, &llist[3]);
6389 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
6390 if (new_vard == new_var)
6391 SET_DECL_VALUE_EXPR (new_var, lvar);
6392 else
6393 SET_DECL_VALUE_EXPR (new_vard,
6394 build_fold_addr_expr (lvar));
6396 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
6397 if (x)
6398 gimplify_and_add (x, &llist[1]);
6400 tree ivar2 = unshare_expr (lvar);
6401 TREE_OPERAND (ivar2, 1) = sctx.idx;
6402 x = lang_hooks.decls.omp_clause_dtor (c, ivar2);
6403 if (x)
6404 gimplify_and_add (x, &llist[1]);
6406 if (rvar2)
6408 x = lang_hooks.decls.omp_clause_dtor (c, rvar2);
6409 if (x)
6410 gimplify_and_add (x, &llist[1]);
6412 break;
6414 if (x)
6415 gimplify_and_add (x, &llist[0]);
6416 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
6418 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
6419 lower_omp (&tseq, ctx);
6420 gimple_seq_add_seq (&llist[0], tseq);
6422 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
6423 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
6424 lower_omp (&tseq, ctx);
6425 gimple_seq_add_seq (&llist[1], tseq);
6426 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
6427 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
6428 if (new_vard == new_var)
6429 SET_DECL_VALUE_EXPR (new_var, lvar);
6430 else
6431 SET_DECL_VALUE_EXPR (new_vard,
6432 build_fold_addr_expr (lvar));
6433 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
6434 if (x)
6435 gimplify_and_add (x, &llist[1]);
6436 break;
6438 /* If this is a reference to a constant-size reduction var
6439 with a placeholder, we haven't emitted the initializer
6440 for it because that is undesirable if SIMD arrays are used.
6441 But if they aren't used, we need to emit the deferred
6442 initialization now. */
6443 else if (omp_privatize_by_reference (var) && is_simd)
6444 handle_simd_reference (clause_loc, new_vard, ilist);
6446 tree lab2 = NULL_TREE;
6447 if (cond)
6449 gimple *g;
6450 if (!is_parallel_ctx (ctx))
6452 tree condv = create_tmp_var (boolean_type_node);
6453 tree m = build_simple_mem_ref (cond);
6454 g = gimple_build_assign (condv, m);
6455 gimple_seq_add_stmt (ilist, g);
6456 tree lab1
6457 = create_artificial_label (UNKNOWN_LOCATION);
6458 lab2 = create_artificial_label (UNKNOWN_LOCATION);
6459 g = gimple_build_cond (NE_EXPR, condv,
6460 boolean_false_node,
6461 lab2, lab1);
6462 gimple_seq_add_stmt (ilist, g);
6463 gimple_seq_add_stmt (ilist,
6464 gimple_build_label (lab1));
6466 g = gimple_build_assign (build_simple_mem_ref (cond),
6467 boolean_true_node);
6468 gimple_seq_add_stmt (ilist, g);
6470 x = lang_hooks.decls.omp_clause_default_ctor
6471 (c, unshare_expr (new_var),
6472 cond ? NULL_TREE
6473 : build_outer_var_ref (var, ctx));
6474 if (x)
6475 gimplify_and_add (x, ilist);
6477 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6478 && OMP_CLAUSE_REDUCTION_INSCAN (c))
6480 if (ctx->for_simd_scan_phase)
6481 goto do_dtor;
6482 if (x || (!is_simd
6483 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c)))
6485 tree nv = create_tmp_var_raw (TREE_TYPE (new_var));
6486 gimple_add_tmp_var (nv);
6487 ctx->cb.decl_map->put (new_vard, nv);
6488 x = lang_hooks.decls.omp_clause_default_ctor
6489 (c, nv, build_outer_var_ref (var, ctx));
6490 if (x)
6491 gimplify_and_add (x, ilist);
6492 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
6494 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
6495 tree vexpr = nv;
6496 if (new_vard != new_var)
6497 vexpr = build_fold_addr_expr (nv);
6498 SET_DECL_VALUE_EXPR (new_vard, vexpr);
6499 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
6500 lower_omp (&tseq, ctx);
6501 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
6502 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
6503 gimple_seq_add_seq (ilist, tseq);
6505 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
6506 if (is_simd && ctx->scan_exclusive)
6508 tree nv2
6509 = create_tmp_var_raw (TREE_TYPE (new_var));
6510 gimple_add_tmp_var (nv2);
6511 ctx->cb.decl_map->put (nv, nv2);
6512 x = lang_hooks.decls.omp_clause_default_ctor
6513 (c, nv2, build_outer_var_ref (var, ctx));
6514 gimplify_and_add (x, ilist);
6515 x = lang_hooks.decls.omp_clause_dtor (c, nv2);
6516 if (x)
6517 gimplify_and_add (x, dlist);
6519 x = lang_hooks.decls.omp_clause_dtor (c, nv);
6520 if (x)
6521 gimplify_and_add (x, dlist);
6523 else if (is_simd
6524 && ctx->scan_exclusive
6525 && TREE_ADDRESSABLE (TREE_TYPE (new_var)))
6527 tree nv2 = create_tmp_var_raw (TREE_TYPE (new_var));
6528 gimple_add_tmp_var (nv2);
6529 ctx->cb.decl_map->put (new_vard, nv2);
6530 x = lang_hooks.decls.omp_clause_dtor (c, nv2);
6531 if (x)
6532 gimplify_and_add (x, dlist);
6534 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
6535 goto do_dtor;
6538 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
6540 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
6541 if (c_kind == OMP_CLAUSE_IN_REDUCTION
6542 && is_omp_target (ctx->stmt))
6544 tree d = maybe_lookup_decl_in_outer_ctx (var, ctx);
6545 tree oldv = NULL_TREE;
6546 gcc_assert (d);
6547 if (DECL_HAS_VALUE_EXPR_P (d))
6548 oldv = DECL_VALUE_EXPR (d);
6549 SET_DECL_VALUE_EXPR (d, new_vard);
6550 DECL_HAS_VALUE_EXPR_P (d) = 1;
6551 lower_omp (&tseq, ctx);
6552 if (oldv)
6553 SET_DECL_VALUE_EXPR (d, oldv);
6554 else
6556 SET_DECL_VALUE_EXPR (d, NULL_TREE);
6557 DECL_HAS_VALUE_EXPR_P (d) = 0;
6560 else
6561 lower_omp (&tseq, ctx);
6562 gimple_seq_add_seq (ilist, tseq);
6564 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
6565 if (is_simd)
6567 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
6568 lower_omp (&tseq, ctx);
6569 gimple_seq_add_seq (dlist, tseq);
6570 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
6572 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
6573 if (cond)
6575 if (lab2)
6576 gimple_seq_add_stmt (ilist, gimple_build_label (lab2));
6577 break;
6579 goto do_dtor;
6581 else
6583 x = omp_reduction_init (c, TREE_TYPE (new_var));
6584 gcc_assert (TREE_CODE (TREE_TYPE (new_var)) != ARRAY_TYPE);
6585 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
6587 if (cond)
6589 gimple *g;
6590 tree lab2 = NULL_TREE;
6591 /* GOMP_taskgroup_reduction_register memsets the whole
6592 array to zero. If the initializer is zero, we don't
6593 need to initialize it again, just mark it as ever
6594 used unconditionally, i.e. cond = true. */
6595 if (initializer_zerop (x))
6597 g = gimple_build_assign (build_simple_mem_ref (cond),
6598 boolean_true_node);
6599 gimple_seq_add_stmt (ilist, g);
6600 break;
6603 /* Otherwise, emit
6604 if (!cond) { cond = true; new_var = x; } */
6605 if (!is_parallel_ctx (ctx))
6607 tree condv = create_tmp_var (boolean_type_node);
6608 tree m = build_simple_mem_ref (cond);
6609 g = gimple_build_assign (condv, m);
6610 gimple_seq_add_stmt (ilist, g);
6611 tree lab1
6612 = create_artificial_label (UNKNOWN_LOCATION);
6613 lab2 = create_artificial_label (UNKNOWN_LOCATION);
6614 g = gimple_build_cond (NE_EXPR, condv,
6615 boolean_false_node,
6616 lab2, lab1);
6617 gimple_seq_add_stmt (ilist, g);
6618 gimple_seq_add_stmt (ilist,
6619 gimple_build_label (lab1));
6621 g = gimple_build_assign (build_simple_mem_ref (cond),
6622 boolean_true_node);
6623 gimple_seq_add_stmt (ilist, g);
6624 gimplify_assign (new_var, x, ilist);
6625 if (lab2)
6626 gimple_seq_add_stmt (ilist, gimple_build_label (lab2));
6627 break;
6630 /* reduction(-:var) sums up the partial results, so it
6631 acts identically to reduction(+:var). */
6632 if (code == MINUS_EXPR)
6633 code = PLUS_EXPR;
6635 bool is_truth_op
6636 = (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR);
6637 tree new_vard = new_var;
6638 if (is_simd && omp_privatize_by_reference (var))
6640 gcc_assert (TREE_CODE (new_var) == MEM_REF);
6641 new_vard = TREE_OPERAND (new_var, 0);
6642 gcc_assert (DECL_P (new_vard));
6644 tree rvar = NULL_TREE, *rvarp = NULL, rvar2 = NULL_TREE;
6645 if (is_simd
6646 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6647 && OMP_CLAUSE_REDUCTION_INSCAN (c))
6648 rvarp = &rvar;
6649 if (is_simd
6650 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
6651 ivar, lvar, rvarp,
6652 &rvar2))
6654 if (new_vard != new_var)
6656 SET_DECL_VALUE_EXPR (new_vard,
6657 build_fold_addr_expr (lvar));
6658 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
6661 tree ref = build_outer_var_ref (var, ctx);
6663 if (rvarp)
6665 if (ctx->for_simd_scan_phase)
6666 break;
6667 gimplify_assign (ivar, ref, &llist[0]);
6668 ref = build_outer_var_ref (var, ctx);
6669 gimplify_assign (ref, rvar, &llist[3]);
6670 break;
6673 gimplify_assign (unshare_expr (ivar), x, &llist[0]);
6675 if (sctx.is_simt)
6677 if (!simt_lane)
6678 simt_lane = create_tmp_var (unsigned_type_node);
6679 x = build_call_expr_internal_loc
6680 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_BFLY,
6681 TREE_TYPE (ivar), 2, ivar, simt_lane);
6682 x = build2 (code, TREE_TYPE (ivar), ivar, x);
6683 gimplify_assign (ivar, x, &llist[2]);
6685 tree ivar2 = ivar;
6686 tree ref2 = ref;
6687 if (is_truth_op)
6689 tree zero = build_zero_cst (TREE_TYPE (ivar));
6690 ivar2 = fold_build2_loc (clause_loc, NE_EXPR,
6691 boolean_type_node, ivar,
6692 zero);
6693 ref2 = fold_build2_loc (clause_loc, NE_EXPR,
6694 boolean_type_node, ref,
6695 zero);
6697 x = build2 (code, TREE_TYPE (ref), ref2, ivar2);
6698 if (is_truth_op)
6699 x = fold_convert (TREE_TYPE (ref), x);
6700 ref = build_outer_var_ref (var, ctx);
6701 gimplify_assign (ref, x, &llist[1]);
6704 else
6706 lower_private_allocate (var, new_var, allocator,
6707 allocate_ptr, ilist, ctx,
6708 false, NULL_TREE);
6709 if (omp_privatize_by_reference (var) && is_simd)
6710 handle_simd_reference (clause_loc, new_vard, ilist);
6711 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6712 && OMP_CLAUSE_REDUCTION_INSCAN (c))
6713 break;
6714 gimplify_assign (new_var, x, ilist);
6715 if (is_simd)
6717 tree ref = build_outer_var_ref (var, ctx);
6718 tree new_var2 = new_var;
6719 tree ref2 = ref;
6720 if (is_truth_op)
6722 tree zero = build_zero_cst (TREE_TYPE (new_var));
6723 new_var2
6724 = fold_build2_loc (clause_loc, NE_EXPR,
6725 boolean_type_node, new_var,
6726 zero);
6727 ref2 = fold_build2_loc (clause_loc, NE_EXPR,
6728 boolean_type_node, ref,
6729 zero);
6731 x = build2 (code, TREE_TYPE (ref2), ref2, new_var2);
6732 if (is_truth_op)
6733 x = fold_convert (TREE_TYPE (new_var), x);
6734 ref = build_outer_var_ref (var, ctx);
6735 gimplify_assign (ref, x, dlist);
6737 if (allocator)
6738 goto do_dtor;
6741 break;
6743 default:
6744 gcc_unreachable ();
6748 if (tskred_avar)
6750 tree clobber = build_clobber (TREE_TYPE (tskred_avar));
6751 gimple_seq_add_stmt (ilist, gimple_build_assign (tskred_avar, clobber));
6754 if (known_eq (sctx.max_vf, 1U))
6756 sctx.is_simt = false;
6757 if (ctx->lastprivate_conditional_map)
6759 if (gimple_omp_for_combined_into_p (ctx->stmt))
6761 /* Signal to lower_omp_1 that it should use parent context. */
6762 ctx->combined_into_simd_safelen1 = true;
6763 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
6764 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6765 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
6767 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
6768 omp_context *outer = ctx->outer;
6769 if (gimple_code (outer->stmt) == GIMPLE_OMP_SCAN)
6770 outer = outer->outer;
6771 tree *v = ctx->lastprivate_conditional_map->get (o);
6772 tree po = lookup_decl (OMP_CLAUSE_DECL (c), outer);
6773 tree *pv = outer->lastprivate_conditional_map->get (po);
6774 *v = *pv;
6777 else
6779 /* When not vectorized, treat lastprivate(conditional:) like
6780 normal lastprivate, as there will be just one simd lane
6781 writing the privatized variable. */
6782 delete ctx->lastprivate_conditional_map;
6783 ctx->lastprivate_conditional_map = NULL;
6788 if (nonconst_simd_if)
6790 if (sctx.lane == NULL_TREE)
6792 sctx.idx = create_tmp_var (unsigned_type_node);
6793 sctx.lane = create_tmp_var (unsigned_type_node);
6795 /* FIXME: For now. */
6796 sctx.is_simt = false;
6799 if (sctx.lane || sctx.is_simt)
6801 uid = create_tmp_var (ptr_type_node, "simduid");
6802 /* Don't want uninit warnings on simduid; it is always uninitialized,
6803 but we use it not for its value, only for its DECL_UID. */
6804 suppress_warning (uid, OPT_Wuninitialized);
6805 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SIMDUID_);
6806 OMP_CLAUSE__SIMDUID__DECL (c) = uid;
6807 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
6808 gimple_omp_for_set_clauses (ctx->stmt, c);
6810 /* Emit calls denoting privatized variables and initializing a pointer to
6811 the structure that holds private variables as fields, after the ompdevlow pass. */
6812 if (sctx.is_simt)
6814 sctx.simt_eargs[0] = uid;
6815 gimple *g
6816 = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, sctx.simt_eargs);
6817 gimple_call_set_lhs (g, uid);
6818 gimple_seq_add_stmt (ilist, g);
6819 sctx.simt_eargs.release ();
6821 simtrec = create_tmp_var (ptr_type_node, ".omp_simt");
6822 g = gimple_build_call_internal (IFN_GOMP_SIMT_ENTER_ALLOC, 1, uid);
6823 gimple_call_set_lhs (g, simtrec);
6824 gimple_seq_add_stmt (ilist, g);
6826 if (sctx.lane)
6828 gimple *g = gimple_build_call_internal (IFN_GOMP_SIMD_LANE,
6829 2 + (nonconst_simd_if != NULL),
6830 uid, integer_zero_node,
6831 nonconst_simd_if);
6832 gimple_call_set_lhs (g, sctx.lane);
6833 gimple_stmt_iterator gsi = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
6834 gsi_insert_before_without_update (&gsi, g, GSI_SAME_STMT);
6835 g = gimple_build_assign (sctx.lane, INTEGER_CST,
6836 build_int_cst (unsigned_type_node, 0));
6837 gimple_seq_add_stmt (ilist, g);
6838 if (sctx.lastlane)
6840 g = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
6841 2, uid, sctx.lane);
6842 gimple_call_set_lhs (g, sctx.lastlane);
6843 gimple_seq_add_stmt (dlist, g);
6844 gimple_seq_add_seq (dlist, llist[3]);
6846 /* Emit reductions across SIMT lanes in log_2(simt_vf) steps. */
6847 if (llist[2])
6849 tree simt_vf = create_tmp_var (unsigned_type_node);
6850 g = gimple_build_call_internal (IFN_GOMP_SIMT_VF, 0);
6851 gimple_call_set_lhs (g, simt_vf);
6852 gimple_seq_add_stmt (dlist, g);
6854 tree t = build_int_cst (unsigned_type_node, 1);
6855 g = gimple_build_assign (simt_lane, INTEGER_CST, t);
6856 gimple_seq_add_stmt (dlist, g);
6858 t = build_int_cst (unsigned_type_node, 0);
6859 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
6860 gimple_seq_add_stmt (dlist, g);
6862 tree body = create_artificial_label (UNKNOWN_LOCATION);
6863 tree header = create_artificial_label (UNKNOWN_LOCATION);
6864 tree end = create_artificial_label (UNKNOWN_LOCATION);
6865 gimple_seq_add_stmt (dlist, gimple_build_goto (header));
6866 gimple_seq_add_stmt (dlist, gimple_build_label (body));
6868 gimple_seq_add_seq (dlist, llist[2]);
6870 g = gimple_build_assign (simt_lane, LSHIFT_EXPR, simt_lane, integer_one_node);
6871 gimple_seq_add_stmt (dlist, g);
6873 gimple_seq_add_stmt (dlist, gimple_build_label (header));
6874 g = gimple_build_cond (LT_EXPR, simt_lane, simt_vf, body, end);
6875 gimple_seq_add_stmt (dlist, g);
6877 gimple_seq_add_stmt (dlist, gimple_build_label (end));
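/* The sequence just emitted is, in effect, a butterfly reduction:
     for (simt_lane = 1; simt_lane < simt_vf; simt_lane <<= 1)
       <llist[2]: ivar = ivar OP GOMP_SIMT_XCHG_BFLY (ivar, simt_lane)>  */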
6879 for (int i = 0; i < 2; i++)
6880 if (llist[i])
6882 tree vf = create_tmp_var (unsigned_type_node);
6883 g = gimple_build_call_internal (IFN_GOMP_SIMD_VF, 1, uid);
6884 gimple_call_set_lhs (g, vf);
6885 gimple_seq *seq = i == 0 ? ilist : dlist;
6886 gimple_seq_add_stmt (seq, g);
6887 tree t = build_int_cst (unsigned_type_node, 0);
6888 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
6889 gimple_seq_add_stmt (seq, g);
6890 tree body = create_artificial_label (UNKNOWN_LOCATION);
6891 tree header = create_artificial_label (UNKNOWN_LOCATION);
6892 tree end = create_artificial_label (UNKNOWN_LOCATION);
6893 gimple_seq_add_stmt (seq, gimple_build_goto (header));
6894 gimple_seq_add_stmt (seq, gimple_build_label (body));
6895 gimple_seq_add_seq (seq, llist[i]);
6896 t = build_int_cst (unsigned_type_node, 1);
6897 g = gimple_build_assign (sctx.idx, PLUS_EXPR, sctx.idx, t);
6898 gimple_seq_add_stmt (seq, g);
6899 gimple_seq_add_stmt (seq, gimple_build_label (header));
6900 g = gimple_build_cond (LT_EXPR, sctx.idx, vf, body, end);
6901 gimple_seq_add_stmt (seq, g);
6902 gimple_seq_add_stmt (seq, gimple_build_label (end));
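/* Each such loop runs the per-lane sequence over the SIMD arrays,
   roughly:
     for (sctx.idx = 0; sctx.idx < vf; sctx.idx++)
       <llist[i]>
   with llist[0] on the entry (ILIST) side and llist[1] on the
   exit (DLIST) side.  */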
6905 if (sctx.is_simt)
6907 gimple_seq_add_seq (dlist, sctx.simt_dlist);
6908 gimple *g
6909 = gimple_build_call_internal (IFN_GOMP_SIMT_EXIT, 1, simtrec);
6910 gimple_seq_add_stmt (dlist, g);
6913 /* The copyin sequence is not to be executed by the main thread, since
6914 that would result in self-copies. Perhaps not visible to scalars,
6915 but it certainly is to C++ operator=. */
6916 if (copyin_seq)
6918 x = build_call_expr (builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM), 0);
6920 x = build2 (NE_EXPR, boolean_type_node, x,
6921 build_int_cst (TREE_TYPE (x), 0));
6922 x = build3 (COND_EXPR, void_type_node, x, copyin_seq, NULL);
6923 gimplify_and_add (x, ilist);
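/* I.e., the copyin sequence is wrapped as
     if (omp_get_thread_num () != 0) { <copyin_seq> }  */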
6926 /* If any copyin variable is passed by reference, we must ensure the
6927 master thread doesn't modify it before it is copied over in all
6928 threads. Similarly, for variables in both firstprivate and
6929 lastprivate clauses, we need to ensure that the lastprivate copying
6930 happens after the firstprivate copying in all threads. And similarly
6931 for UDRs whose initializer expression refers to omp_orig. */
6932 if (copyin_by_ref || lastprivate_firstprivate
6933 || (reduction_omp_orig_ref
6934 && !ctx->scan_inclusive
6935 && !ctx->scan_exclusive))
6937 /* Don't add any barrier for #pragma omp simd or
6938 #pragma omp distribute. */
6939 if (!is_task_ctx (ctx)
6940 && (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
6941 || gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_FOR))
6942 gimple_seq_add_stmt (ilist, omp_build_barrier (NULL_TREE));
6945 /* If max_vf is non-zero, then we can use only a vectorization factor
6946 up to the max_vf we chose. So stick it into the safelen clause. */
6947 if (maybe_ne (sctx.max_vf, 0U))
6949 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
6950 OMP_CLAUSE_SAFELEN);
6951 poly_uint64 safe_len;
6952 if (c == NULL_TREE
6953 || (poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
6954 && maybe_gt (safe_len, sctx.max_vf)))
6956 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN);
6957 OMP_CLAUSE_SAFELEN_EXPR (c) = build_int_cst (integer_type_node,
6958 sctx.max_vf);
6959 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
6960 gimple_omp_for_set_clauses (ctx->stmt, c);
6965 /* Create temporary variables for lastprivate(conditional:) implementation
6966 in context CTX with CLAUSES. */
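/* Roughly, each lastprivate(conditional:) decl gets a per-thread
   counter of the loop's iteration type; the counter is recorded in
   ctx->lastprivate_conditional_map and is later compared in
   lower_lastprivate_clauses to decide which thread wrote last.  */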
6968 static void
6969 lower_lastprivate_conditional_clauses (tree *clauses, omp_context *ctx)
6971 tree iter_type = NULL_TREE;
6972 tree cond_ptr = NULL_TREE;
6973 tree iter_var = NULL_TREE;
6974 bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
6975 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD);
6976 tree next = *clauses;
6977 for (tree c = *clauses; c; c = OMP_CLAUSE_CHAIN (c))
6978 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6979 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
6981 if (is_simd)
6983 tree cc = omp_find_clause (next, OMP_CLAUSE__CONDTEMP_);
6984 gcc_assert (cc);
6985 if (iter_type == NULL_TREE)
6987 iter_type = TREE_TYPE (OMP_CLAUSE_DECL (cc));
6988 iter_var = create_tmp_var_raw (iter_type);
6989 DECL_CONTEXT (iter_var) = current_function_decl;
6990 DECL_SEEN_IN_BIND_EXPR_P (iter_var) = 1;
6991 DECL_CHAIN (iter_var) = ctx->block_vars;
6992 ctx->block_vars = iter_var;
6993 tree c3
6994 = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
6995 OMP_CLAUSE__CONDTEMP__ITER (c3) = 1;
6996 OMP_CLAUSE_DECL (c3) = iter_var;
6997 OMP_CLAUSE_CHAIN (c3) = *clauses;
6998 *clauses = c3;
6999 ctx->lastprivate_conditional_map = new hash_map<tree, tree>;
7001 next = OMP_CLAUSE_CHAIN (cc);
7002 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
7003 tree v = lookup_decl (OMP_CLAUSE_DECL (cc), ctx);
7004 ctx->lastprivate_conditional_map->put (o, v);
7005 continue;
7007 if (iter_type == NULL)
7009 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR)
7011 struct omp_for_data fd;
7012 omp_extract_for_data (as_a <gomp_for *> (ctx->stmt), &fd,
7013 NULL);
7014 iter_type = unsigned_type_for (fd.iter_type);
7016 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
7017 iter_type = unsigned_type_node;
7018 tree c2 = omp_find_clause (*clauses, OMP_CLAUSE__CONDTEMP_);
7019 if (c2)
7021 cond_ptr
7022 = lookup_decl_in_outer_ctx (OMP_CLAUSE_DECL (c2), ctx);
7023 OMP_CLAUSE_DECL (c2) = cond_ptr;
7025 else
7027 cond_ptr = create_tmp_var_raw (build_pointer_type (iter_type));
7028 DECL_CONTEXT (cond_ptr) = current_function_decl;
7029 DECL_SEEN_IN_BIND_EXPR_P (cond_ptr) = 1;
7030 DECL_CHAIN (cond_ptr) = ctx->block_vars;
7031 ctx->block_vars = cond_ptr;
7032 c2 = build_omp_clause (UNKNOWN_LOCATION,
7033 OMP_CLAUSE__CONDTEMP_);
7034 OMP_CLAUSE_DECL (c2) = cond_ptr;
7035 OMP_CLAUSE_CHAIN (c2) = *clauses;
7036 *clauses = c2;
7038 iter_var = create_tmp_var_raw (iter_type);
7039 DECL_CONTEXT (iter_var) = current_function_decl;
7040 DECL_SEEN_IN_BIND_EXPR_P (iter_var) = 1;
7041 DECL_CHAIN (iter_var) = ctx->block_vars;
7042 ctx->block_vars = iter_var;
7043 tree c3
7044 = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
7045 OMP_CLAUSE__CONDTEMP__ITER (c3) = 1;
7046 OMP_CLAUSE_DECL (c3) = iter_var;
7047 OMP_CLAUSE_CHAIN (c3) = OMP_CLAUSE_CHAIN (c2);
7048 OMP_CLAUSE_CHAIN (c2) = c3;
7049 ctx->lastprivate_conditional_map = new hash_map<tree, tree>;
7051 tree v = create_tmp_var_raw (iter_type);
7052 DECL_CONTEXT (v) = current_function_decl;
7053 DECL_SEEN_IN_BIND_EXPR_P (v) = 1;
7054 DECL_CHAIN (v) = ctx->block_vars;
7055 ctx->block_vars = v;
7056 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
7057 ctx->lastprivate_conditional_map->put (o, v);
7062 /* Generate code to implement the LASTPRIVATE clauses. This is used for
7063 both parallel and workshare constructs. PREDICATE may be NULL if it's
7064 always true. BODY_P is the sequence in which to insert early initialization
7065 if needed, STMT_LIST is where the non-conditional lastprivate handling
7066 goes, and CSTMT_LIST is a sequence that needs to be run in a critical
7067 section. */
7069 static void
7070 lower_lastprivate_clauses (tree clauses, tree predicate, gimple_seq *body_p,
7071 gimple_seq *stmt_list, gimple_seq *cstmt_list,
7072 omp_context *ctx)
7074 tree x, c, label = NULL, orig_clauses = clauses;
7075 bool par_clauses = false;
7076 tree simduid = NULL, lastlane = NULL, simtcond = NULL, simtlast = NULL;
7077 unsigned HOST_WIDE_INT conditional_off = 0;
7078 gimple_seq post_stmt_list = NULL;
7080 /* Early exit if there are no lastprivate or linear clauses. */
7081 for (; clauses ; clauses = OMP_CLAUSE_CHAIN (clauses))
7082 if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LASTPRIVATE
7083 || (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LINEAR
7084 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (clauses)))
7085 break;
7086 if (clauses == NULL)
7088 /* If this was a workshare clause, see if it had been combined
7089 with its parallel. In that case, look for the clauses on the
7090 parallel statement itself. */
7091 if (is_parallel_ctx (ctx))
7092 return;
7094 ctx = ctx->outer;
7095 if (ctx == NULL || !is_parallel_ctx (ctx))
7096 return;
7098 clauses = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
7099 OMP_CLAUSE_LASTPRIVATE);
7100 if (clauses == NULL)
7101 return;
7102 par_clauses = true;
7105 bool maybe_simt = false;
7106 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
7107 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
7109 maybe_simt = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMT_);
7110 simduid = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMDUID_);
7111 if (simduid)
7112 simduid = OMP_CLAUSE__SIMDUID__DECL (simduid);
7115 if (predicate)
7117 gcond *stmt;
7118 tree label_true, arm1, arm2;
7119 enum tree_code pred_code = TREE_CODE (predicate);
7121 label = create_artificial_label (UNKNOWN_LOCATION);
7122 label_true = create_artificial_label (UNKNOWN_LOCATION);
7123 if (TREE_CODE_CLASS (pred_code) == tcc_comparison)
7125 arm1 = TREE_OPERAND (predicate, 0);
7126 arm2 = TREE_OPERAND (predicate, 1);
7127 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
7128 gimplify_expr (&arm2, stmt_list, NULL, is_gimple_val, fb_rvalue);
7130 else
7132 arm1 = predicate;
7133 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
7134 arm2 = boolean_false_node;
7135 pred_code = NE_EXPR;
7137 if (maybe_simt)
7139 c = build2 (pred_code, boolean_type_node, arm1, arm2);
7140 c = fold_convert (integer_type_node, c);
7141 simtcond = create_tmp_var (integer_type_node);
7142 gimplify_assign (simtcond, c, stmt_list);
7143 gcall *g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY,
7144 1, simtcond);
7145 c = create_tmp_var (integer_type_node);
7146 gimple_call_set_lhs (g, c);
7147 gimple_seq_add_stmt (stmt_list, g);
7148 stmt = gimple_build_cond (NE_EXPR, c, integer_zero_node,
7149 label_true, label);
7151 else
7152 stmt = gimple_build_cond (pred_code, arm1, arm2, label_true, label);
7153 gimple_seq_add_stmt (stmt_list, stmt);
7154 gimple_seq_add_stmt (stmt_list, gimple_build_label (label_true));
7157 tree cond_ptr = NULL_TREE;
7158 for (c = clauses; c ;)
7160 tree var, new_var;
7161 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
7162 gimple_seq *this_stmt_list = stmt_list;
7163 tree lab2 = NULL_TREE;
7165 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7166 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
7167 && ctx->lastprivate_conditional_map
7168 && !ctx->combined_into_simd_safelen1)
7170 gcc_assert (body_p);
7171 if (simduid)
7172 goto next;
7173 if (cond_ptr == NULL_TREE)
7175 cond_ptr = omp_find_clause (orig_clauses, OMP_CLAUSE__CONDTEMP_);
7176 cond_ptr = OMP_CLAUSE_DECL (cond_ptr);
7178 tree type = TREE_TYPE (TREE_TYPE (cond_ptr));
7179 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
7180 tree v = *ctx->lastprivate_conditional_map->get (o);
7181 gimplify_assign (v, build_zero_cst (type), body_p);
7182 this_stmt_list = cstmt_list;
7183 tree mem;
7184 if (POINTER_TYPE_P (TREE_TYPE (cond_ptr)))
7186 mem = build2 (MEM_REF, type, cond_ptr,
7187 build_int_cst (TREE_TYPE (cond_ptr),
7188 conditional_off));
7189 conditional_off += tree_to_uhwi (TYPE_SIZE_UNIT (type));
7191 else
7192 mem = build4 (ARRAY_REF, type, cond_ptr,
7193 size_int (conditional_off++), NULL_TREE, NULL_TREE);
7194 tree mem2 = copy_node (mem);
7195 gimple_seq seq = NULL;
7196 mem = force_gimple_operand (mem, &seq, true, NULL_TREE);
7197 gimple_seq_add_seq (this_stmt_list, seq);
7198 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
7199 lab2 = create_artificial_label (UNKNOWN_LOCATION);
7200 gimple *g = gimple_build_cond (GT_EXPR, v, mem, lab1, lab2);
7201 gimple_seq_add_stmt (this_stmt_list, g);
7202 gimple_seq_add_stmt (this_stmt_list, gimple_build_label (lab1));
7203 gimplify_assign (mem2, v, this_stmt_list);
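/* In effect, inside the critical section:
     if (v > cond_ptr[off]) { cond_ptr[off] = v; <copy value out>; }
   so the thread whose counter is highest, i.e. that executed the
   lexically last iteration, provides the final value.  */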
7205 else if (predicate
7206 && ctx->combined_into_simd_safelen1
7207 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7208 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
7209 && ctx->lastprivate_conditional_map)
7210 this_stmt_list = &post_stmt_list;
7212 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7213 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
7214 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
7216 var = OMP_CLAUSE_DECL (c);
7217 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7218 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
7219 && is_taskloop_ctx (ctx))
7221 gcc_checking_assert (ctx->outer && is_task_ctx (ctx->outer));
7222 new_var = lookup_decl (var, ctx->outer);
7224 else
7226 new_var = lookup_decl (var, ctx);
7227 /* Avoid uninitialized warnings for lastprivate and
7228 for linear iterators. */
7229 if (predicate
7230 && (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7231 || OMP_CLAUSE_LINEAR_NO_COPYIN (c)))
7232 suppress_warning (new_var, OPT_Wuninitialized);
7235 if (!maybe_simt && simduid && DECL_HAS_VALUE_EXPR_P (new_var))
7237 tree val = DECL_VALUE_EXPR (new_var);
7238 if (TREE_CODE (val) == ARRAY_REF
7239 && VAR_P (TREE_OPERAND (val, 0))
7240 && lookup_attribute ("omp simd array",
7241 DECL_ATTRIBUTES (TREE_OPERAND (val,
7242 0))))
7244 if (lastlane == NULL)
7246 lastlane = create_tmp_var (unsigned_type_node);
7247 gcall *g
7248 = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
7249 2, simduid,
7250 TREE_OPERAND (val, 1));
7251 gimple_call_set_lhs (g, lastlane);
7252 gimple_seq_add_stmt (this_stmt_list, g);
7254 new_var = build4 (ARRAY_REF, TREE_TYPE (val),
7255 TREE_OPERAND (val, 0), lastlane,
7256 NULL_TREE, NULL_TREE);
7257 TREE_THIS_NOTRAP (new_var) = 1;
7260 else if (maybe_simt)
7262 tree val = (DECL_HAS_VALUE_EXPR_P (new_var)
7263 ? DECL_VALUE_EXPR (new_var)
7264 : new_var);
7265 if (simtlast == NULL)
7267 simtlast = create_tmp_var (unsigned_type_node);
7268 gcall *g = gimple_build_call_internal
7269 (IFN_GOMP_SIMT_LAST_LANE, 1, simtcond);
7270 gimple_call_set_lhs (g, simtlast);
7271 gimple_seq_add_stmt (this_stmt_list, g);
7273 x = build_call_expr_internal_loc
7274 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_IDX,
7275 TREE_TYPE (val), 2, val, simtlast);
7276 new_var = unshare_expr (new_var);
7277 gimplify_assign (new_var, x, this_stmt_list);
7278 new_var = unshare_expr (new_var);
7281 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7282 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
7284 lower_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
7285 gimple_seq_add_seq (this_stmt_list,
7286 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
7287 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) = NULL;
7289 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
7290 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
7292 lower_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
7293 gimple_seq_add_seq (this_stmt_list,
7294 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
7295 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) = NULL;
7298 x = NULL_TREE;
7299 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7300 && OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c)
7301 && is_taskloop_ctx (ctx))
7303 tree ovar = maybe_lookup_decl_in_outer_ctx (var,
7304 ctx->outer->outer);
7305 if (is_global_var (ovar))
7306 x = ovar;
7308 if (!x)
7309 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_LASTPRIVATE);
7310 if (omp_privatize_by_reference (var))
7311 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
7312 x = lang_hooks.decls.omp_clause_assign_op (c, x, new_var);
7313 gimplify_and_add (x, this_stmt_list);
7315 if (lab2)
7316 gimple_seq_add_stmt (this_stmt_list, gimple_build_label (lab2));
7319 next:
7320 c = OMP_CLAUSE_CHAIN (c);
7321 if (c == NULL && !par_clauses)
7323 /* If this was a workshare clause, see if it had been combined
7324 with its parallel. In that case, continue looking for the
7325 clauses also on the parallel statement itself. */
7326 if (is_parallel_ctx (ctx))
7327 break;
7329 ctx = ctx->outer;
7330 if (ctx == NULL || !is_parallel_ctx (ctx))
7331 break;
7333 c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
7334 OMP_CLAUSE_LASTPRIVATE);
7335 par_clauses = true;
7339 if (label)
7340 gimple_seq_add_stmt (stmt_list, gimple_build_label (label));
7341 gimple_seq_add_seq (stmt_list, post_stmt_list);
7344 /* Lower the OpenACC reductions of CLAUSES for compute axis LEVEL
7345 (which might be a placeholder). INNER is true if this is an inner
7346 axis of a multi-axis loop. FORK and JOIN are (optional) fork and
7347 join markers. Generate the before-loop forking sequence in
7348 FORK_SEQ and the after-loop joining sequence in JOIN_SEQ. The
7349 general form of these sequences is
7351 GOACC_REDUCTION_SETUP
7352 GOACC_FORK
7353 GOACC_REDUCTION_INIT
7354 ...
7355 GOACC_REDUCTION_FINI
7356 GOACC_JOIN
7357 GOACC_REDUCTION_TEARDOWN. */
7359 static void
7360 lower_oacc_reductions (location_t loc, tree clauses, tree level, bool inner,
7361 gcall *fork, gcall *private_marker, gcall *join,
7362 gimple_seq *fork_seq, gimple_seq *join_seq,
7363 omp_context *ctx)
7365 gimple_seq before_fork = NULL;
7366 gimple_seq after_fork = NULL;
7367 gimple_seq before_join = NULL;
7368 gimple_seq after_join = NULL;
7369 tree init_code = NULL_TREE, fini_code = NULL_TREE,
7370 setup_code = NULL_TREE, teardown_code = NULL_TREE;
7371 unsigned offset = 0;
7373 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
7374 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
7376 /* No 'reduction' clauses on OpenACC 'kernels'. */
7377 gcc_checking_assert (!is_oacc_kernels (ctx));
7378 /* Likewise, on OpenACC 'kernels' decomposed parts. */
7379 gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx));
7381 tree orig = OMP_CLAUSE_DECL (c);
7382 tree var = maybe_lookup_decl (orig, ctx);
7383 tree ref_to_res = NULL_TREE;
7384 tree incoming, outgoing, v1, v2, v3;
7385 bool is_private = false;
7387 enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
7388 if (rcode == MINUS_EXPR)
7389 rcode = PLUS_EXPR;
7390 else if (rcode == TRUTH_ANDIF_EXPR)
7391 rcode = BIT_AND_EXPR;
7392 else if (rcode == TRUTH_ORIF_EXPR)
7393 rcode = BIT_IOR_EXPR;
7394 tree op = build_int_cst (unsigned_type_node, rcode);
7396 if (!var)
7397 var = orig;
7399 incoming = outgoing = var;
7401 if (!inner)
7403 /* See if an outer construct also reduces this variable. */
7404 omp_context *outer = ctx;
7406 while (omp_context *probe = outer->outer)
7408 enum gimple_code type = gimple_code (probe->stmt);
7409 tree cls;
7411 switch (type)
7413 case GIMPLE_OMP_FOR:
7414 cls = gimple_omp_for_clauses (probe->stmt);
7415 break;
7417 case GIMPLE_OMP_TARGET:
7418 /* No 'reduction' clauses inside OpenACC 'kernels'
7419 regions. */
7420 gcc_checking_assert (!is_oacc_kernels (probe));
7422 if (!is_gimple_omp_offloaded (probe->stmt))
7423 goto do_lookup;
7425 cls = gimple_omp_target_clauses (probe->stmt);
7426 break;
7428 default:
7429 goto do_lookup;
7432 outer = probe;
7433 for (; cls; cls = OMP_CLAUSE_CHAIN (cls))
7434 if (OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_REDUCTION
7435 && orig == OMP_CLAUSE_DECL (cls))
7437 incoming = outgoing = lookup_decl (orig, probe);
7438 goto has_outer_reduction;
7440 else if ((OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_FIRSTPRIVATE
7441 || OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_PRIVATE)
7442 && orig == OMP_CLAUSE_DECL (cls))
7444 is_private = true;
7445 goto do_lookup;
7449 do_lookup:
7450 /* This is the outermost construct with this reduction;
7451 see if there's a mapping for it. */
7452 if (gimple_code (outer->stmt) == GIMPLE_OMP_TARGET
7453 && maybe_lookup_field (orig, outer) && !is_private)
7455 ref_to_res = build_receiver_ref (orig, false, outer);
7456 if (omp_privatize_by_reference (orig))
7457 ref_to_res = build_simple_mem_ref (ref_to_res);
7459 tree type = TREE_TYPE (var);
7460 if (POINTER_TYPE_P (type))
7461 type = TREE_TYPE (type);
7463 outgoing = var;
7464 incoming = omp_reduction_init_op (loc, rcode, type);
7466 else
7468 /* Try to look at enclosing contexts for reduction var,
7469 use original if no mapping found. */
7470 tree t = NULL_TREE;
7471 omp_context *c = ctx->outer;
7472 while (c && !t)
7474 t = maybe_lookup_decl (orig, c);
7475 c = c->outer;
7477 incoming = outgoing = (t ? t : orig);
7480 has_outer_reduction:;
7483 if (!ref_to_res)
7484 ref_to_res = integer_zero_node;
7486 if (omp_privatize_by_reference (orig))
7488 tree type = TREE_TYPE (var);
7489 const char *id = IDENTIFIER_POINTER (DECL_NAME (var));
7491 if (!inner)
7493 tree x = create_tmp_var (TREE_TYPE (type), id);
7494 gimplify_assign (var, build_fold_addr_expr (x), fork_seq);
7497 v1 = create_tmp_var (type, id);
7498 v2 = create_tmp_var (type, id);
7499 v3 = create_tmp_var (type, id);
7501 gimplify_assign (v1, var, fork_seq);
7502 gimplify_assign (v2, var, fork_seq);
7503 gimplify_assign (v3, var, fork_seq);
7505 var = build_simple_mem_ref (var);
7506 v1 = build_simple_mem_ref (v1);
7507 v2 = build_simple_mem_ref (v2);
7508 v3 = build_simple_mem_ref (v3);
7509 outgoing = build_simple_mem_ref (outgoing);
7511 if (!TREE_CONSTANT (incoming))
7512 incoming = build_simple_mem_ref (incoming);
7514 else
7515 v1 = v2 = v3 = var;
7517 /* Determine the position in the reduction buffer, which may be used
7518 by the target. The parser has ensured that this is not a
7519 variable-sized type. */
7520 fixed_size_mode mode
7521 = as_a <fixed_size_mode> (TYPE_MODE (TREE_TYPE (var)));
7522 unsigned align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
7523 offset = (offset + align - 1) & ~(align - 1);
7524 tree off = build_int_cst (sizetype, offset);
7525 offset += GET_MODE_SIZE (mode);
7527 if (!init_code)
7529 init_code = build_int_cst (integer_type_node,
7530 IFN_GOACC_REDUCTION_INIT);
7531 fini_code = build_int_cst (integer_type_node,
7532 IFN_GOACC_REDUCTION_FINI);
7533 setup_code = build_int_cst (integer_type_node,
7534 IFN_GOACC_REDUCTION_SETUP);
7535 teardown_code = build_int_cst (integer_type_node,
7536 IFN_GOACC_REDUCTION_TEARDOWN);
7539 tree setup_call
7540 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
7541 TREE_TYPE (var), 6, setup_code,
7542 unshare_expr (ref_to_res),
7543 incoming, level, op, off);
7544 tree init_call
7545 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
7546 TREE_TYPE (var), 6, init_code,
7547 unshare_expr (ref_to_res),
7548 v1, level, op, off);
7549 tree fini_call
7550 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
7551 TREE_TYPE (var), 6, fini_code,
7552 unshare_expr (ref_to_res),
7553 v2, level, op, off);
7554 tree teardown_call
7555 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
7556 TREE_TYPE (var), 6, teardown_code,
7557 ref_to_res, v3, level, op, off);
7559 gimplify_assign (v1, setup_call, &before_fork);
7560 gimplify_assign (v2, init_call, &after_fork);
7561 gimplify_assign (v3, fini_call, &before_join);
7562 gimplify_assign (outgoing, teardown_call, &after_join);
7565 /* Now stitch things together. */
7566 gimple_seq_add_seq (fork_seq, before_fork);
7567 if (private_marker)
7568 gimple_seq_add_stmt (fork_seq, private_marker);
7569 if (fork)
7570 gimple_seq_add_stmt (fork_seq, fork);
7571 gimple_seq_add_seq (fork_seq, after_fork);
7573 gimple_seq_add_seq (join_seq, before_join);
7574 if (join)
7575 gimple_seq_add_stmt (join_seq, join);
7576 gimple_seq_add_seq (join_seq, after_join);
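
/* As an illustrative sketch (ours, not a verbatim dump of the generated
   GIMPLE), for a source loop such as

     #pragma acc parallel loop reduction (+:sum)
     for (i = 0; i < n; i++) sum += a[i];

   the sequences built above bracket the loop body roughly as

     v1 = GOACC_REDUCTION (SETUP, ref_to_res, sum, level, +, offset);
     GOACC_FORK
     v2 = GOACC_REDUCTION (INIT, ref_to_res, v1, level, +, offset);
     ...loop body...
     v3 = GOACC_REDUCTION (FINI, ref_to_res, v2, level, +, offset);
     GOACC_JOIN
     sum = GOACC_REDUCTION (TEARDOWN, ref_to_res, v3, level, +, offset);

   with the IFN_GOACC_REDUCTION calls expanded in a target-dependent
   fashion by later device lowering.  */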
7579 /* Generate code to implement the REDUCTION clauses and append it
7580 to STMT_SEQP. CLIST, if non-NULL, is a pointer to a sequence
7581 that should also be emitted inside the critical section;
7582 in that case clear *CLIST afterwards, otherwise leave it as is
7583 and let the caller emit it itself. */
7585 static void
7586 lower_reduction_clauses (tree clauses, gimple_seq *stmt_seqp,
7587 gimple_seq *clist, omp_context *ctx)
7589 gimple_seq sub_seq = NULL;
7590 gimple *stmt;
7591 tree x, c;
7592 int count = 0;
7594 /* OpenACC loop reductions are handled elsewhere. */
7595 if (is_gimple_omp_oacc (ctx->stmt))
7596 return;
7598 /* SIMD reductions are handled in lower_rec_input_clauses. */
7599 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
7600 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
7601 return;
7603 /* inscan reductions are handled elsewhere. */
7604 if (ctx->scan_inclusive || ctx->scan_exclusive)
7605 return;
7607 /* First see if there is exactly one reduction clause. Use OMP_ATOMIC
7608 update in that case, otherwise use a lock. */
7609 for (c = clauses; c && count < 2; c = OMP_CLAUSE_CHAIN (c))
7610 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
7611 && !OMP_CLAUSE_REDUCTION_TASK (c))
7613 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
7614 || TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
7616 /* Never use OMP_ATOMIC for array reductions or UDRs. */
7617 count = -1;
7618 break;
7620 count++;
7623 if (count == 0)
7624 return;
7626 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7628 tree var, ref, new_var, orig_var;
7629 enum tree_code code;
7630 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
7632 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
7633 || OMP_CLAUSE_REDUCTION_TASK (c))
7634 continue;
7636 enum omp_clause_code ccode = OMP_CLAUSE_REDUCTION;
7637 orig_var = var = OMP_CLAUSE_DECL (c);
7638 if (TREE_CODE (var) == MEM_REF)
7640 var = TREE_OPERAND (var, 0);
7641 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
7642 var = TREE_OPERAND (var, 0);
7643 if (TREE_CODE (var) == ADDR_EXPR)
7644 var = TREE_OPERAND (var, 0);
7645 else
7647 /* If this is a pointer- or reference-based array
7648 section, the var could be private in the outer
7649 context, e.g. on an orphaned loop construct. Pretend
7650 this is a private variable's outer reference. */
7651 ccode = OMP_CLAUSE_PRIVATE;
7652 if (TREE_CODE (var) == INDIRECT_REF)
7653 var = TREE_OPERAND (var, 0);
7655 orig_var = var;
7656 if (is_variable_sized (var))
7658 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
7659 var = DECL_VALUE_EXPR (var);
7660 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
7661 var = TREE_OPERAND (var, 0);
7662 gcc_assert (DECL_P (var));
7665 new_var = lookup_decl (var, ctx);
7666 if (var == OMP_CLAUSE_DECL (c)
7667 && omp_privatize_by_reference (var))
7668 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
7669 ref = build_outer_var_ref (var, ctx, ccode);
7670 code = OMP_CLAUSE_REDUCTION_CODE (c);
7672 /* reduction(-:var) sums up the partial results, so it acts
7673 identically to reduction(+:var). */
7674 if (code == MINUS_EXPR)
7675 code = PLUS_EXPR;
7677 bool is_truth_op = (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR);
7678 if (count == 1)
7680 tree addr = build_fold_addr_expr_loc (clause_loc, ref);
7682 addr = save_expr (addr);
7683 ref = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (addr)), addr);
7684 tree new_var2 = new_var;
7685 tree ref2 = ref;
7686 if (is_truth_op)
7688 tree zero = build_zero_cst (TREE_TYPE (new_var));
7689 new_var2 = fold_build2_loc (clause_loc, NE_EXPR,
7690 boolean_type_node, new_var, zero);
7691 ref2 = fold_build2_loc (clause_loc, NE_EXPR, boolean_type_node,
7692 ref, zero);
7694 x = fold_build2_loc (clause_loc, code, TREE_TYPE (new_var2), ref2,
7695 new_var2);
7696 if (is_truth_op)
7697 x = fold_convert (TREE_TYPE (new_var), x);
7698 x = build2 (OMP_ATOMIC, void_type_node, addr, x);
7699 OMP_ATOMIC_MEMORY_ORDER (x) = OMP_MEMORY_ORDER_RELAXED;
7700 gimplify_and_add (x, stmt_seqp);
7701 return;
7703 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
7705 tree d = OMP_CLAUSE_DECL (c);
7706 tree type = TREE_TYPE (d);
7707 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
7708 tree i = create_tmp_var (TREE_TYPE (v));
7709 tree ptype = build_pointer_type (TREE_TYPE (type));
7710 tree bias = TREE_OPERAND (d, 1);
7711 d = TREE_OPERAND (d, 0);
7712 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
7714 tree b = TREE_OPERAND (d, 1);
7715 b = maybe_lookup_decl (b, ctx);
7716 if (b == NULL)
7718 b = TREE_OPERAND (d, 1);
7719 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
7721 if (integer_zerop (bias))
7722 bias = b;
7723 else
7725 bias = fold_convert_loc (clause_loc, TREE_TYPE (b), bias);
7726 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
7727 TREE_TYPE (b), b, bias);
7729 d = TREE_OPERAND (d, 0);
7731 /* For ref build_outer_var_ref already performs this, so
7732 only new_var needs a dereference. */
7733 if (TREE_CODE (d) == INDIRECT_REF)
7735 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
7736 gcc_assert (omp_privatize_by_reference (var)
7737 && var == orig_var);
7739 else if (TREE_CODE (d) == ADDR_EXPR)
7741 if (orig_var == var)
7743 new_var = build_fold_addr_expr (new_var);
7744 ref = build_fold_addr_expr (ref);
7747 else
7749 gcc_assert (orig_var == var);
7750 if (omp_privatize_by_reference (var))
7751 ref = build_fold_addr_expr (ref);
7753 if (DECL_P (v))
7755 tree t = maybe_lookup_decl (v, ctx);
7756 if (t)
7757 v = t;
7758 else
7759 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
7760 gimplify_expr (&v, stmt_seqp, NULL, is_gimple_val, fb_rvalue);
7762 if (!integer_zerop (bias))
7764 bias = fold_convert_loc (clause_loc, sizetype, bias);
7765 new_var = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
7766 TREE_TYPE (new_var), new_var,
7767 unshare_expr (bias));
7768 ref = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
7769 TREE_TYPE (ref), ref, bias);
7771 new_var = fold_convert_loc (clause_loc, ptype, new_var);
7772 ref = fold_convert_loc (clause_loc, ptype, ref);
7773 tree m = create_tmp_var (ptype);
7774 gimplify_assign (m, new_var, stmt_seqp);
7775 new_var = m;
7776 m = create_tmp_var (ptype);
7777 gimplify_assign (m, ref, stmt_seqp);
7778 ref = m;
7779 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), stmt_seqp);
7780 tree body = create_artificial_label (UNKNOWN_LOCATION);
7781 tree end = create_artificial_label (UNKNOWN_LOCATION);
7782 gimple_seq_add_stmt (&sub_seq, gimple_build_label (body));
7783 tree priv = build_simple_mem_ref_loc (clause_loc, new_var);
7784 tree out = build_simple_mem_ref_loc (clause_loc, ref);
7785 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
7787 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
7788 tree decl_placeholder
7789 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
7790 SET_DECL_VALUE_EXPR (placeholder, out);
7791 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
7792 SET_DECL_VALUE_EXPR (decl_placeholder, priv);
7793 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
7794 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
7795 gimple_seq_add_seq (&sub_seq,
7796 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
7797 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
7798 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
7799 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
7801 else
7803 tree out2 = out;
7804 tree priv2 = priv;
7805 if (is_truth_op)
7807 tree zero = build_zero_cst (TREE_TYPE (out));
7808 out2 = fold_build2_loc (clause_loc, NE_EXPR,
7809 boolean_type_node, out, zero);
7810 priv2 = fold_build2_loc (clause_loc, NE_EXPR,
7811 boolean_type_node, priv, zero);
7813 x = build2 (code, TREE_TYPE (out2), out2, priv2);
7814 if (is_truth_op)
7815 x = fold_convert (TREE_TYPE (out), x);
7816 out = unshare_expr (out);
7817 gimplify_assign (out, x, &sub_seq);
7819 gimple *g = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
7820 TYPE_SIZE_UNIT (TREE_TYPE (type)));
7821 gimple_seq_add_stmt (&sub_seq, g);
7822 g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
7823 TYPE_SIZE_UNIT (TREE_TYPE (type)));
7824 gimple_seq_add_stmt (&sub_seq, g);
7825 g = gimple_build_assign (i, PLUS_EXPR, i,
7826 build_int_cst (TREE_TYPE (i), 1));
7827 gimple_seq_add_stmt (&sub_seq, g);
7828 g = gimple_build_cond (LE_EXPR, i, v, body, end);
7829 gimple_seq_add_stmt (&sub_seq, g);
7830 gimple_seq_add_stmt (&sub_seq, gimple_build_label (end));
7832 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
7834 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
7836 if (omp_privatize_by_reference (var)
7837 && !useless_type_conversion_p (TREE_TYPE (placeholder),
7838 TREE_TYPE (ref)))
7839 ref = build_fold_addr_expr_loc (clause_loc, ref);
7840 SET_DECL_VALUE_EXPR (placeholder, ref);
7841 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
7842 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
7843 gimple_seq_add_seq (&sub_seq, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
7844 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
7845 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
7847 else
7849 tree new_var2 = new_var;
7850 tree ref2 = ref;
7851 if (is_truth_op)
7853 tree zero = build_zero_cst (TREE_TYPE (new_var));
7854 new_var2 = fold_build2_loc (clause_loc, NE_EXPR,
7855 boolean_type_node, new_var, zero);
7856 ref2 = fold_build2_loc (clause_loc, NE_EXPR, boolean_type_node,
7857 ref, zero);
7859 x = build2 (code, TREE_TYPE (ref), ref2, new_var2);
7860 if (is_truth_op)
7861 x = fold_convert (TREE_TYPE (new_var), x);
7862 ref = build_outer_var_ref (var, ctx);
7863 gimplify_assign (ref, x, &sub_seq);
7867 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START),
7868 0);
7869 gimple_seq_add_stmt (stmt_seqp, stmt);
7871 gimple_seq_add_seq (stmt_seqp, sub_seq);
7873 if (clist)
7875 gimple_seq_add_seq (stmt_seqp, *clist);
7876 *clist = NULL;
7879 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END),
7880 0);
7881 gimple_seq_add_stmt (stmt_seqp, stmt);
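
/* Illustrative sketch: for a single scalar clause such as
   'reduction (+:sum)' the COUNT == 1 path above emits just a relaxed
   atomic update, conceptually

     #pragma omp atomic relaxed
     sum_outer = sum_outer + sum_private;

   whereas with two or more clauses, or with an array/UDR reduction
   (COUNT forced to -1), the merges are serialized under a lock:

     GOMP_atomic_start ();
     sum_outer = sum_outer + sum_private;
     prod_outer = prod_outer * prod_private;
     GOMP_atomic_end ();  */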
7885 /* Generate code to implement the COPYPRIVATE clauses. */
7887 static void
7888 lower_copyprivate_clauses (tree clauses, gimple_seq *slist, gimple_seq *rlist,
7889 omp_context *ctx)
7891 tree c;
7893 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7895 tree var, new_var, ref, x;
7896 bool by_ref;
7897 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
7899 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYPRIVATE)
7900 continue;
7902 var = OMP_CLAUSE_DECL (c);
7903 by_ref = use_pointer_for_field (var, NULL);
7905 ref = build_sender_ref (var, ctx);
7906 x = new_var = lookup_decl_in_outer_ctx (var, ctx);
7907 if (by_ref)
7909 x = build_fold_addr_expr_loc (clause_loc, new_var);
7910 x = fold_convert_loc (clause_loc, TREE_TYPE (ref), x);
7912 gimplify_assign (ref, x, slist);
7914 ref = build_receiver_ref (var, false, ctx);
7915 if (by_ref)
7917 ref = fold_convert_loc (clause_loc,
7918 build_pointer_type (TREE_TYPE (new_var)),
7919 ref);
7920 ref = build_fold_indirect_ref_loc (clause_loc, ref);
7922 if (omp_privatize_by_reference (var))
7924 ref = fold_convert_loc (clause_loc, TREE_TYPE (new_var), ref);
7925 ref = build_simple_mem_ref_loc (clause_loc, ref);
7926 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
7928 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, ref);
7929 gimplify_and_add (x, rlist);
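
/* Sketch of the effect for '#pragma omp single copyprivate (a)':
   SLIST receives the store into the broadcast structure, executed by
   the thread that ran the single body, roughly '.omp_copy_o.a = a;'
   (or the address of 'a' when passed by reference), while RLIST
   receives the copy-in executed by every other thread, roughly
   'a = .omp_copy_i->a;'.  */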
7934 /* Generate code to implement the clauses, FIRSTPRIVATE, COPYIN, LASTPRIVATE,
7935 and REDUCTION from the sender (aka parent) side. */
7937 static void
7938 lower_send_clauses (tree clauses, gimple_seq *ilist, gimple_seq *olist,
7939 omp_context *ctx)
7941 tree c, t;
7942 int ignored_looptemp = 0;
7943 bool is_taskloop = false;
7945 /* For taskloop, ignore the first two _looptemp_ clauses; those are
7946 initialized by GOMP_taskloop. */
7947 if (is_task_ctx (ctx) && gimple_omp_task_taskloop_p (ctx->stmt))
7949 ignored_looptemp = 2;
7950 is_taskloop = true;
7953 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7955 tree val, ref, x, var;
7956 bool by_ref, do_in = false, do_out = false;
7957 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
7959 switch (OMP_CLAUSE_CODE (c))
7961 case OMP_CLAUSE_PRIVATE:
7962 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
7963 break;
7964 continue;
7965 case OMP_CLAUSE_FIRSTPRIVATE:
7966 case OMP_CLAUSE_COPYIN:
7967 case OMP_CLAUSE_LASTPRIVATE:
7968 case OMP_CLAUSE_IN_REDUCTION:
7969 case OMP_CLAUSE__REDUCTEMP_:
7970 break;
7971 case OMP_CLAUSE_REDUCTION:
7972 if (is_task_ctx (ctx) || OMP_CLAUSE_REDUCTION_TASK (c))
7973 continue;
7974 break;
7975 case OMP_CLAUSE_SHARED:
7976 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
7977 break;
7978 continue;
7979 case OMP_CLAUSE__LOOPTEMP_:
7980 if (ignored_looptemp)
7982 ignored_looptemp--;
7983 continue;
7985 break;
7986 default:
7987 continue;
7990 val = OMP_CLAUSE_DECL (c);
7991 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
7992 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION)
7993 && TREE_CODE (val) == MEM_REF)
7995 val = TREE_OPERAND (val, 0);
7996 if (TREE_CODE (val) == POINTER_PLUS_EXPR)
7997 val = TREE_OPERAND (val, 0);
7998 if (TREE_CODE (val) == INDIRECT_REF
7999 || TREE_CODE (val) == ADDR_EXPR)
8000 val = TREE_OPERAND (val, 0);
8001 if (is_variable_sized (val))
8002 continue;
8005 /* For OMP_CLAUSE_SHARED_FIRSTPRIVATE, look beyond the
8006 outer taskloop region. */
8007 omp_context *ctx_for_o = ctx;
8008 if (is_taskloop
8009 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
8010 && OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
8011 ctx_for_o = ctx->outer;
8013 var = lookup_decl_in_outer_ctx (val, ctx_for_o);
8015 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYIN
8016 && is_global_var (var)
8017 && (val == OMP_CLAUSE_DECL (c)
8018 || !is_task_ctx (ctx)
8019 || (TREE_CODE (TREE_TYPE (val)) != POINTER_TYPE
8020 && (TREE_CODE (TREE_TYPE (val)) != REFERENCE_TYPE
8021 || (TREE_CODE (TREE_TYPE (TREE_TYPE (val)))
8022 != POINTER_TYPE)))))
8023 continue;
8025 t = omp_member_access_dummy_var (var);
8026 if (t)
8028 var = DECL_VALUE_EXPR (var);
8029 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx_for_o);
8030 if (o != t)
8031 var = unshare_and_remap (var, t, o);
8032 else
8033 var = unshare_expr (var);
8036 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
8038 /* Handle taskloop firstprivate/lastprivate, where the
8039 lastprivate on GIMPLE_OMP_TASK is represented as
8040 OMP_CLAUSE_SHARED_FIRSTPRIVATE. */
8041 tree f = lookup_sfield ((splay_tree_key) &DECL_UID (val), ctx);
8042 x = omp_build_component_ref (ctx->sender_decl, f);
8043 if (use_pointer_for_field (val, ctx))
8044 var = build_fold_addr_expr (var);
8045 gimplify_assign (x, var, ilist);
8046 DECL_ABSTRACT_ORIGIN (f) = NULL;
8047 continue;
8050 if (((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
8051 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IN_REDUCTION)
8052 || val == OMP_CLAUSE_DECL (c))
8053 && is_variable_sized (val))
8054 continue;
8055 by_ref = use_pointer_for_field (val, NULL);
8057 switch (OMP_CLAUSE_CODE (c))
8059 case OMP_CLAUSE_FIRSTPRIVATE:
8060 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c)
8061 && !by_ref
8062 && is_task_ctx (ctx))
8063 suppress_warning (var);
8064 do_in = true;
8065 break;
8067 case OMP_CLAUSE_PRIVATE:
8068 case OMP_CLAUSE_COPYIN:
8069 case OMP_CLAUSE__LOOPTEMP_:
8070 case OMP_CLAUSE__REDUCTEMP_:
8071 do_in = true;
8072 break;
8074 case OMP_CLAUSE_LASTPRIVATE:
8075 if (by_ref || omp_privatize_by_reference (val))
8077 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
8078 continue;
8079 do_in = true;
8081 else
8083 do_out = true;
8084 if (lang_hooks.decls.omp_private_outer_ref (val))
8085 do_in = true;
8087 break;
8089 case OMP_CLAUSE_REDUCTION:
8090 case OMP_CLAUSE_IN_REDUCTION:
8091 do_in = true;
8092 if (val == OMP_CLAUSE_DECL (c))
8094 if (is_task_ctx (ctx))
8095 by_ref = use_pointer_for_field (val, ctx);
8096 else
8097 do_out = !(by_ref || omp_privatize_by_reference (val));
8099 else
8100 by_ref = TREE_CODE (TREE_TYPE (val)) == ARRAY_TYPE;
8101 break;
8103 default:
8104 gcc_unreachable ();
8107 if (do_in)
8109 ref = build_sender_ref (val, ctx);
8110 x = by_ref ? build_fold_addr_expr_loc (clause_loc, var) : var;
8111 gimplify_assign (ref, x, ilist);
8112 if (is_task_ctx (ctx))
8113 DECL_ABSTRACT_ORIGIN (TREE_OPERAND (ref, 1)) = NULL;
8116 if (do_out)
8118 ref = build_sender_ref (val, ctx);
8119 gimplify_assign (var, ref, olist);
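
/* Illustrative sketch: for '#pragma omp task firstprivate (x)' the
   do_in path above emits into ILIST, before the runtime call that
   launches the region, roughly

     .omp_data_o.x = x;      (or '.omp_data_o.x = &x;' when by_ref)

   and for an outgoing clause such as lastprivate the do_out path emits
   the reverse copy 'x = .omp_data_o.x;' into OLIST, run after the
   region completes.  */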
8124 /* Generate code to implement SHARED from the sender (aka parent)
8125 side. This is trickier, since GIMPLE_OMP_PARALLEL_CLAUSES doesn't
8126 list things that got automatically shared. */
8128 static void
8129 lower_send_shared_vars (gimple_seq *ilist, gimple_seq *olist, omp_context *ctx)
8131 tree var, ovar, nvar, t, f, x, record_type;
8133 if (ctx->record_type == NULL)
8134 return;
8136 record_type = ctx->srecord_type ? ctx->srecord_type : ctx->record_type;
8137 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
8139 ovar = DECL_ABSTRACT_ORIGIN (f);
8140 if (!ovar || TREE_CODE (ovar) == FIELD_DECL)
8141 continue;
8143 nvar = maybe_lookup_decl (ovar, ctx);
8144 if (!nvar
8145 || !DECL_HAS_VALUE_EXPR_P (nvar)
8146 || (ctx->allocate_map
8147 && ctx->allocate_map->get (ovar)))
8148 continue;
8150 /* If CTX is a nested parallel directive, find the immediately
8151 enclosing parallel or workshare construct that contains a
8152 mapping for OVAR. */
8153 var = lookup_decl_in_outer_ctx (ovar, ctx);
8155 t = omp_member_access_dummy_var (var);
8156 if (t)
8158 var = DECL_VALUE_EXPR (var);
8159 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
8160 if (o != t)
8161 var = unshare_and_remap (var, t, o);
8162 else
8163 var = unshare_expr (var);
8166 if (use_pointer_for_field (ovar, ctx))
8168 x = build_sender_ref (ovar, ctx);
8169 if (TREE_CODE (TREE_TYPE (f)) == ARRAY_TYPE
8170 && TREE_TYPE (f) == TREE_TYPE (ovar))
8172 gcc_assert (is_parallel_ctx (ctx)
8173 && DECL_ARTIFICIAL (ovar));
8174 /* _condtemp_ clause. */
8175 var = build_constructor (TREE_TYPE (x), NULL);
8177 else
8178 var = build_fold_addr_expr (var);
8179 gimplify_assign (x, var, ilist);
8181 else
8183 x = build_sender_ref (ovar, ctx);
8184 gimplify_assign (x, var, ilist);
8186 if (!TREE_READONLY (var)
8187 /* We don't need to receive a new reference to a result
8188 or parm decl. In fact we must not store to it, as that would
8189 invalidate any pending RSO and generate wrong gimple
8190 during inlining. */
8191 && !((TREE_CODE (var) == RESULT_DECL
8192 || TREE_CODE (var) == PARM_DECL)
8193 && DECL_BY_REFERENCE (var)))
8195 x = build_sender_ref (ovar, ctx);
8196 gimplify_assign (var, x, olist);
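
/* Sketch: for an automatically shared variable 'v' this typically
   emits into ILIST either

     .omp_data_o.v = &v;     when use_pointer_for_field, or
     .omp_data_o.v = v;      otherwise,

   the latter paired with a copy-back 'v = .omp_data_o.v;' in OLIST
   unless 'v' is read-only or a by-reference result/parm decl.  */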
8202 /* Emit an OpenACC head marker call, encapsulating the partitioning and
8203 other information that must be processed by the target compiler.
8204 Return the maximum number of dimensions the associated loop might
8205 be partitioned over. */
8207 static unsigned
8208 lower_oacc_head_mark (location_t loc, tree ddvar, tree clauses,
8209 gimple_seq *seq, omp_context *ctx)
8211 unsigned levels = 0;
8212 unsigned tag = 0;
8213 tree gang_static = NULL_TREE;
8214 auto_vec<tree, 5> args;
8216 args.quick_push (build_int_cst
8217 (integer_type_node, IFN_UNIQUE_OACC_HEAD_MARK));
8218 args.quick_push (ddvar);
8219 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
8221 switch (OMP_CLAUSE_CODE (c))
8223 case OMP_CLAUSE_GANG:
8224 tag |= OLF_DIM_GANG;
8225 gang_static = OMP_CLAUSE_GANG_STATIC_EXPR (c);
8226 /* static:* is represented by -1, and we can ignore it, as
8227 scheduling is always static. */
8228 if (gang_static && integer_minus_onep (gang_static))
8229 gang_static = NULL_TREE;
8230 levels++;
8231 break;
8233 case OMP_CLAUSE_WORKER:
8234 tag |= OLF_DIM_WORKER;
8235 levels++;
8236 break;
8238 case OMP_CLAUSE_VECTOR:
8239 tag |= OLF_DIM_VECTOR;
8240 levels++;
8241 break;
8243 case OMP_CLAUSE_SEQ:
8244 tag |= OLF_SEQ;
8245 break;
8247 case OMP_CLAUSE_AUTO:
8248 tag |= OLF_AUTO;
8249 break;
8251 case OMP_CLAUSE_INDEPENDENT:
8252 tag |= OLF_INDEPENDENT;
8253 break;
8255 case OMP_CLAUSE_TILE:
8256 tag |= OLF_TILE;
8257 break;
8259 default:
8260 continue;
8264 if (gang_static)
8266 if (DECL_P (gang_static))
8267 gang_static = build_outer_var_ref (gang_static, ctx);
8268 tag |= OLF_GANG_STATIC;
8271 omp_context *tgt = enclosing_target_ctx (ctx);
8272 if (!tgt || is_oacc_parallel_or_serial (tgt))
8274 else if (is_oacc_kernels (tgt))
8275 /* This loop handling is not used inside OpenACC 'kernels' regions. */
8276 gcc_unreachable ();
8277 else if (is_oacc_kernels_decomposed_part (tgt))
8279 else
8280 gcc_unreachable ();
8282 /* In a parallel region, loops are implicitly INDEPENDENT. */
8283 if (!tgt || is_oacc_parallel_or_serial (tgt))
8284 tag |= OLF_INDEPENDENT;
8286 /* Loops inside OpenACC 'kernels' decomposed parts' regions are expected to
8287 have an explicit 'seq' or 'independent' clause, and no 'auto' clause. */
8288 if (tgt && is_oacc_kernels_decomposed_part (tgt))
8290 gcc_assert (tag & (OLF_SEQ | OLF_INDEPENDENT));
8291 gcc_assert (!(tag & OLF_AUTO));
8294 if (tag & OLF_TILE)
8295 /* Tiling could use all 3 levels. */
8296 levels = 3;
8297 else
8299 /* A loop lacking SEQ, GANG, WORKER and/or VECTOR could be AUTO.
8300 Ensure at least one level, or 2 for possible auto
8301 partitioning. */
8302 bool maybe_auto = !(tag & (((GOMP_DIM_MASK (GOMP_DIM_MAX) - 1)
8303 << OLF_DIM_BASE) | OLF_SEQ));
8305 if (levels < 1u + maybe_auto)
8306 levels = 1u + maybe_auto;
8309 args.quick_push (build_int_cst (integer_type_node, levels));
8310 args.quick_push (build_int_cst (integer_type_node, tag));
8311 if (gang_static)
8312 args.quick_push (gang_static);
8314 gcall *call = gimple_build_call_internal_vec (IFN_UNIQUE, args);
8315 gimple_set_location (call, loc);
8316 gimple_set_lhs (call, ddvar);
8317 gimple_seq_add_stmt (seq, call);
8319 return levels;
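
/* For example, '#pragma acc loop gang vector' inside a parallel region
   would (as a sketch) produce a head marker along the lines of

     .data_dep = IFN_UNIQUE (OACC_HEAD_MARK, .data_dep, 2,
                             OLF_DIM_GANG | OLF_DIM_VECTOR
                             | OLF_INDEPENDENT);

   i.e. two partitioning levels plus the corresponding tag bits, for the
   target compiler to consume.  */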
8322 /* Emit an OpenACC loop head or tail marker to SEQ. TOFOLLOW, if
8323 non-NULL, is the partitioning level of the enclosed region. */
8325 static void
8326 lower_oacc_loop_marker (location_t loc, tree ddvar, bool head,
8327 tree tofollow, gimple_seq *seq)
8329 int marker_kind = (head ? IFN_UNIQUE_OACC_HEAD_MARK
8330 : IFN_UNIQUE_OACC_TAIL_MARK);
8331 tree marker = build_int_cst (integer_type_node, marker_kind);
8332 int nargs = 2 + (tofollow != NULL_TREE);
8333 gcall *call = gimple_build_call_internal (IFN_UNIQUE, nargs,
8334 marker, ddvar, tofollow);
8335 gimple_set_location (call, loc);
8336 gimple_set_lhs (call, ddvar);
8337 gimple_seq_add_stmt (seq, call);
8340 /* Generate the before and after OpenACC loop sequences. CLAUSES are
8341 the loop clauses, from which we extract reductions. Initialize
8342 HEAD and TAIL. */
8344 static void
8345 lower_oacc_head_tail (location_t loc, tree clauses, gcall *private_marker,
8346 gimple_seq *head, gimple_seq *tail, omp_context *ctx)
8348 bool inner = false;
8349 tree ddvar = create_tmp_var (integer_type_node, ".data_dep");
8350 gimple_seq_add_stmt (head, gimple_build_assign (ddvar, integer_zero_node));
8352 unsigned count = lower_oacc_head_mark (loc, ddvar, clauses, head, ctx);
8354 if (private_marker)
8356 gimple_set_location (private_marker, loc);
8357 gimple_call_set_lhs (private_marker, ddvar);
8358 gimple_call_set_arg (private_marker, 1, ddvar);
8361 tree fork_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_FORK);
8362 tree join_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_JOIN);
8364 gcc_assert (count);
8365 for (unsigned done = 1; count; count--, done++)
8367 gimple_seq fork_seq = NULL;
8368 gimple_seq join_seq = NULL;
8370 tree place = build_int_cst (integer_type_node, -1);
8371 gcall *fork = gimple_build_call_internal (IFN_UNIQUE, 3,
8372 fork_kind, ddvar, place);
8373 gimple_set_location (fork, loc);
8374 gimple_set_lhs (fork, ddvar);
8376 gcall *join = gimple_build_call_internal (IFN_UNIQUE, 3,
8377 join_kind, ddvar, place);
8378 gimple_set_location (join, loc);
8379 gimple_set_lhs (join, ddvar);
8381 /* Mark the beginning of this level sequence. */
8382 if (inner)
8383 lower_oacc_loop_marker (loc, ddvar, true,
8384 build_int_cst (integer_type_node, count),
8385 &fork_seq);
8386 lower_oacc_loop_marker (loc, ddvar, false,
8387 build_int_cst (integer_type_node, done),
8388 &join_seq);
8390 lower_oacc_reductions (loc, clauses, place, inner,
8391 fork, (count == 1) ? private_marker : NULL,
8392 join, &fork_seq, &join_seq, ctx);
8394 /* Append this level to head. */
8395 gimple_seq_add_seq (head, fork_seq);
8396 /* Prepend it to tail. */
8397 gimple_seq_add_seq (&join_seq, *tail);
8398 *tail = join_seq;
8400 inner = true;
8403 /* Mark the end of the sequence. */
8404 lower_oacc_loop_marker (loc, ddvar, true, NULL_TREE, head);
8405 lower_oacc_loop_marker (loc, ddvar, false, NULL_TREE, tail);
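
/* Sketch of the resulting structure for a loop partitioned over two
   levels: HEAD accumulates

     OACC_HEAD_MARK (levels, tag, ...)
     fork + reduction setup/init for the outer level
     OACC_HEAD_MARK for the inner level
     fork + reduction setup/init for the inner level
     final OACC_HEAD_MARK

   while each level's join sequence is prepended to TAIL, so TAIL ends
   up mirroring HEAD in reverse order.  */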
8408 /* If exceptions are enabled, wrap the statements in BODY in a MUST_NOT_THROW
8409 catch handler and return it. This prevents programs from violating the
8410 structured block semantics with throws. */
8412 static gimple_seq
8413 maybe_catch_exception (gimple_seq body)
8415 gimple *g;
8416 tree decl;
8418 if (!flag_exceptions)
8419 return body;
8421 if (lang_hooks.eh_protect_cleanup_actions != NULL)
8422 decl = lang_hooks.eh_protect_cleanup_actions ();
8423 else
8424 decl = builtin_decl_explicit (BUILT_IN_TRAP);
8426 g = gimple_build_eh_must_not_throw (decl);
8427 g = gimple_build_try (body, gimple_seq_alloc_with_stmt (g),
8428 GIMPLE_TRY_CATCH);
8430 return gimple_seq_alloc_with_stmt (g);
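
/* Conceptually this wraps BODY as

     try { BODY } catch (...) { <eh_protect_cleanup_actions or trap> }

   using GIMPLE_TRY_CATCH with a GIMPLE_EH_MUST_NOT_THROW handler, so an
   exception escaping the structured block terminates the program rather
   than unwinding across the OMP region boundary.  */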
8434 /* Routines to lower OMP directives into OMP-GIMPLE. */
8436 /* If CTX is a worksharing context inside a cancellable parallel
8437 region and it isn't nowait, add an lhs to its GIMPLE_OMP_RETURN
8438 and a conditional branch to the parallel's cancel_label to handle
8439 cancellation in the implicit barrier. */
8441 static void
8442 maybe_add_implicit_barrier_cancel (omp_context *ctx, gimple *omp_return,
8443 gimple_seq *body)
8445 gcc_assert (gimple_code (omp_return) == GIMPLE_OMP_RETURN);
8446 if (gimple_omp_return_nowait_p (omp_return))
8447 return;
8448 for (omp_context *outer = ctx->outer; outer; outer = outer->outer)
8449 if (gimple_code (outer->stmt) == GIMPLE_OMP_PARALLEL
8450 && outer->cancellable)
8452 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_CANCEL);
8453 tree c_bool_type = TREE_TYPE (TREE_TYPE (fndecl));
8454 tree lhs = create_tmp_var (c_bool_type);
8455 gimple_omp_return_set_lhs (omp_return, lhs);
8456 tree fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
8457 gimple *g = gimple_build_cond (NE_EXPR, lhs,
8458 fold_convert (c_bool_type,
8459 boolean_false_node),
8460 outer->cancel_label, fallthru_label);
8461 gimple_seq_add_stmt (body, g);
8462 gimple_seq_add_stmt (body, gimple_build_label (fallthru_label));
8464 else if (gimple_code (outer->stmt) != GIMPLE_OMP_TASKGROUP
8465 && gimple_code (outer->stmt) != GIMPLE_OMP_SCOPE)
8466 return;
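
/* Sketch of the emitted tail: the implicit barrier's GIMPLE_OMP_RETURN
   gains an lhs that will receive the cancellable barrier's result, and
   is followed by

     if (lhs != false) goto <parallel's cancel_label>;
     fallthru:

   so a barrier that observes cancellation branches straight to the
   enclosing parallel's cancellation handling.  */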
8469 /* Find the first task_reduction or reduction clause or return NULL
8470 if there are none. */
8472 static inline tree
8473 omp_task_reductions_find_first (tree clauses, enum tree_code code,
8474 enum omp_clause_code ccode)
8476 while (1)
8478 clauses = omp_find_clause (clauses, ccode);
8479 if (clauses == NULL_TREE)
8480 return NULL_TREE;
8481 if (ccode != OMP_CLAUSE_REDUCTION
8482 || code == OMP_TASKLOOP
8483 || OMP_CLAUSE_REDUCTION_TASK (clauses))
8484 return clauses;
8485 clauses = OMP_CLAUSE_CHAIN (clauses);
8489 static void lower_omp_task_reductions (omp_context *, enum tree_code, tree,
8490 gimple_seq *, gimple_seq *);
8492 /* Lower the OpenMP sections directive in the current statement in GSI_P.
8493 CTX is the enclosing OMP context for the current statement. */
8495 static void
8496 lower_omp_sections (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8498 tree block, control;
8499 gimple_stmt_iterator tgsi;
8500 gomp_sections *stmt;
8501 gimple *t;
8502 gbind *new_stmt, *bind;
8503 gimple_seq ilist, dlist, olist, tred_dlist = NULL, clist = NULL, new_body;
8505 stmt = as_a <gomp_sections *> (gsi_stmt (*gsi_p));
8507 push_gimplify_context ();
8509 dlist = NULL;
8510 ilist = NULL;
8512 tree rclauses
8513 = omp_task_reductions_find_first (gimple_omp_sections_clauses (stmt),
8514 OMP_SECTIONS, OMP_CLAUSE_REDUCTION);
8515 tree rtmp = NULL_TREE;
8516 if (rclauses)
8518 tree type = build_pointer_type (pointer_sized_int_node);
8519 tree temp = create_tmp_var (type);
8520 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
8521 OMP_CLAUSE_DECL (c) = temp;
8522 OMP_CLAUSE_CHAIN (c) = gimple_omp_sections_clauses (stmt);
8523 gimple_omp_sections_set_clauses (stmt, c);
8524 lower_omp_task_reductions (ctx, OMP_SECTIONS,
8525 gimple_omp_sections_clauses (stmt),
8526 &ilist, &tred_dlist);
8527 rclauses = c;
8528 rtmp = make_ssa_name (type);
8529 gimple_seq_add_stmt (&ilist, gimple_build_assign (rtmp, temp));
8532 tree *clauses_ptr = gimple_omp_sections_clauses_ptr (stmt);
8533 lower_lastprivate_conditional_clauses (clauses_ptr, ctx);
8535 lower_rec_input_clauses (gimple_omp_sections_clauses (stmt),
8536 &ilist, &dlist, ctx, NULL);
8538 control = create_tmp_var (unsigned_type_node, ".section");
8539 gimple_omp_sections_set_control (stmt, control);
8541 new_body = gimple_omp_body (stmt);
8542 gimple_omp_set_body (stmt, NULL);
8543 tgsi = gsi_start (new_body);
8544 for (; !gsi_end_p (tgsi); gsi_next (&tgsi))
8546 omp_context *sctx;
8547 gimple *sec_start;
8549 sec_start = gsi_stmt (tgsi);
8550 sctx = maybe_lookup_ctx (sec_start);
8551 gcc_assert (sctx);
8553 lower_omp (gimple_omp_body_ptr (sec_start), sctx);
8554 gsi_insert_seq_after (&tgsi, gimple_omp_body (sec_start),
8555 GSI_CONTINUE_LINKING);
8556 gimple_omp_set_body (sec_start, NULL);
8558 if (gsi_one_before_end_p (tgsi))
8560 gimple_seq l = NULL;
8561 lower_lastprivate_clauses (gimple_omp_sections_clauses (stmt), NULL,
8562 &ilist, &l, &clist, ctx);
8563 gsi_insert_seq_after (&tgsi, l, GSI_CONTINUE_LINKING);
8564 gimple_omp_section_set_last (sec_start);
8567 gsi_insert_after (&tgsi, gimple_build_omp_return (false),
8568 GSI_CONTINUE_LINKING);
8571 block = make_node (BLOCK);
8572 bind = gimple_build_bind (NULL, new_body, block);
8574 olist = NULL;
8575 lower_reduction_clauses (gimple_omp_sections_clauses (stmt), &olist,
8576 &clist, ctx);
8577 if (clist)
8579 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START);
8580 gcall *g = gimple_build_call (fndecl, 0);
8581 gimple_seq_add_stmt (&olist, g);
8582 gimple_seq_add_seq (&olist, clist);
8583 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END);
8584 g = gimple_build_call (fndecl, 0);
8585 gimple_seq_add_stmt (&olist, g);
8588 block = make_node (BLOCK);
8589 new_stmt = gimple_build_bind (NULL, NULL, block);
8590 gsi_replace (gsi_p, new_stmt, true);
8592 pop_gimplify_context (new_stmt);
8593 gimple_bind_append_vars (new_stmt, ctx->block_vars);
8594 BLOCK_VARS (block) = gimple_bind_vars (bind);
8595 if (BLOCK_VARS (block))
8596 TREE_USED (block) = 1;
8598 new_body = NULL;
8599 gimple_seq_add_seq (&new_body, ilist);
8600 gimple_seq_add_stmt (&new_body, stmt);
8601 gimple_seq_add_stmt (&new_body, gimple_build_omp_sections_switch ());
8602 gimple_seq_add_stmt (&new_body, bind);
8604 t = gimple_build_omp_continue (control, control);
8605 gimple_seq_add_stmt (&new_body, t);
8607 gimple_seq_add_seq (&new_body, olist);
8608 if (ctx->cancellable)
8609 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
8610 gimple_seq_add_seq (&new_body, dlist);
8612 new_body = maybe_catch_exception (new_body);
8614 bool nowait = omp_find_clause (gimple_omp_sections_clauses (stmt),
8615 OMP_CLAUSE_NOWAIT) != NULL_TREE;
8616 t = gimple_build_omp_return (nowait);
8617 gimple_seq_add_stmt (&new_body, t);
8618 gimple_seq_add_seq (&new_body, tred_dlist);
8619 maybe_add_implicit_barrier_cancel (ctx, t, &new_body);
8621 if (rclauses)
8622 OMP_CLAUSE_DECL (rclauses) = rtmp;
8624 gimple_bind_set_body (new_stmt, new_body);
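
/* Overall shape of the lowered construct, as a sketch: the replacement
   bind contains

     <ilist: reduction registration, privatization setup>
     GIMPLE_OMP_SECTIONS <clauses, control var .section>
     GIMPLE_OMP_SECTIONS_SWITCH
     <bind holding the lowered GIMPLE_OMP_SECTION bodies>
     GIMPLE_OMP_CONTINUE (.section, .section)
     <olist: reduction merges>  <dlist: destructors>
     GIMPLE_OMP_RETURN [nowait]

   with pass_expand_omp later turning this into runtime scheduling calls
   and a switch on the control variable.  */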
8628 /* A subroutine of lower_omp_single. Expand the simple form of
8629 a GIMPLE_OMP_SINGLE, without a copyprivate clause:
8631 if (GOMP_single_start ())
8632 BODY;
8633 [ GOMP_barrier (); ] -> unless 'nowait' is present.
8635 FIXME. It may be better to delay expanding the logic of this until
8636 pass_expand_omp. The expanded logic may make the job more difficult
8637 for a synchronization analysis pass. */
8639 static void
8640 lower_omp_single_simple (gomp_single *single_stmt, gimple_seq *pre_p)
8642 location_t loc = gimple_location (single_stmt);
8643 tree tlabel = create_artificial_label (loc);
8644 tree flabel = create_artificial_label (loc);
8645 gimple *call, *cond;
8646 tree lhs, decl;
8648 decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_START);
8649 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (decl)));
8650 call = gimple_build_call (decl, 0);
8651 gimple_call_set_lhs (call, lhs);
8652 gimple_seq_add_stmt (pre_p, call);
8654 cond = gimple_build_cond (EQ_EXPR, lhs,
8655 fold_convert_loc (loc, TREE_TYPE (lhs),
8656 boolean_true_node),
8657 tlabel, flabel);
8658 gimple_seq_add_stmt (pre_p, cond);
8659 gimple_seq_add_stmt (pre_p, gimple_build_label (tlabel));
8660 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
8661 gimple_seq_add_stmt (pre_p, gimple_build_label (flabel));
8665 /* A subroutine of lower_omp_single. Expand the simple form of
8666 a GIMPLE_OMP_SINGLE, with a copyprivate clause:
8668 #pragma omp single copyprivate (a, b, c)
8670 Create a new structure to hold copies of 'a', 'b' and 'c' and emit:
8673 if ((copyout_p = GOMP_single_copy_start ()) == NULL)
8675 BODY;
8676 copyout.a = a;
8677 copyout.b = b;
8678 copyout.c = c;
8679 GOMP_single_copy_end (&copyout);
8681 else
8683 a = copyout_p->a;
8684 b = copyout_p->b;
8685 c = copyout_p->c;
8687 GOMP_barrier ();
8690 FIXME. It may be better to delay expanding the logic of this until
8691 pass_expand_omp. The expanded logic may make the job more difficult
8692 for a synchronization analysis pass. */
8694 static void
8695 lower_omp_single_copy (gomp_single *single_stmt, gimple_seq *pre_p,
8696 omp_context *ctx)
8698 tree ptr_type, t, l0, l1, l2, bfn_decl;
8699 gimple_seq copyin_seq;
8700 location_t loc = gimple_location (single_stmt);
8702 ctx->sender_decl = create_tmp_var (ctx->record_type, ".omp_copy_o");
8704 ptr_type = build_pointer_type (ctx->record_type);
8705 ctx->receiver_decl = create_tmp_var (ptr_type, ".omp_copy_i");
8707 l0 = create_artificial_label (loc);
8708 l1 = create_artificial_label (loc);
8709 l2 = create_artificial_label (loc);
8711 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_START);
8712 t = build_call_expr_loc (loc, bfn_decl, 0);
8713 t = fold_convert_loc (loc, ptr_type, t);
8714 gimplify_assign (ctx->receiver_decl, t, pre_p);
8716 t = build2 (EQ_EXPR, boolean_type_node, ctx->receiver_decl,
8717 build_int_cst (ptr_type, 0));
8718 t = build3 (COND_EXPR, void_type_node, t,
8719 build_and_jump (&l0), build_and_jump (&l1));
8720 gimplify_and_add (t, pre_p);
8722 gimple_seq_add_stmt (pre_p, gimple_build_label (l0));
8724 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
8726 copyin_seq = NULL;
8727 lower_copyprivate_clauses (gimple_omp_single_clauses (single_stmt), pre_p,
8728 &copyin_seq, ctx);
8730 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
8731 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_END);
8732 t = build_call_expr_loc (loc, bfn_decl, 1, t);
8733 gimplify_and_add (t, pre_p);
8735 t = build_and_jump (&l2);
8736 gimplify_and_add (t, pre_p);
8738 gimple_seq_add_stmt (pre_p, gimple_build_label (l1));
8740 gimple_seq_add_seq (pre_p, copyin_seq);
8742 gimple_seq_add_stmt (pre_p, gimple_build_label (l2));
8746 /* Expand code for an OpenMP single directive. */
8748 static void
8749 lower_omp_single (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8751 tree block;
8752 gomp_single *single_stmt = as_a <gomp_single *> (gsi_stmt (*gsi_p));
8753 gbind *bind;
8754 gimple_seq bind_body, bind_body_tail = NULL, dlist;
8756 push_gimplify_context ();
8758 block = make_node (BLOCK);
8759 bind = gimple_build_bind (NULL, NULL, block);
8760 gsi_replace (gsi_p, bind, true);
8761 bind_body = NULL;
8762 dlist = NULL;
8763 lower_rec_input_clauses (gimple_omp_single_clauses (single_stmt),
8764 &bind_body, &dlist, ctx, NULL);
8765 lower_omp (gimple_omp_body_ptr (single_stmt), ctx);
8767 gimple_seq_add_stmt (&bind_body, single_stmt);
8769 if (ctx->record_type)
8770 lower_omp_single_copy (single_stmt, &bind_body, ctx);
8771 else
8772 lower_omp_single_simple (single_stmt, &bind_body);
8774 gimple_omp_set_body (single_stmt, NULL);
8776 gimple_seq_add_seq (&bind_body, dlist);
8778 bind_body = maybe_catch_exception (bind_body);
8780 bool nowait = omp_find_clause (gimple_omp_single_clauses (single_stmt),
8781 OMP_CLAUSE_NOWAIT) != NULL_TREE;
8782 gimple *g = gimple_build_omp_return (nowait);
8783 gimple_seq_add_stmt (&bind_body_tail, g);
8784 maybe_add_implicit_barrier_cancel (ctx, g, &bind_body_tail);
8785 if (ctx->record_type)
8787 gimple_stmt_iterator gsi = gsi_start (bind_body_tail);
8788 tree clobber = build_clobber (ctx->record_type);
8789 gsi_insert_after (&gsi, gimple_build_assign (ctx->sender_decl,
8790 clobber), GSI_SAME_STMT);
8792 gimple_seq_add_seq (&bind_body, bind_body_tail);
8793 gimple_bind_set_body (bind, bind_body);
8795 pop_gimplify_context (bind);
8797 gimple_bind_append_vars (bind, ctx->block_vars);
8798 BLOCK_VARS (block) = ctx->block_vars;
8799 if (BLOCK_VARS (block))
8800 TREE_USED (block) = 1;
8804 /* Lower code for an OMP scope directive. */
8806 static void
8807 lower_omp_scope (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8809 tree block;
8810 gimple *scope_stmt = gsi_stmt (*gsi_p);
8811 gbind *bind;
8812 gimple_seq bind_body, bind_body_tail = NULL, dlist;
8813 gimple_seq tred_dlist = NULL;
8815 push_gimplify_context ();
8817 block = make_node (BLOCK);
8818 bind = gimple_build_bind (NULL, NULL, block);
8819 gsi_replace (gsi_p, bind, true);
8820 bind_body = NULL;
8821 dlist = NULL;
8823 tree rclauses
8824 = omp_task_reductions_find_first (gimple_omp_scope_clauses (scope_stmt),
8825 OMP_SCOPE, OMP_CLAUSE_REDUCTION);
8826 if (rclauses)
8828 tree type = build_pointer_type (pointer_sized_int_node);
8829 tree temp = create_tmp_var (type);
8830 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
8831 OMP_CLAUSE_DECL (c) = temp;
8832 OMP_CLAUSE_CHAIN (c) = gimple_omp_scope_clauses (scope_stmt);
8833 gimple_omp_scope_set_clauses (scope_stmt, c);
8834 lower_omp_task_reductions (ctx, OMP_SCOPE,
8835 gimple_omp_scope_clauses (scope_stmt),
8836 &bind_body, &tred_dlist);
8837 rclauses = c;
8838 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_SCOPE_START);
8839 gimple *stmt = gimple_build_call (fndecl, 1, temp);
8840 gimple_seq_add_stmt (&bind_body, stmt);
8843 lower_rec_input_clauses (gimple_omp_scope_clauses (scope_stmt),
8844 &bind_body, &dlist, ctx, NULL);
8845 lower_omp (gimple_omp_body_ptr (scope_stmt), ctx);
8847 gimple_seq_add_stmt (&bind_body, scope_stmt);
8849 gimple_seq_add_seq (&bind_body, gimple_omp_body (scope_stmt));
8851 gimple_omp_set_body (scope_stmt, NULL);
8853 gimple_seq clist = NULL;
8854 lower_reduction_clauses (gimple_omp_scope_clauses (scope_stmt),
8855 &bind_body, &clist, ctx);
8856 if (clist)
8858 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START);
8859 gcall *g = gimple_build_call (fndecl, 0);
8860 gimple_seq_add_stmt (&bind_body, g);
8861 gimple_seq_add_seq (&bind_body, clist);
8862 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END);
8863 g = gimple_build_call (fndecl, 0);
8864 gimple_seq_add_stmt (&bind_body, g);
8867 gimple_seq_add_seq (&bind_body, dlist);
8869 bind_body = maybe_catch_exception (bind_body);
8871 bool nowait = omp_find_clause (gimple_omp_scope_clauses (scope_stmt),
8872 OMP_CLAUSE_NOWAIT) != NULL_TREE;
8873 gimple *g = gimple_build_omp_return (nowait);
8874 gimple_seq_add_stmt (&bind_body_tail, g);
8875 gimple_seq_add_seq (&bind_body_tail, tred_dlist);
8876 maybe_add_implicit_barrier_cancel (ctx, g, &bind_body_tail);
8877 if (ctx->record_type)
8879 gimple_stmt_iterator gsi = gsi_start (bind_body_tail);
8880 tree clobber = build_clobber (ctx->record_type);
8881 gsi_insert_after (&gsi, gimple_build_assign (ctx->sender_decl,
8882 clobber), GSI_SAME_STMT);
8884 gimple_seq_add_seq (&bind_body, bind_body_tail);
8886 gimple_bind_set_body (bind, bind_body);
8888 pop_gimplify_context (bind);
8890 gimple_bind_append_vars (bind, ctx->block_vars);
8891 BLOCK_VARS (block) = ctx->block_vars;
8892 if (BLOCK_VARS (block))
8893 TREE_USED (block) = 1;
8895 /* Expand code for an OpenMP master or masked directive. */
8897 static void
8898 lower_omp_master (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8900 tree block, lab = NULL, x, bfn_decl;
8901 gimple *stmt = gsi_stmt (*gsi_p);
8902 gbind *bind;
8903 location_t loc = gimple_location (stmt);
8904 gimple_seq tseq;
8905 tree filter = integer_zero_node;
8907 push_gimplify_context ();
8909 if (gimple_code (stmt) == GIMPLE_OMP_MASKED)
8911 filter = omp_find_clause (gimple_omp_masked_clauses (stmt),
8912 OMP_CLAUSE_FILTER);
8913 if (filter)
8914 filter = fold_convert (integer_type_node,
8915 OMP_CLAUSE_FILTER_EXPR (filter));
8916 else
8917 filter = integer_zero_node;
8919 block = make_node (BLOCK);
8920 bind = gimple_build_bind (NULL, NULL, block);
8921 gsi_replace (gsi_p, bind, true);
8922 gimple_bind_add_stmt (bind, stmt);
8924 bfn_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
8925 x = build_call_expr_loc (loc, bfn_decl, 0);
8926 x = build2 (EQ_EXPR, boolean_type_node, x, filter);
8927 x = build3 (COND_EXPR, void_type_node, x, NULL, build_and_jump (&lab));
8928 tseq = NULL;
8929 gimplify_and_add (x, &tseq);
8930 gimple_bind_add_seq (bind, tseq);
8932 lower_omp (gimple_omp_body_ptr (stmt), ctx);
8933 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
8934 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
8935 gimple_omp_set_body (stmt, NULL);
8937 gimple_bind_add_stmt (bind, gimple_build_label (lab));
8939 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
8941 pop_gimplify_context (bind);
8943 gimple_bind_append_vars (bind, ctx->block_vars);
8944 BLOCK_VARS (block) = ctx->block_vars;
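
/* Sketch of the expansion for '#pragma omp masked filter (tid)' (master
   is the FILTER == 0 special case):

     if (omp_get_thread_num () == tid)
       BODY;
     lab:
     GIMPLE_OMP_RETURN (nowait)

   i.e. threads whose id does not match the filter jump over the body,
   and no barrier is implied.  */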
8947 /* Helper function for lower_omp_task_reductions. For a specific PASS,
8948 find the current clause that should be processed, or return false
8949 if all have been processed already. */
8951 static inline bool
8952 omp_task_reduction_iterate (int pass, enum tree_code code,
8953 enum omp_clause_code ccode, tree *c, tree *decl,
8954 tree *type, tree *next)
8956 for (; *c; *c = omp_find_clause (OMP_CLAUSE_CHAIN (*c), ccode))
8958 if (ccode == OMP_CLAUSE_REDUCTION
8959 && code != OMP_TASKLOOP
8960 && !OMP_CLAUSE_REDUCTION_TASK (*c))
8961 continue;
8962 *decl = OMP_CLAUSE_DECL (*c);
8963 *type = TREE_TYPE (*decl);
8964 if (TREE_CODE (*decl) == MEM_REF)
8966 if (pass != 1)
8967 continue;
8969 else
8971 if (omp_privatize_by_reference (*decl))
8972 *type = TREE_TYPE (*type);
8973 if (pass != (!TREE_CONSTANT (TYPE_SIZE_UNIT (*type))))
8974 continue;
8976 *next = omp_find_clause (OMP_CLAUSE_CHAIN (*c), ccode);
8977 return true;
8979 *decl = NULL_TREE;
8980 *type = NULL_TREE;
8981 *next = NULL_TREE;
8982 return false;
8985 /* Lower task_reduction and reduction clauses (the latter, unless CODE is
8986 OMP_TASKGROUP, only with the task modifier). Register the mapping of those
8987 in the START sequence, and reduce and unregister them in the END sequence. */
8989 static void
8990 lower_omp_task_reductions (omp_context *ctx, enum tree_code code, tree clauses,
8991 gimple_seq *start, gimple_seq *end)
8993 enum omp_clause_code ccode
8994 = (code == OMP_TASKGROUP
8995 ? OMP_CLAUSE_TASK_REDUCTION : OMP_CLAUSE_REDUCTION);
8996 tree cancellable = NULL_TREE;
8997 clauses = omp_task_reductions_find_first (clauses, code, ccode);
8998 if (clauses == NULL_TREE)
8999 return;
9000 if (code == OMP_FOR || code == OMP_SECTIONS || code == OMP_SCOPE)
9002 for (omp_context *outer = ctx->outer; outer; outer = outer->outer)
9003 if (gimple_code (outer->stmt) == GIMPLE_OMP_PARALLEL
9004 && outer->cancellable)
9006 cancellable = error_mark_node;
9007 break;
9009 else if (gimple_code (outer->stmt) != GIMPLE_OMP_TASKGROUP
9010 && gimple_code (outer->stmt) != GIMPLE_OMP_SCOPE)
9011 break;
9013 tree record_type = lang_hooks.types.make_type (RECORD_TYPE);
9014 tree *last = &TYPE_FIELDS (record_type);
9015 unsigned cnt = 0;
9016 if (cancellable)
9018 tree field = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
9019 ptr_type_node);
9020 tree ifield = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
9021 integer_type_node);
9022 *last = field;
9023 DECL_CHAIN (field) = ifield;
9024 last = &DECL_CHAIN (ifield);
9025 DECL_CONTEXT (field) = record_type;
9026 if (TYPE_ALIGN (record_type) < DECL_ALIGN (field))
9027 SET_TYPE_ALIGN (record_type, DECL_ALIGN (field));
9028 DECL_CONTEXT (ifield) = record_type;
9029 if (TYPE_ALIGN (record_type) < DECL_ALIGN (ifield))
9030 SET_TYPE_ALIGN (record_type, DECL_ALIGN (ifield));
9032 for (int pass = 0; pass < 2; pass++)
9034 tree decl, type, next;
9035 for (tree c = clauses;
9036 omp_task_reduction_iterate (pass, code, ccode,
9037 &c, &decl, &type, &next); c = next)
9039 ++cnt;
9040 tree new_type = type;
9041 if (ctx->outer)
9042 new_type = remap_type (type, &ctx->outer->cb);
9043 tree field
9044 = build_decl (OMP_CLAUSE_LOCATION (c), FIELD_DECL,
9045 DECL_P (decl) ? DECL_NAME (decl) : NULL_TREE,
9046 new_type);
9047 if (DECL_P (decl) && type == TREE_TYPE (decl))
9049 SET_DECL_ALIGN (field, DECL_ALIGN (decl));
9050 DECL_USER_ALIGN (field) = DECL_USER_ALIGN (decl);
9051 TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (decl);
9053 else
9054 SET_DECL_ALIGN (field, TYPE_ALIGN (type));
9055 DECL_CONTEXT (field) = record_type;
9056 if (TYPE_ALIGN (record_type) < DECL_ALIGN (field))
9057 SET_TYPE_ALIGN (record_type, DECL_ALIGN (field));
9058 *last = field;
9059 last = &DECL_CHAIN (field);
9060 tree bfield
9061 = build_decl (OMP_CLAUSE_LOCATION (c), FIELD_DECL, NULL_TREE,
9062 boolean_type_node);
9063 DECL_CONTEXT (bfield) = record_type;
9064 if (TYPE_ALIGN (record_type) < DECL_ALIGN (bfield))
9065 SET_TYPE_ALIGN (record_type, DECL_ALIGN (bfield));
9066 *last = bfield;
9067 last = &DECL_CHAIN (bfield);
9070 *last = NULL_TREE;
9071 layout_type (record_type);
9073 /* Build up an array which registers with the runtime all the reductions
9074 and deregisters them at the end. Format documented in libgomp/task.c. */
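
/* As laid out by the stores below (a sketch; libgomp/task.c remains
   authoritative), the array holds:

     [0] number of reductions (CNT)
     [1] per-thread chunk size, rounded up to the cache line size
     [2] alignment, later overwritten by the runtime with the data pointer
     [3] -1   [4] 0   (runtime-internal slots)
     [7 + i*3], [7 + i*3 + 1]   address and field offset of the i-th
                                reduction variable.  */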
9075 tree atype = build_array_type_nelts (pointer_sized_int_node, 7 + cnt * 3);
9076 tree avar = create_tmp_var_raw (atype);
9077 gimple_add_tmp_var (avar);
9078 TREE_ADDRESSABLE (avar) = 1;
9079 tree r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_zero_node,
9080 NULL_TREE, NULL_TREE);
9081 tree t = build_int_cst (pointer_sized_int_node, cnt);
9082 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
9083 gimple_seq seq = NULL;
9084 tree sz = fold_convert (pointer_sized_int_node,
9085 TYPE_SIZE_UNIT (record_type));
9086 int cachesz = 64;
9087 sz = fold_build2 (PLUS_EXPR, pointer_sized_int_node, sz,
9088 build_int_cst (pointer_sized_int_node, cachesz - 1));
9089 sz = fold_build2 (BIT_AND_EXPR, pointer_sized_int_node, sz,
9090 build_int_cst (pointer_sized_int_node, ~(cachesz - 1)));
9091 ctx->task_reductions.create (1 + cnt);
9092 ctx->task_reduction_map = new hash_map<tree, unsigned>;
9093 ctx->task_reductions.quick_push (TREE_CODE (sz) == INTEGER_CST
9094 ? sz : NULL_TREE);
9095 sz = force_gimple_operand (sz, &seq, true, NULL_TREE);
9096 gimple_seq_add_seq (start, seq);
9097 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_one_node,
9098 NULL_TREE, NULL_TREE);
9099 gimple_seq_add_stmt (start, gimple_build_assign (r, sz));
9100 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (2),
9101 NULL_TREE, NULL_TREE);
9102 t = build_int_cst (pointer_sized_int_node,
9103 MAX (TYPE_ALIGN_UNIT (record_type), (unsigned) cachesz));
9104 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
9105 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (3),
9106 NULL_TREE, NULL_TREE);
9107 t = build_int_cst (pointer_sized_int_node, -1);
9108 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
9109 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (4),
9110 NULL_TREE, NULL_TREE);
9111 t = build_int_cst (pointer_sized_int_node, 0);
9112 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
9114 /* In END, build a loop that iterates from 0 to omp_get_num_threads () - 1
9115 and for each task reduction checks a bool right after the private variable
9116 within that thread's chunk; if the bool is clear, the variable hasn't been
9117 initialized and thus isn't going to be reduced or destructed; otherwise
9118 reduce and destruct it. */
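
/* Roughly, the loop built below has the shape (our sketch):

     idx = 0;
     lab1:
       ptr = data + idx * chunk_size;
       for each reduction R:
         if (ptr->R_initialized)
           { merge the outer R with ptr->R; run R's destructor; }
       idx++;
       if (idx < num_thr_sz) goto lab1;
     lab2:  */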
9119 tree idx = create_tmp_var (size_type_node);
9120 gimple_seq_add_stmt (end, gimple_build_assign (idx, size_zero_node));
9121 tree num_thr_sz = create_tmp_var (size_type_node);
9122 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
9123 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
9124 tree lab3 = NULL_TREE, lab7 = NULL_TREE;
9125 gimple *g;
9126 if (code == OMP_FOR || code == OMP_SECTIONS || code == OMP_SCOPE)
9128 /* For worksharing constructs or scope, only perform it in the master
9129 thread, with the exception of cancelled implicit barriers, where only
9130 the current thread is handled. */
9131 tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
9132 t = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
9133 tree thr_num = create_tmp_var (integer_type_node);
9134 g = gimple_build_call (t, 0);
9135 gimple_call_set_lhs (g, thr_num);
9136 gimple_seq_add_stmt (end, g);
9137 if (cancellable)
9139 tree c;
9140 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
9141 tree lab6 = create_artificial_label (UNKNOWN_LOCATION);
9142 lab3 = create_artificial_label (UNKNOWN_LOCATION);
9143 if (code == OMP_FOR)
9144 c = gimple_omp_for_clauses (ctx->stmt);
9145 else if (code == OMP_SECTIONS)
9146 c = gimple_omp_sections_clauses (ctx->stmt);
9147 else /* if (code == OMP_SCOPE) */
9148 c = gimple_omp_scope_clauses (ctx->stmt);
9149 c = OMP_CLAUSE_DECL (omp_find_clause (c, OMP_CLAUSE__REDUCTEMP_));
9150 cancellable = c;
9151 g = gimple_build_cond (NE_EXPR, c, build_zero_cst (TREE_TYPE (c)),
9152 lab5, lab6);
9153 gimple_seq_add_stmt (end, g);
9154 gimple_seq_add_stmt (end, gimple_build_label (lab5));
9155 g = gimple_build_assign (idx, NOP_EXPR, thr_num);
9156 gimple_seq_add_stmt (end, g);
9157 g = gimple_build_assign (num_thr_sz, PLUS_EXPR, idx,
9158 build_one_cst (TREE_TYPE (idx)));
9159 gimple_seq_add_stmt (end, g);
9160 gimple_seq_add_stmt (end, gimple_build_goto (lab3));
9161 gimple_seq_add_stmt (end, gimple_build_label (lab6));
9163 g = gimple_build_cond (NE_EXPR, thr_num, integer_zero_node, lab2, lab4);
9164 gimple_seq_add_stmt (end, g);
9165 gimple_seq_add_stmt (end, gimple_build_label (lab4));
9167 if (code != OMP_PARALLEL)
9169 t = builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS);
9170 tree num_thr = create_tmp_var (integer_type_node);
9171 g = gimple_build_call (t, 0);
9172 gimple_call_set_lhs (g, num_thr);
9173 gimple_seq_add_stmt (end, g);
9174 g = gimple_build_assign (num_thr_sz, NOP_EXPR, num_thr);
9175 gimple_seq_add_stmt (end, g);
9176 if (cancellable)
9177 gimple_seq_add_stmt (end, gimple_build_label (lab3));
9179 else
9181 tree c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
9182 OMP_CLAUSE__REDUCTEMP_);
9183 t = fold_convert (pointer_sized_int_node, OMP_CLAUSE_DECL (c));
9184 t = fold_convert (size_type_node, t);
9185 gimplify_assign (num_thr_sz, t, end);
9187 t = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (2),
9188 NULL_TREE, NULL_TREE);
9189 tree data = create_tmp_var (pointer_sized_int_node);
9190 gimple_seq_add_stmt (end, gimple_build_assign (data, t));
9191 if (code == OMP_TASKLOOP)
9193 lab7 = create_artificial_label (UNKNOWN_LOCATION);
9194 g = gimple_build_cond (NE_EXPR, data,
9195 build_zero_cst (pointer_sized_int_node),
9196 lab1, lab7);
9197 gimple_seq_add_stmt (end, g);
9199 gimple_seq_add_stmt (end, gimple_build_label (lab1));
9200 tree ptr;
9201 if (TREE_CODE (TYPE_SIZE_UNIT (record_type)) == INTEGER_CST)
9202 ptr = create_tmp_var (build_pointer_type (record_type));
9203 else
9204 ptr = create_tmp_var (ptr_type_node);
9205 gimple_seq_add_stmt (end, gimple_build_assign (ptr, NOP_EXPR, data));
9207 tree field = TYPE_FIELDS (record_type);
9208 cnt = 0;
9209 if (cancellable)
9210 field = DECL_CHAIN (DECL_CHAIN (field));
9211 for (int pass = 0; pass < 2; pass++)
9213 tree decl, type, next;
9214 for (tree c = clauses;
9215 omp_task_reduction_iterate (pass, code, ccode,
9216 &c, &decl, &type, &next); c = next)
9218 tree var = decl, ref;
9219 if (TREE_CODE (decl) == MEM_REF)
9221 var = TREE_OPERAND (var, 0);
9222 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
9223 var = TREE_OPERAND (var, 0);
9224 tree v = var;
9225 if (TREE_CODE (var) == ADDR_EXPR)
9226 var = TREE_OPERAND (var, 0);
9227 else if (TREE_CODE (var) == INDIRECT_REF)
9228 var = TREE_OPERAND (var, 0);
9229 tree orig_var = var;
9230 if (is_variable_sized (var))
9232 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
9233 var = DECL_VALUE_EXPR (var);
9234 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
9235 var = TREE_OPERAND (var, 0);
9236 gcc_assert (DECL_P (var));
9238 t = ref = maybe_lookup_decl_in_outer_ctx (var, ctx);
9239 if (orig_var != var)
9240 gcc_assert (TREE_CODE (v) == ADDR_EXPR);
9241 else if (TREE_CODE (v) == ADDR_EXPR)
9242 t = build_fold_addr_expr (t);
9243 else if (TREE_CODE (v) == INDIRECT_REF)
9244 t = build_fold_indirect_ref (t);
9245 if (TREE_CODE (TREE_OPERAND (decl, 0)) == POINTER_PLUS_EXPR)
9247 tree b = TREE_OPERAND (TREE_OPERAND (decl, 0), 1);
9248 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
9249 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, b);
9251 if (!integer_zerop (TREE_OPERAND (decl, 1)))
9252 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t,
9253 fold_convert (size_type_node,
9254 TREE_OPERAND (decl, 1)));
9256 else
9258 t = ref = maybe_lookup_decl_in_outer_ctx (var, ctx);
9259 if (!omp_privatize_by_reference (decl))
9260 t = build_fold_addr_expr (t);
9262 t = fold_convert (pointer_sized_int_node, t);
9263 seq = NULL;
9264 t = force_gimple_operand (t, &seq, true, NULL_TREE);
9265 gimple_seq_add_seq (start, seq);
9266 r = build4 (ARRAY_REF, pointer_sized_int_node, avar,
9267 size_int (7 + cnt * 3), NULL_TREE, NULL_TREE);
9268 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
9269 t = unshare_expr (byte_position (field));
9270 t = fold_convert (pointer_sized_int_node, t);
9271 ctx->task_reduction_map->put (c, cnt);
9272 ctx->task_reductions.quick_push (TREE_CODE (t) == INTEGER_CST
9273 ? t : NULL_TREE);
9274 seq = NULL;
9275 t = force_gimple_operand (t, &seq, true, NULL_TREE);
9276 gimple_seq_add_seq (start, seq);
9277 r = build4 (ARRAY_REF, pointer_sized_int_node, avar,
9278 size_int (7 + cnt * 3 + 1), NULL_TREE, NULL_TREE);
9279 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
9281 tree bfield = DECL_CHAIN (field);
9282 tree cond;
9283 if (code == OMP_PARALLEL
9284 || code == OMP_FOR
9285 || code == OMP_SECTIONS
9286 || code == OMP_SCOPE)
9287 /* In parallel, worksharing or scope constructs, all threads
9288 unconditionally initialize all their task reduction private variables. */
9289 cond = boolean_true_node;
9290 else if (TREE_TYPE (ptr) == ptr_type_node)
9292 cond = build2 (POINTER_PLUS_EXPR, ptr_type_node, ptr,
9293 unshare_expr (byte_position (bfield)));
9294 seq = NULL;
9295 cond = force_gimple_operand (cond, &seq, true, NULL_TREE);
9296 gimple_seq_add_seq (end, seq);
9297 tree pbool = build_pointer_type (TREE_TYPE (bfield));
9298 cond = build2 (MEM_REF, TREE_TYPE (bfield), cond,
9299 build_int_cst (pbool, 0));
9301 else
9302 cond = build3 (COMPONENT_REF, TREE_TYPE (bfield),
9303 build_simple_mem_ref (ptr), bfield, NULL_TREE);
9304 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
9305 tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
9306 tree condv = create_tmp_var (boolean_type_node);
9307 gimple_seq_add_stmt (end, gimple_build_assign (condv, cond));
9308 g = gimple_build_cond (NE_EXPR, condv, boolean_false_node,
9309 lab3, lab4);
9310 gimple_seq_add_stmt (end, g);
9311 gimple_seq_add_stmt (end, gimple_build_label (lab3));
9312 if (cancellable && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) == NULL_TREE)
9314 /* If this reduction doesn't need destruction and the parallel
9315 has been cancelled, there is nothing to do for this
9316 reduction, so jump around the merge operation. */
9317 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
9318 g = gimple_build_cond (NE_EXPR, cancellable,
9319 build_zero_cst (TREE_TYPE (cancellable)),
9320 lab4, lab5);
9321 gimple_seq_add_stmt (end, g);
9322 gimple_seq_add_stmt (end, gimple_build_label (lab5));
9325 tree new_var;
9326 if (TREE_TYPE (ptr) == ptr_type_node)
9328 new_var = build2 (POINTER_PLUS_EXPR, ptr_type_node, ptr,
9329 unshare_expr (byte_position (field)));
9330 seq = NULL;
9331 new_var = force_gimple_operand (new_var, &seq, true, NULL_TREE);
9332 gimple_seq_add_seq (end, seq);
9333 tree pbool = build_pointer_type (TREE_TYPE (field));
9334 new_var = build2 (MEM_REF, TREE_TYPE (field), new_var,
9335 build_int_cst (pbool, 0));
9337 else
9338 new_var = build3 (COMPONENT_REF, TREE_TYPE (field),
9339 build_simple_mem_ref (ptr), field, NULL_TREE);
9341 enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
9342 if (TREE_CODE (decl) != MEM_REF
9343 && omp_privatize_by_reference (decl))
9344 ref = build_simple_mem_ref (ref);
9345 /* reduction(-:var) sums up the partial results, so it acts
9346 identically to reduction(+:var). */
9347 if (rcode == MINUS_EXPR)
9348 rcode = PLUS_EXPR;
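/* E.g. for reduction(-:s), each thread's private copy starts at the
   identity 0 and accumulates subtractions; summing the partials into
   the original s therefore matches what the sequential loop computes. */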
9349 if (TREE_CODE (decl) == MEM_REF)
9351 tree type = TREE_TYPE (new_var);
9352 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
9353 tree i = create_tmp_var (TREE_TYPE (v));
9354 tree ptype = build_pointer_type (TREE_TYPE (type));
9355 if (DECL_P (v))
9357 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
9358 tree vv = create_tmp_var (TREE_TYPE (v));
9359 gimplify_assign (vv, v, start);
9360 v = vv;
9362 ref = build4 (ARRAY_REF, pointer_sized_int_node, avar,
9363 size_int (7 + cnt * 3), NULL_TREE, NULL_TREE);
9364 new_var = build_fold_addr_expr (new_var);
9365 new_var = fold_convert (ptype, new_var);
9366 ref = fold_convert (ptype, ref);
9367 tree m = create_tmp_var (ptype);
9368 gimplify_assign (m, new_var, end);
9369 new_var = m;
9370 m = create_tmp_var (ptype);
9371 gimplify_assign (m, ref, end);
9372 ref = m;
9373 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), end);
9374 tree body = create_artificial_label (UNKNOWN_LOCATION);
9375 tree endl = create_artificial_label (UNKNOWN_LOCATION);
9376 gimple_seq_add_stmt (end, gimple_build_label (body));
9377 tree priv = build_simple_mem_ref (new_var);
9378 tree out = build_simple_mem_ref (ref);
9379 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
9381 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
9382 tree decl_placeholder
9383 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
9384 tree lab6 = NULL_TREE;
9385 if (cancellable)
9387 /* If this reduction needs destruction and the parallel
9388 has been cancelled, jump around the merge operation
9389 to the destruction. */
9390 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
9391 lab6 = create_artificial_label (UNKNOWN_LOCATION);
9392 tree zero = build_zero_cst (TREE_TYPE (cancellable));
9393 g = gimple_build_cond (NE_EXPR, cancellable, zero,
9394 lab6, lab5);
9395 gimple_seq_add_stmt (end, g);
9396 gimple_seq_add_stmt (end, gimple_build_label (lab5));
9398 SET_DECL_VALUE_EXPR (placeholder, out);
9399 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
9400 SET_DECL_VALUE_EXPR (decl_placeholder, priv);
9401 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
9402 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
9403 gimple_seq_add_seq (end,
9404 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
9405 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
9406 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
9408 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
9409 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
9411 if (cancellable)
9412 gimple_seq_add_stmt (end, gimple_build_label (lab6));
9413 tree x = lang_hooks.decls.omp_clause_dtor (c, priv);
9414 if (x)
9416 gimple_seq tseq = NULL;
9417 gimplify_stmt (&x, &tseq);
9418 gimple_seq_add_seq (end, tseq);
9421 else
9423 tree x = build2 (rcode, TREE_TYPE (out), out, priv);
9424 out = unshare_expr (out);
9425 gimplify_assign (out, x, end);
9427 gimple *g
9428 = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
9429 TYPE_SIZE_UNIT (TREE_TYPE (type)));
9430 gimple_seq_add_stmt (end, g);
9431 g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
9432 TYPE_SIZE_UNIT (TREE_TYPE (type)));
9433 gimple_seq_add_stmt (end, g);
9434 g = gimple_build_assign (i, PLUS_EXPR, i,
9435 build_int_cst (TREE_TYPE (i), 1));
9436 gimple_seq_add_stmt (end, g);
9437 g = gimple_build_cond (LE_EXPR, i, v, body, endl);
9438 gimple_seq_add_stmt (end, g);
9439 gimple_seq_add_stmt (end, gimple_build_label (endl));
9441 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
9443 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
9444 tree oldv = NULL_TREE;
9445 tree lab6 = NULL_TREE;
9446 if (cancellable)
9448 /* If this reduction needs destruction and the parallel
9449 has been cancelled, jump around the merge operation
9450 to the destruction. */
9451 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
9452 lab6 = create_artificial_label (UNKNOWN_LOCATION);
9453 tree zero = build_zero_cst (TREE_TYPE (cancellable));
9454 g = gimple_build_cond (NE_EXPR, cancellable, zero,
9455 lab6, lab5);
9456 gimple_seq_add_stmt (end, g);
9457 gimple_seq_add_stmt (end, gimple_build_label (lab5));
9459 if (omp_privatize_by_reference (decl)
9460 && !useless_type_conversion_p (TREE_TYPE (placeholder),
9461 TREE_TYPE (ref)))
9462 ref = build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c), ref);
9463 ref = build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c), ref);
9464 tree refv = create_tmp_var (TREE_TYPE (ref));
9465 gimplify_assign (refv, ref, end);
9466 ref = build_simple_mem_ref_loc (OMP_CLAUSE_LOCATION (c), refv);
9467 SET_DECL_VALUE_EXPR (placeholder, ref);
9468 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
9469 tree d = maybe_lookup_decl (decl, ctx);
9470 gcc_assert (d);
9471 if (DECL_HAS_VALUE_EXPR_P (d))
9472 oldv = DECL_VALUE_EXPR (d);
9473 if (omp_privatize_by_reference (var))
9475 tree v = fold_convert (TREE_TYPE (d),
9476 build_fold_addr_expr (new_var));
9477 SET_DECL_VALUE_EXPR (d, v);
9479 else
9480 SET_DECL_VALUE_EXPR (d, new_var);
9481 DECL_HAS_VALUE_EXPR_P (d) = 1;
9482 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
9483 if (oldv)
9484 SET_DECL_VALUE_EXPR (d, oldv);
9485 else
9487 SET_DECL_VALUE_EXPR (d, NULL_TREE);
9488 DECL_HAS_VALUE_EXPR_P (d) = 0;
9490 gimple_seq_add_seq (end, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
9491 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
9492 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
9493 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
9494 if (cancellable)
9495 gimple_seq_add_stmt (end, gimple_build_label (lab6));
9496 tree x = lang_hooks.decls.omp_clause_dtor (c, new_var);
9497 if (x)
9499 gimple_seq tseq = NULL;
9500 gimplify_stmt (&x, &tseq);
9501 gimple_seq_add_seq (end, tseq);
9504 else
9506 tree x = build2 (rcode, TREE_TYPE (ref), ref, new_var);
9507 ref = unshare_expr (ref);
9508 gimplify_assign (ref, x, end);
9510 gimple_seq_add_stmt (end, gimple_build_label (lab4));
9511 ++cnt;
9512 field = DECL_CHAIN (bfield);
9516 if (code == OMP_TASKGROUP)
9518 t = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_REGISTER);
9519 g = gimple_build_call (t, 1, build_fold_addr_expr (avar));
9520 gimple_seq_add_stmt (start, g);
9522 else
9524 tree c;
9525 if (code == OMP_FOR)
9526 c = gimple_omp_for_clauses (ctx->stmt);
9527 else if (code == OMP_SECTIONS)
9528 c = gimple_omp_sections_clauses (ctx->stmt);
9529 else if (code == OMP_SCOPE)
9530 c = gimple_omp_scope_clauses (ctx->stmt);
9531 else
9532 c = gimple_omp_taskreg_clauses (ctx->stmt);
9533 c = omp_find_clause (c, OMP_CLAUSE__REDUCTEMP_);
9534 t = fold_convert (TREE_TYPE (OMP_CLAUSE_DECL (c)),
9535 build_fold_addr_expr (avar));
9536 gimplify_assign (OMP_CLAUSE_DECL (c), t, start);
9539 gimple_seq_add_stmt (end, gimple_build_assign (data, PLUS_EXPR, data, sz));
9540 gimple_seq_add_stmt (end, gimple_build_assign (idx, PLUS_EXPR, idx,
9541 size_one_node));
9542 g = gimple_build_cond (NE_EXPR, idx, num_thr_sz, lab1, lab2);
9543 gimple_seq_add_stmt (end, g);
9544 gimple_seq_add_stmt (end, gimple_build_label (lab2));
9545 if (code == OMP_FOR || code == OMP_SECTIONS || code == OMP_SCOPE)
9547 enum built_in_function bfn
9548 = BUILT_IN_GOMP_WORKSHARE_TASK_REDUCTION_UNREGISTER;
9549 t = builtin_decl_explicit (bfn);
9550 tree c_bool_type = TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (t)));
9551 tree arg;
9552 if (cancellable)
9554 arg = create_tmp_var (c_bool_type);
9555 gimple_seq_add_stmt (end, gimple_build_assign (arg, NOP_EXPR,
9556 cancellable));
9558 else
9559 arg = build_int_cst (c_bool_type, 0);
9560 g = gimple_build_call (t, 1, arg);
9562 else
9564 t = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_UNREGISTER);
9565 g = gimple_build_call (t, 1, build_fold_addr_expr (avar));
9567 gimple_seq_add_stmt (end, g);
9568 if (lab7)
9569 gimple_seq_add_stmt (end, gimple_build_label (lab7));
9570 t = build_constructor (atype, NULL);
9571 TREE_THIS_VOLATILE (t) = 1;
9572 gimple_seq_add_stmt (end, gimple_build_assign (avar, t));
9575 /* Expand code for an OpenMP taskgroup directive. */
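/* Roughly, '#pragma omp taskgroup' with task reductions becomes (a
   hedged sketch; GOMP_taskgroup_end is materialized later, when the
   GIMPLE_OMP_RETURN is expanded):

     GOMP_taskgroup_start ();
     GOMP_taskgroup_reduction_register (&avar);   // if reductions
     <body>
     GOMP_taskgroup_end ();
     <reduction finalization and unregister>      // the dseq below
*/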
9577 static void
9578 lower_omp_taskgroup (gimple_stmt_iterator *gsi_p, omp_context *ctx)
9580 gimple *stmt = gsi_stmt (*gsi_p);
9581 gcall *x;
9582 gbind *bind;
9583 gimple_seq dseq = NULL;
9584 tree block = make_node (BLOCK);
9586 bind = gimple_build_bind (NULL, NULL, block);
9587 gsi_replace (gsi_p, bind, true);
9588 gimple_bind_add_stmt (bind, stmt);
9590 push_gimplify_context ();
9592 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_START), 0);
9594 gimple_bind_add_stmt (bind, x);
9596 lower_omp_task_reductions (ctx, OMP_TASKGROUP,
9597 gimple_omp_taskgroup_clauses (stmt),
9598 gimple_bind_body_ptr (bind), &dseq);
9600 lower_omp (gimple_omp_body_ptr (stmt), ctx);
9601 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
9602 gimple_omp_set_body (stmt, NULL);
9604 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
9605 gimple_bind_add_seq (bind, dseq);
9607 pop_gimplify_context (bind);
9609 gimple_bind_append_vars (bind, ctx->block_vars);
9610 BLOCK_VARS (block) = ctx->block_vars;
9614 /* Fold the OMP_ORDERED_CLAUSES for the OMP_ORDERED in STMT if possible. */
9616 static void
9617 lower_omp_ordered_clauses (gimple_stmt_iterator *gsi_p, gomp_ordered *ord_stmt,
9618 omp_context *ctx)
9620 struct omp_for_data fd;
9621 if (!ctx->outer || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR)
9622 return;
9624 unsigned int len = gimple_omp_for_collapse (ctx->outer->stmt);
9625 struct omp_for_data_loop *loops = XALLOCAVEC (struct omp_for_data_loop, len);
9626 omp_extract_for_data (as_a <gomp_for *> (ctx->outer->stmt), &fd, loops);
9627 if (!fd.ordered)
9628 return;
9630 tree *list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
9631 tree c = gimple_omp_ordered_clauses (ord_stmt);
9632 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
9633 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
9635 /* Merge depend clauses from multiple adjacent
9636 #pragma omp ordered depend(sink:...) constructs
9637 into one #pragma omp ordered depend(sink:...), so that
9638 we can optimize them together. */
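/* E.g. two adjacent directives

     #pragma omp ordered depend(sink: i-1, j)
     #pragma omp ordered depend(sink: i, j-1)

   end up as a single GIMPLE_OMP_ORDERED whose clause chain carries both
   sink vectors, so the folding below can consider them together. */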
9639 gimple_stmt_iterator gsi = *gsi_p;
9640 gsi_next (&gsi);
9641 while (!gsi_end_p (gsi))
9643 gimple *stmt = gsi_stmt (gsi);
9644 if (is_gimple_debug (stmt)
9645 || gimple_code (stmt) == GIMPLE_NOP)
9647 gsi_next (&gsi);
9648 continue;
9650 if (gimple_code (stmt) != GIMPLE_OMP_ORDERED)
9651 break;
9652 gomp_ordered *ord_stmt2 = as_a <gomp_ordered *> (stmt);
9653 c = gimple_omp_ordered_clauses (ord_stmt2);
9654 if (c == NULL_TREE
9655 || OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND
9656 || OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
9657 break;
9658 while (*list_p)
9659 list_p = &OMP_CLAUSE_CHAIN (*list_p);
9660 *list_p = c;
9661 gsi_remove (&gsi, true);
9665 /* Canonicalize sink dependence clauses into one folded clause if
9666 possible.
9668 The basic algorithm is to create a sink vector whose first
9669 element is the GCD of all the first elements, and whose remaining
9670 elements are the minimum of the subsequent columns.
9672 We ignore dependence vectors whose first element is zero because
9673 such dependencies are known to be executed by the same thread.
9675 We take into account the direction of the loop, so a minimum
9676 becomes a maximum if the loop is iterating forwards. We also
9677 ignore sink clauses where the loop direction is unknown, or where
9678 the offsets are clearly invalid because they are not a multiple
9679 of the loop increment.
9681 For example:
9683 #pragma omp for ordered(2)
9684 for (i=0; i < N; ++i)
9685 for (j=0; j < M; ++j)
9687 #pragma omp ordered \
9688 depend(sink:i-8,j-2) \
9689 depend(sink:i,j-1) \ // Completely ignored because i+0.
9690 depend(sink:i-4,j-3) \
9691 depend(sink:i-6,j-4)
9692 #pragma omp ordered depend(source)
9695 Folded clause is:
9697 depend(sink:-gcd(8,4,6),-min(2,3,4))
9698 -or-
9699 depend(sink:-2,-2) */
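/* A standalone sketch of that folding arithmetic (plain C, not GCC
   internals; gcd is a hypothetical helper):

     static long gcd (long a, long b)
     { while (b) { long t = a % b; a = b; b = t; } return a; }

     long first[]  = { 8, 4, 6 };   // |i| offsets, the i+0 one ignored
     long second[] = { 2, 3, 4 };   // j offsets
     long f = first[0], m = second[0];
     for (int n = 1; n < 3; n++)
       {
         f = gcd (f, first[n]);
         m = m < second[n] ? m : second[n];
       }
     // f == 2, m == 2  =>  depend(sink: i-2, j-2)
*/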
9702 /* FIXME: Computing GCDs where the first element is zero is
9703 non-trivial in the presence of collapsed loops. Do this later. */
9704 if (fd.collapse > 1)
9705 return;
9707 wide_int *folded_deps = XALLOCAVEC (wide_int, 2 * len - 1);
9709 /* wide_int is not a POD so it must be default-constructed. */
9710 for (unsigned i = 0; i != 2 * len - 1; ++i)
9711 new (static_cast<void*>(folded_deps + i)) wide_int ();
9713 tree folded_dep = NULL_TREE;
9714 /* TRUE if the first dimension's offset is negative. */
9715 bool neg_offset_p = false;
9717 list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
9718 unsigned int i;
9719 while ((c = *list_p) != NULL)
9721 bool remove = false;
9723 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND);
9724 if (OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
9725 goto next_ordered_clause;
9727 tree vec;
9728 for (vec = OMP_CLAUSE_DECL (c), i = 0;
9729 vec && TREE_CODE (vec) == TREE_LIST;
9730 vec = TREE_CHAIN (vec), ++i)
9732 gcc_assert (i < len);
9734 /* omp_extract_for_data has canonicalized the condition. */
9735 gcc_assert (fd.loops[i].cond_code == LT_EXPR
9736 || fd.loops[i].cond_code == GT_EXPR);
9737 bool forward = fd.loops[i].cond_code == LT_EXPR;
9738 bool maybe_lexically_later = true;
9740 /* While the committee makes up its mind, bail if we have any
9741 non-constant steps. */
9742 if (TREE_CODE (fd.loops[i].step) != INTEGER_CST)
9743 goto lower_omp_ordered_ret;
9745 tree itype = TREE_TYPE (TREE_VALUE (vec));
9746 if (POINTER_TYPE_P (itype))
9747 itype = sizetype;
9748 wide_int offset = wide_int::from (wi::to_wide (TREE_PURPOSE (vec)),
9749 TYPE_PRECISION (itype),
9750 TYPE_SIGN (itype));
9752 /* Ignore invalid offsets that are not multiples of the step. */
9753 if (!wi::multiple_of_p (wi::abs (offset),
9754 wi::abs (wi::to_wide (fd.loops[i].step)),
9755 UNSIGNED))
9757 warning_at (OMP_CLAUSE_LOCATION (c), 0,
9758 "ignoring sink clause with offset that is not "
9759 "a multiple of the loop step");
9760 remove = true;
9761 goto next_ordered_clause;
9764 /* Calculate the first dimension. The first dimension of
9765 the folded dependency vector is the GCD of the first
9766 elements, while ignoring any first elements whose offset
9767 is 0. */
9768 if (i == 0)
9770 /* Ignore dependence vectors whose first dimension is 0. */
9771 if (offset == 0)
9773 remove = true;
9774 goto next_ordered_clause;
9776 else
9778 if (!TYPE_UNSIGNED (itype) && (forward ^ wi::neg_p (offset)))
9780 error_at (OMP_CLAUSE_LOCATION (c),
9781 "first offset must be in opposite direction "
9782 "of loop iterations");
9783 goto lower_omp_ordered_ret;
9785 if (forward)
9786 offset = -offset;
9787 neg_offset_p = forward;
9788 /* Initialize the first time around. */
9789 if (folded_dep == NULL_TREE)
9791 folded_dep = c;
9792 folded_deps[0] = offset;
9794 else
9795 folded_deps[0] = wi::gcd (folded_deps[0],
9796 offset, UNSIGNED);
9799 /* Calculate minimum for the remaining dimensions. */
9800 else
9802 folded_deps[len + i - 1] = offset;
9803 if (folded_dep == c)
9804 folded_deps[i] = offset;
9805 else if (maybe_lexically_later
9806 && !wi::eq_p (folded_deps[i], offset))
9808 if (forward ^ wi::gts_p (folded_deps[i], offset))
9810 unsigned int j;
9811 folded_dep = c;
9812 for (j = 1; j <= i; j++)
9813 folded_deps[j] = folded_deps[len + j - 1];
9815 else
9816 maybe_lexically_later = false;
9820 gcc_assert (i == len);
9822 remove = true;
9824 next_ordered_clause:
9825 if (remove)
9826 *list_p = OMP_CLAUSE_CHAIN (c);
9827 else
9828 list_p = &OMP_CLAUSE_CHAIN (c);
9831 if (folded_dep)
9833 if (neg_offset_p)
9834 folded_deps[0] = -folded_deps[0];
9836 tree itype = TREE_TYPE (TREE_VALUE (OMP_CLAUSE_DECL (folded_dep)));
9837 if (POINTER_TYPE_P (itype))
9838 itype = sizetype;
9840 TREE_PURPOSE (OMP_CLAUSE_DECL (folded_dep))
9841 = wide_int_to_tree (itype, folded_deps[0]);
9842 OMP_CLAUSE_CHAIN (folded_dep) = gimple_omp_ordered_clauses (ord_stmt);
9843 *gimple_omp_ordered_clauses_ptr (ord_stmt) = folded_dep;
9846 lower_omp_ordered_ret:
9848 /* Ordered without clauses is equivalent to #pragma omp ordered threads,
9849 while we want a nop instead if we remove all clauses. */
9850 if (gimple_omp_ordered_clauses (ord_stmt) == NULL_TREE)
9851 gsi_replace (gsi_p, gimple_build_nop (), true);
9855 /* Expand code for an OpenMP ordered directive. */
9857 static void
9858 lower_omp_ordered (gimple_stmt_iterator *gsi_p, omp_context *ctx)
9860 tree block;
9861 gimple *stmt = gsi_stmt (*gsi_p), *g;
9862 gomp_ordered *ord_stmt = as_a <gomp_ordered *> (stmt);
9863 gcall *x;
9864 gbind *bind;
9865 bool simd = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
9866 OMP_CLAUSE_SIMD);
9867 /* FIXME: this should check presence of OMP_CLAUSE__SIMT_ on the enclosing
9868 loop. */
9869 bool maybe_simt
9870 = simd && omp_maybe_offloaded_ctx (ctx) && omp_max_simt_vf () > 1;
9871 bool threads = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
9872 OMP_CLAUSE_THREADS);
9874 if (omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
9875 OMP_CLAUSE_DEPEND))
9877 /* FIXME: This needs to be moved to the expansion to verify various
9878 conditions only testable on cfg with dominators computed, and also
9879 all the depend clauses to be merged still might need to be available
9880 for the runtime checks. */
9881 if (0)
9882 lower_omp_ordered_clauses (gsi_p, ord_stmt, ctx);
9883 return;
9886 push_gimplify_context ();
9888 block = make_node (BLOCK);
9889 bind = gimple_build_bind (NULL, NULL, block);
9890 gsi_replace (gsi_p, bind, true);
9891 gimple_bind_add_stmt (bind, stmt);
9893 if (simd)
9895 x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_START, 1,
9896 build_int_cst (NULL_TREE, threads));
9897 cfun->has_simduid_loops = true;
9899 else
9900 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_START), 0);
9902 gimple_bind_add_stmt (bind, x);
9904 tree counter = NULL_TREE, test = NULL_TREE, body = NULL_TREE;
9905 if (maybe_simt)
9907 counter = create_tmp_var (integer_type_node);
9908 g = gimple_build_call_internal (IFN_GOMP_SIMT_LANE, 0);
9909 gimple_call_set_lhs (g, counter);
9910 gimple_bind_add_stmt (bind, g);
9912 body = create_artificial_label (UNKNOWN_LOCATION);
9913 test = create_artificial_label (UNKNOWN_LOCATION);
9914 gimple_bind_add_stmt (bind, gimple_build_label (body));
9916 tree simt_pred = create_tmp_var (integer_type_node);
9917 g = gimple_build_call_internal (IFN_GOMP_SIMT_ORDERED_PRED, 1, counter);
9918 gimple_call_set_lhs (g, simt_pred);
9919 gimple_bind_add_stmt (bind, g);
9921 tree t = create_artificial_label (UNKNOWN_LOCATION);
9922 g = gimple_build_cond (EQ_EXPR, simt_pred, integer_zero_node, t, test);
9923 gimple_bind_add_stmt (bind, g);
9925 gimple_bind_add_stmt (bind, gimple_build_label (t));
9927 lower_omp (gimple_omp_body_ptr (stmt), ctx);
9928 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
9929 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
9930 gimple_omp_set_body (stmt, NULL);
9932 if (maybe_simt)
9934 gimple_bind_add_stmt (bind, gimple_build_label (test));
9935 g = gimple_build_assign (counter, MINUS_EXPR, counter, integer_one_node);
9936 gimple_bind_add_stmt (bind, g);
9938 tree c = build2 (GE_EXPR, boolean_type_node, counter, integer_zero_node);
9939 tree nonneg = create_tmp_var (integer_type_node);
9940 gimple_seq tseq = NULL;
9941 gimplify_assign (nonneg, fold_convert (integer_type_node, c), &tseq);
9942 gimple_bind_add_seq (bind, tseq);
9944 g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY, 1, nonneg);
9945 gimple_call_set_lhs (g, nonneg);
9946 gimple_bind_add_stmt (bind, g);
9948 tree end = create_artificial_label (UNKNOWN_LOCATION);
9949 g = gimple_build_cond (NE_EXPR, nonneg, integer_zero_node, body, end);
9950 gimple_bind_add_stmt (bind, g);
9952 gimple_bind_add_stmt (bind, gimple_build_label (end));
9954 if (simd)
9955 x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_END, 1,
9956 build_int_cst (NULL_TREE, threads));
9957 else
9958 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_END), 0);
9960 gimple_bind_add_stmt (bind, x);
9962 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
9964 pop_gimplify_context (bind);
9966 gimple_bind_append_vars (bind, ctx->block_vars);
9967 BLOCK_VARS (block) = gimple_bind_vars (bind);
9971 /* Expand code for an OpenMP scan directive and the structured block
9972 before the scan directive. */
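/* For reference, the user-level shape being lowered here is (an
   illustrative example, not taken from this file):

     #pragma omp simd reduction (inscan, +:r)
     for (i = 0; i < n; i++)
       {
         r += a[i];                  // input phase
         #pragma omp scan inclusive (r)
         b[i] = r;                   // scan phase
       }

   With exclusive (r) the two phases trade places: the statements before
   the separating directive form the scan phase instead. */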
9974 static void
9975 lower_omp_scan (gimple_stmt_iterator *gsi_p, omp_context *ctx)
9977 gimple *stmt = gsi_stmt (*gsi_p);
9978 bool has_clauses
9979 = gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt)) != NULL;
9980 tree lane = NULL_TREE;
9981 gimple_seq before = NULL;
9982 omp_context *octx = ctx->outer;
9983 gcc_assert (octx);
9984 if (octx->scan_exclusive && !has_clauses)
9986 gimple_stmt_iterator gsi2 = *gsi_p;
9987 gsi_next (&gsi2);
9988 gimple *stmt2 = gsi_stmt (gsi2);
9989 /* For exclusive scan, swap GIMPLE_OMP_SCAN without clauses
9990 with the following GIMPLE_OMP_SCAN with clauses, so that input_phase,
9991 the one with exclusive clause(s), comes first. */
9992 if (stmt2
9993 && gimple_code (stmt2) == GIMPLE_OMP_SCAN
9994 && gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt2)) != NULL)
9996 gsi_remove (gsi_p, false);
9997 gsi_insert_after (gsi_p, stmt, GSI_SAME_STMT);
9998 ctx = maybe_lookup_ctx (stmt2);
9999 gcc_assert (ctx);
10000 lower_omp_scan (gsi_p, ctx);
10001 return;
10005 bool input_phase = has_clauses ^ octx->scan_inclusive;
10006 bool is_simd = (gimple_code (octx->stmt) == GIMPLE_OMP_FOR
10007 && gimple_omp_for_kind (octx->stmt) == GF_OMP_FOR_KIND_SIMD);
10008 bool is_for = (gimple_code (octx->stmt) == GIMPLE_OMP_FOR
10009 && gimple_omp_for_kind (octx->stmt) == GF_OMP_FOR_KIND_FOR
10010 && !gimple_omp_for_combined_p (octx->stmt));
10011 bool is_for_simd = is_simd && gimple_omp_for_combined_into_p (octx->stmt);
10012 if (is_for_simd && octx->for_simd_scan_phase)
10013 is_simd = false;
10014 if (is_simd)
10015 if (tree c = omp_find_clause (gimple_omp_for_clauses (octx->stmt),
10016 OMP_CLAUSE__SIMDUID_))
10018 tree uid = OMP_CLAUSE__SIMDUID__DECL (c);
10019 lane = create_tmp_var (unsigned_type_node);
10020 tree t = build_int_cst (integer_type_node,
10021 input_phase ? 1
10022 : octx->scan_inclusive ? 2 : 3);
10023 gimple *g
10024 = gimple_build_call_internal (IFN_GOMP_SIMD_LANE, 2, uid, t);
10025 gimple_call_set_lhs (g, lane);
10026 gimple_seq_add_stmt (&before, g);
10029 if (is_simd || is_for)
10031 for (tree c = gimple_omp_for_clauses (octx->stmt);
10032 c; c = OMP_CLAUSE_CHAIN (c))
10033 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
10034 && OMP_CLAUSE_REDUCTION_INSCAN (c))
10036 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
10037 tree var = OMP_CLAUSE_DECL (c);
10038 tree new_var = lookup_decl (var, octx);
10039 tree val = new_var;
10040 tree var2 = NULL_TREE;
10041 tree var3 = NULL_TREE;
10042 tree var4 = NULL_TREE;
10043 tree lane0 = NULL_TREE;
10044 tree new_vard = new_var;
10045 if (omp_privatize_by_reference (var))
10047 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
10048 val = new_var;
10050 if (DECL_HAS_VALUE_EXPR_P (new_vard))
10052 val = DECL_VALUE_EXPR (new_vard);
10053 if (new_vard != new_var)
10055 gcc_assert (TREE_CODE (val) == ADDR_EXPR);
10056 val = TREE_OPERAND (val, 0);
10058 if (TREE_CODE (val) == ARRAY_REF
10059 && VAR_P (TREE_OPERAND (val, 0)))
10061 tree v = TREE_OPERAND (val, 0);
10062 if (lookup_attribute ("omp simd array",
10063 DECL_ATTRIBUTES (v)))
10065 val = unshare_expr (val);
10066 lane0 = TREE_OPERAND (val, 1);
10067 TREE_OPERAND (val, 1) = lane;
10068 var2 = lookup_decl (v, octx);
10069 if (octx->scan_exclusive)
10070 var4 = lookup_decl (var2, octx);
10071 if (input_phase
10072 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
10073 var3 = maybe_lookup_decl (var4 ? var4 : var2, octx);
10074 if (!input_phase)
10076 var2 = build4 (ARRAY_REF, TREE_TYPE (val),
10077 var2, lane, NULL_TREE, NULL_TREE);
10078 TREE_THIS_NOTRAP (var2) = 1;
10079 if (octx->scan_exclusive)
10081 var4 = build4 (ARRAY_REF, TREE_TYPE (val),
10082 var4, lane, NULL_TREE,
10083 NULL_TREE);
10084 TREE_THIS_NOTRAP (var4) = 1;
10087 else
10088 var2 = val;
10091 gcc_assert (var2);
10093 else
10095 var2 = build_outer_var_ref (var, octx);
10096 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
10098 var3 = maybe_lookup_decl (new_vard, octx);
10099 if (var3 == new_vard || var3 == NULL_TREE)
10100 var3 = NULL_TREE;
10101 else if (is_simd && octx->scan_exclusive && !input_phase)
10103 var4 = maybe_lookup_decl (var3, octx);
10104 if (var4 == var3 || var4 == NULL_TREE)
10106 if (TREE_ADDRESSABLE (TREE_TYPE (new_var)))
10108 var4 = var3;
10109 var3 = NULL_TREE;
10111 else
10112 var4 = NULL_TREE;
10116 if (is_simd
10117 && octx->scan_exclusive
10118 && !input_phase
10119 && var4 == NULL_TREE)
10120 var4 = create_tmp_var (TREE_TYPE (val));
10122 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
10124 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
10125 if (input_phase)
10127 if (var3)
10129 /* If we've added a separate identity element
10130 variable, copy it over into val. */
10131 tree x = lang_hooks.decls.omp_clause_assign_op (c, val,
10132 var3);
10133 gimplify_and_add (x, &before);
10135 else if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
10137 /* Otherwise, assign to it the identity element. */
10138 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
10139 if (is_for)
10140 tseq = copy_gimple_seq_and_replace_locals (tseq);
10141 tree ref = build_outer_var_ref (var, octx);
10142 tree x = (DECL_HAS_VALUE_EXPR_P (new_vard)
10143 ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
10144 if (x)
10146 if (new_vard != new_var)
10147 val = build_fold_addr_expr_loc (clause_loc, val);
10148 SET_DECL_VALUE_EXPR (new_vard, val);
10150 SET_DECL_VALUE_EXPR (placeholder, ref);
10151 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
10152 lower_omp (&tseq, octx);
10153 if (x)
10154 SET_DECL_VALUE_EXPR (new_vard, x);
10155 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
10156 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
10157 gimple_seq_add_seq (&before, tseq);
10158 if (is_simd)
10159 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
10162 else if (is_simd)
10164 tree x;
10165 if (octx->scan_exclusive)
10167 tree v4 = unshare_expr (var4);
10168 tree v2 = unshare_expr (var2);
10169 x = lang_hooks.decls.omp_clause_assign_op (c, v4, v2);
10170 gimplify_and_add (x, &before);
10172 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
10173 x = (DECL_HAS_VALUE_EXPR_P (new_vard)
10174 ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
10175 tree vexpr = val;
10176 if (x && new_vard != new_var)
10177 vexpr = build_fold_addr_expr_loc (clause_loc, val);
10178 if (x)
10179 SET_DECL_VALUE_EXPR (new_vard, vexpr);
10180 SET_DECL_VALUE_EXPR (placeholder, var2);
10181 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
10182 lower_omp (&tseq, octx);
10183 gimple_seq_add_seq (&before, tseq);
10184 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
10185 if (x)
10186 SET_DECL_VALUE_EXPR (new_vard, x);
10187 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
10188 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
10189 if (octx->scan_inclusive)
10191 x = lang_hooks.decls.omp_clause_assign_op (c, val,
10192 var2);
10193 gimplify_and_add (x, &before);
10195 else if (lane0 == NULL_TREE)
10197 x = lang_hooks.decls.omp_clause_assign_op (c, val,
10198 var4);
10199 gimplify_and_add (x, &before);
10203 else
10205 if (input_phase)
10207 /* Input phase. Set val to the initializer before
10208 the body. */
10209 tree x = omp_reduction_init (c, TREE_TYPE (new_var));
10210 gimplify_assign (val, x, &before);
10212 else if (is_simd)
10214 /* Scan phase. */
10215 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
10216 if (code == MINUS_EXPR)
10217 code = PLUS_EXPR;
10219 tree x = build2 (code, TREE_TYPE (var2),
10220 unshare_expr (var2), unshare_expr (val));
10221 if (octx->scan_inclusive)
10223 gimplify_assign (unshare_expr (var2), x, &before);
10224 gimplify_assign (val, var2, &before);
10226 else
10228 gimplify_assign (unshare_expr (var4),
10229 unshare_expr (var2), &before);
10230 gimplify_assign (var2, x, &before);
10231 if (lane0 == NULL_TREE)
10232 gimplify_assign (val, var4, &before);
10236 if (octx->scan_exclusive && !input_phase && lane0)
10238 tree vexpr = unshare_expr (var4);
10239 TREE_OPERAND (vexpr, 1) = lane0;
10240 if (new_vard != new_var)
10241 vexpr = build_fold_addr_expr_loc (clause_loc, vexpr);
10242 SET_DECL_VALUE_EXPR (new_vard, vexpr);
10246 if (is_simd && !is_for_simd)
10248 gsi_insert_seq_after (gsi_p, gimple_omp_body (stmt), GSI_SAME_STMT);
10249 gsi_insert_seq_after (gsi_p, before, GSI_SAME_STMT);
10250 gsi_replace (gsi_p, gimple_build_nop (), true);
10251 return;
10253 lower_omp (gimple_omp_body_ptr (stmt), octx);
10254 if (before)
10256 gimple_stmt_iterator gsi = gsi_start_1 (gimple_omp_body_ptr (stmt));
10257 gsi_insert_seq_before (&gsi, before, GSI_SAME_STMT);
10262 /* Gimplify a GIMPLE_OMP_CRITICAL statement. This is a relatively simple
10263 substitution of a couple of function calls. But the NAMED case
10264 requires that languages coordinate a symbol name. It is therefore
10265 best put here in common code. */
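/* E.g. '#pragma omp critical (foo)' is lowered to a pair of calls on a
   common symbol shared by all translation units (a rough sketch):

     GOMP_critical_name_start (&.gomp_critical_user_foo);
     <body>
     GOMP_critical_name_end (&.gomp_critical_user_foo);

   while the unnamed form uses GOMP_critical_start/GOMP_critical_end. */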
10267 static GTY(()) hash_map<tree, tree> *critical_name_mutexes;
10269 static void
10270 lower_omp_critical (gimple_stmt_iterator *gsi_p, omp_context *ctx)
10272 tree block;
10273 tree name, lock, unlock;
10274 gomp_critical *stmt = as_a <gomp_critical *> (gsi_stmt (*gsi_p));
10275 gbind *bind;
10276 location_t loc = gimple_location (stmt);
10277 gimple_seq tbody;
10279 name = gimple_omp_critical_name (stmt);
10280 if (name)
10282 tree decl;
10284 if (!critical_name_mutexes)
10285 critical_name_mutexes = hash_map<tree, tree>::create_ggc (10);
10287 tree *n = critical_name_mutexes->get (name);
10288 if (n == NULL)
10290 char *new_str;
10292 decl = create_tmp_var_raw (ptr_type_node);
10294 new_str = ACONCAT ((".gomp_critical_user_",
10295 IDENTIFIER_POINTER (name), NULL));
10296 DECL_NAME (decl) = get_identifier (new_str);
10297 TREE_PUBLIC (decl) = 1;
10298 TREE_STATIC (decl) = 1;
10299 DECL_COMMON (decl) = 1;
10300 DECL_ARTIFICIAL (decl) = 1;
10301 DECL_IGNORED_P (decl) = 1;
10303 varpool_node::finalize_decl (decl);
10305 critical_name_mutexes->put (name, decl);
10307 else
10308 decl = *n;
10310 /* If '#pragma omp critical' is inside offloaded region or
10311 inside function marked as offloadable, the symbol must be
10312 marked as offloadable too. */
10313 omp_context *octx;
10314 if (cgraph_node::get (current_function_decl)->offloadable)
10315 varpool_node::get_create (decl)->offloadable = 1;
10316 else
10317 for (octx = ctx->outer; octx; octx = octx->outer)
10318 if (is_gimple_omp_offloaded (octx->stmt))
10320 varpool_node::get_create (decl)->offloadable = 1;
10321 break;
10324 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_START);
10325 lock = build_call_expr_loc (loc, lock, 1,
10326 build_fold_addr_expr_loc (loc, decl));
10328 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_END);
10329 unlock = build_call_expr_loc (loc, unlock, 1,
10330 build_fold_addr_expr_loc (loc, decl));
10332 else
10334 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_START);
10335 lock = build_call_expr_loc (loc, lock, 0);
10337 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_END);
10338 unlock = build_call_expr_loc (loc, unlock, 0);
10341 push_gimplify_context ();
10343 block = make_node (BLOCK);
10344 bind = gimple_build_bind (NULL, NULL, block);
10345 gsi_replace (gsi_p, bind, true);
10346 gimple_bind_add_stmt (bind, stmt);
10348 tbody = gimple_bind_body (bind);
10349 gimplify_and_add (lock, &tbody);
10350 gimple_bind_set_body (bind, tbody);
10352 lower_omp (gimple_omp_body_ptr (stmt), ctx);
10353 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
10354 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
10355 gimple_omp_set_body (stmt, NULL);
10357 tbody = gimple_bind_body (bind);
10358 gimplify_and_add (unlock, &tbody);
10359 gimple_bind_set_body (bind, tbody);
10361 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
10363 pop_gimplify_context (bind);
10364 gimple_bind_append_vars (bind, ctx->block_vars);
10365 BLOCK_VARS (block) = gimple_bind_vars (bind);
10368 /* A subroutine of lower_omp_for. Generate code to emit the predicate
10369 for a lastprivate clause. Given a loop control predicate of (V
10370 cond N2), we gate the clause on (!(V cond N2)). The lowered form
10371 is appended to *DLIST, iterator initialization is appended to
10372 *BODY_P. *CLIST is for lastprivate(conditional:) code that needs
10373 to be emitted in a critical section. */
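/* E.g. for 'for (i = 0; i < N; i++)' the LT_EXPR predicate is inverted
   to GE_EXPR, so the guard emitted for a lastprivate clause is roughly

     if (i >= N)          // true only for the thread that ran the
       x = x_private;     // last iteration

   and when the step is +1 or -1 the strict 'i == N' form below is used
   instead, which VRP can fold. */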
10375 static void
10376 lower_omp_for_lastprivate (struct omp_for_data *fd, gimple_seq *body_p,
10377 gimple_seq *dlist, gimple_seq *clist,
10378 struct omp_context *ctx)
10380 tree clauses, cond, vinit;
10381 enum tree_code cond_code;
10382 gimple_seq stmts;
10384 cond_code = fd->loop.cond_code;
10385 cond_code = cond_code == LT_EXPR ? GE_EXPR : LE_EXPR;
10387 /* When possible, use a strict equality expression. This can let VRP
10388 type optimizations deduce the value and remove a copy. */
10389 if (tree_fits_shwi_p (fd->loop.step))
10391 HOST_WIDE_INT step = tree_to_shwi (fd->loop.step);
10392 if (step == 1 || step == -1)
10393 cond_code = EQ_EXPR;
10396 tree n2 = fd->loop.n2;
10397 if (fd->collapse > 1
10398 && TREE_CODE (n2) != INTEGER_CST
10399 && gimple_omp_for_combined_into_p (fd->for_stmt))
10401 struct omp_context *taskreg_ctx = NULL;
10402 if (gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
10404 gomp_for *gfor = as_a <gomp_for *> (ctx->outer->stmt);
10405 if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_FOR
10406 || gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_DISTRIBUTE)
10408 if (gimple_omp_for_combined_into_p (gfor))
10410 gcc_assert (ctx->outer->outer
10411 && is_parallel_ctx (ctx->outer->outer));
10412 taskreg_ctx = ctx->outer->outer;
10414 else
10416 struct omp_for_data outer_fd;
10417 omp_extract_for_data (gfor, &outer_fd, NULL);
10418 n2 = fold_convert (TREE_TYPE (n2), outer_fd.loop.n2);
10421 else if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_TASKLOOP)
10422 taskreg_ctx = ctx->outer->outer;
10424 else if (is_taskreg_ctx (ctx->outer))
10425 taskreg_ctx = ctx->outer;
10426 if (taskreg_ctx)
10428 int i;
10429 tree taskreg_clauses
10430 = gimple_omp_taskreg_clauses (taskreg_ctx->stmt);
10431 tree innerc = omp_find_clause (taskreg_clauses,
10432 OMP_CLAUSE__LOOPTEMP_);
10433 gcc_assert (innerc);
10434 int count = fd->collapse;
10435 if (fd->non_rect
10436 && fd->last_nonrect == fd->first_nonrect + 1)
10437 if (tree v = gimple_omp_for_index (fd->for_stmt, fd->last_nonrect))
10438 if (!TYPE_UNSIGNED (TREE_TYPE (v)))
10439 count += 4;
10440 for (i = 0; i < count; i++)
10442 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
10443 OMP_CLAUSE__LOOPTEMP_);
10444 gcc_assert (innerc);
10446 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
10447 OMP_CLAUSE__LOOPTEMP_);
10448 if (innerc)
10449 n2 = fold_convert (TREE_TYPE (n2),
10450 lookup_decl (OMP_CLAUSE_DECL (innerc),
10451 taskreg_ctx));
10454 cond = build2 (cond_code, boolean_type_node, fd->loop.v, n2);
10456 clauses = gimple_omp_for_clauses (fd->for_stmt);
10457 stmts = NULL;
10458 lower_lastprivate_clauses (clauses, cond, body_p, &stmts, clist, ctx);
10459 if (!gimple_seq_empty_p (stmts))
10461 gimple_seq_add_seq (&stmts, *dlist);
10462 *dlist = stmts;
10464 /* Optimize: v = 0; is usually cheaper than v = some_other_constant. */
10465 vinit = fd->loop.n1;
10466 if (cond_code == EQ_EXPR
10467 && tree_fits_shwi_p (fd->loop.n2)
10468 && ! integer_zerop (fd->loop.n2))
10469 vinit = build_int_cst (TREE_TYPE (fd->loop.v), 0);
10470 else
10471 vinit = unshare_expr (vinit);
10473 /* Initialize the iterator variable, so that threads that don't execute
10474 any iterations don't execute the lastprivate clauses by accident. */
10475 gimplify_assign (fd->loop.v, vinit, body_p);
10479 /* OpenACC privatization.
10481 Or, in other words, *sharing* at the respective OpenACC level of
10482 parallelism.
10484 From a correctness perspective, a non-addressable variable can't be accessed
10485 outside the current thread, so it can go in a (faster than shared memory)
10486 register -- though that register may need to be broadcast in some
10487 circumstances. A variable can only meaningfully be "shared" across workers
10488 or vector lanes if its address is taken, e.g. by a call to an atomic
10489 builtin.
10491 From an optimisation perspective, the answer might be fuzzier: maybe
10492 sometimes, using shared memory directly would be faster than
10493 broadcasting. */
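/* E.g. in

     #pragma acc parallel loop private (x)
     for (...) { ... __atomic_fetch_add (&x, 1, ...); ... }

   'x' is addressable and therefore collected as a candidate below,
   while a non-addressable private variable needs no adjustment since it
   cannot be accessed outside the current thread anyway.  (Illustrative
   example, not from this file.) */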
10495 static void
10496 oacc_privatization_begin_diagnose_var (const dump_flags_t l_dump_flags,
10497 const location_t loc, const tree c,
10498 const tree decl)
10500 const dump_user_location_t d_u_loc
10501 = dump_user_location_t::from_location_t (loc);
10502 /* PR100695 "Format decoder, quoting in 'dump_printf' etc." */
10503 #if __GNUC__ >= 10
10504 # pragma GCC diagnostic push
10505 # pragma GCC diagnostic ignored "-Wformat"
10506 #endif
10507 dump_printf_loc (l_dump_flags, d_u_loc,
10508 "variable %<%T%> ", decl);
10509 #if __GNUC__ >= 10
10510 # pragma GCC diagnostic pop
10511 #endif
10512 if (c)
10513 dump_printf (l_dump_flags,
10514 "in %qs clause ",
10515 omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
10516 else
10517 dump_printf (l_dump_flags,
10518 "declared in block ");
10521 static bool
10522 oacc_privatization_candidate_p (const location_t loc, const tree c,
10523 const tree decl)
10525 dump_flags_t l_dump_flags = get_openacc_privatization_dump_flags ();
10527 /* There is some differentiation depending on block vs. clause. */
10528 bool block = !c;
10530 bool res = true;
10532 if (res && !VAR_P (decl))
10534 res = false;
10536 if (dump_enabled_p ())
10538 oacc_privatization_begin_diagnose_var (l_dump_flags, loc, c, decl);
10539 dump_printf (l_dump_flags,
10540 "potentially has improper OpenACC privatization level: %qs\n",
10541 get_tree_code_name (TREE_CODE (decl)));
10545 if (res && block && TREE_STATIC (decl))
10547 res = false;
10549 if (dump_enabled_p ())
10551 oacc_privatization_begin_diagnose_var (l_dump_flags, loc, c, decl);
10552 dump_printf (l_dump_flags,
10553 "isn%'t candidate for adjusting OpenACC privatization level: %s\n",
10554 "static");
10558 if (res && block && DECL_EXTERNAL (decl))
10560 res = false;
10562 if (dump_enabled_p ())
10564 oacc_privatization_begin_diagnose_var (l_dump_flags, loc, c, decl);
10565 dump_printf (l_dump_flags,
10566 "isn%'t candidate for adjusting OpenACC privatization level: %s\n",
10567 "external");
10571 if (res && !TREE_ADDRESSABLE (decl))
10573 res = false;
10575 if (dump_enabled_p ())
10577 oacc_privatization_begin_diagnose_var (l_dump_flags, loc, c, decl);
10578 dump_printf (l_dump_flags,
10579 "isn%'t candidate for adjusting OpenACC privatization level: %s\n",
10580 "not addressable");
10584 if (res)
10586 if (dump_enabled_p ())
10588 oacc_privatization_begin_diagnose_var (l_dump_flags, loc, c, decl);
10589 dump_printf (l_dump_flags,
10590 "is candidate for adjusting OpenACC privatization level\n");
10594 if (dump_file && (dump_flags & TDF_DETAILS))
10596 print_generic_decl (dump_file, decl, dump_flags);
10597 fprintf (dump_file, "\n");
10600 return res;
10603 /* Scan CLAUSES for candidates for adjusting OpenACC privatization level in
10604 CTX. */
10606 static void
10607 oacc_privatization_scan_clause_chain (omp_context *ctx, tree clauses)
10609 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
10610 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE)
10612 tree decl = OMP_CLAUSE_DECL (c);
10614 if (!oacc_privatization_candidate_p (OMP_CLAUSE_LOCATION (c), c, decl))
10615 continue;
10617 gcc_checking_assert (!ctx->oacc_privatization_candidates.contains (decl));
10618 ctx->oacc_privatization_candidates.safe_push (decl);
10622 /* Scan DECLS for candidates for adjusting OpenACC privatization level in
10623 CTX. */
10625 static void
10626 oacc_privatization_scan_decl_chain (omp_context *ctx, tree decls)
10628 for (tree decl = decls; decl; decl = DECL_CHAIN (decl))
10630 if (!oacc_privatization_candidate_p (gimple_location (ctx->stmt), NULL, decl))
10631 continue;
10633 gcc_checking_assert (!ctx->oacc_privatization_candidates.contains (decl));
10634 ctx->oacc_privatization_candidates.safe_push (decl);
10638 /* Callback for walk_gimple_seq. Find #pragma omp scan statement. */
10640 static tree
10641 omp_find_scan (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
10642 struct walk_stmt_info *wi)
10644 gimple *stmt = gsi_stmt (*gsi_p);
10646 *handled_ops_p = true;
10647 switch (gimple_code (stmt))
10649 WALK_SUBSTMTS;
10651 case GIMPLE_OMP_FOR:
10652 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_SIMD
10653 && gimple_omp_for_combined_into_p (stmt))
10654 *handled_ops_p = false;
10655 break;
10657 case GIMPLE_OMP_SCAN:
10658 *(gimple_stmt_iterator *) (wi->info) = *gsi_p;
10659 return integer_zero_node;
10660 default:
10661 break;
10663 return NULL;
10666 /* Helper function for lower_omp_for: add transformations for a worksharing
10667 loop with scan directives inside of it.
10668 For a worksharing loop not combined with simd, transform:
10669 #pragma omp for reduction(inscan,+:r) private(i)
10670 for (i = 0; i < n; i = i + 1)
10673 update (r);
10675 #pragma omp scan inclusive(r)
10677 use (r);
10681 into two worksharing loops + code to merge results:
10683 num_threads = omp_get_num_threads ();
10684 thread_num = omp_get_thread_num ();
10685 if (thread_num == 0) goto <D.2099>; else goto <D.2100>;
10686 <D.2099>:
10687 var2 = r;
10688 goto <D.2101>;
10689 <D.2100>:
10690 // For UDRs this is UDR init, or if ctors are needed, copy from
10691 // var3 that has been constructed to contain the neutral element.
10692 var2 = 0;
10693 <D.2101>:
10694 ivar = 0;
10695 // The _scantemp_ clauses will arrange for rpriva to be initialized to
10696 // a shared array with num_threads elements and rprivb to a local array
10697 // with a number of elements equal to the number of (contiguous) iterations
10698 // the current thread will perform. The controlb and controlp variables are
10699 // temporaries to handle deallocation of rprivb at the end of second
10700 // GOMP_FOR.
10701 #pragma omp for _scantemp_(rpriva) _scantemp_(rprivb) _scantemp_(controlb) \
10702 _scantemp_(controlp) reduction(inscan,+:r) private(i) nowait
10703 for (i = 0; i < n; i = i + 1)
10706 // For UDRs this is UDR init or copy from var3.
10707 r = 0;
10708 // This is the input phase from user code.
10709 update (r);
10712 // For UDRs this is UDR merge.
10713 var2 = var2 + r;
10714 // Rather than handing it over to the user, save it to the local
10715 // thread's array.
10716 rprivb[ivar] = var2;
10717 // For exclusive scan, the above two statements are swapped.
10718 ivar = ivar + 1;
10721 // And remember this thread's final value in the shared
10722 // rpriva array.
10723 rpriva[(sizetype) thread_num] = var2;
10724 // If more than one thread, compute the inclusive parallel scan of
10725 // the rpriva array using a work-efficient prefix sum.
10726 if (num_threads > 1) goto <D.2102>; else goto <D.2103>;
10727 <D.2102>:
10728 GOMP_barrier ();
10729 down = 0;
10730 k = 1;
10731 num_threadsu = (unsigned int) num_threads;
10732 thread_numup1 = (unsigned int) thread_num + 1;
10733 <D.2108>:
10734 twok = k << 1;
10735 if (twok > num_threadsu) goto <D.2110>; else goto <D.2111>;
10736 <D.2110>:
10737 down = 4294967295;
10738 k = k >> 1;
10739 if (k == num_threadsu) goto <D.2112>; else goto <D.2111>;
10740 <D.2112>:
10741 k = k >> 1;
10742 <D.2111>:
10743 twok = k << 1;
10744 cplx = .MUL_OVERFLOW (thread_numup1, twok);
10745 mul = REALPART_EXPR <cplx>;
10746 ovf = IMAGPART_EXPR <cplx>;
10747 if (ovf == 0) goto <D.2116>; else goto <D.2117>;
10748 <D.2116>:
10749 andv = k & down;
10750 andvm1 = andv + 4294967295;
10751 l = mul + andvm1;
10752 if (l < num_threadsu) goto <D.2120>; else goto <D.2117>;
10753 <D.2120>:
10754 // For UDRs this is UDR merge, performed using the var2 variable as a temporary,
10755 // i.e. var2 = rpriva[l - k]; UDR merge (var2, rpriva[l]); rpriva[l] = var2;
10756 rpriva[l] = rpriva[l - k] + rpriva[l];
10757 <D.2117>:
10758 if (down == 0) goto <D.2121>; else goto <D.2122>;
10759 <D.2121>:
10760 k = k << 1;
10761 goto <D.2123>;
10762 <D.2122>:
10763 k = k >> 1;
10764 <D.2123>:
10765 GOMP_barrier ();
10766 if (k != 0) goto <D.2108>; else goto <D.2103>;
10767 <D.2103>:
10768 if (thread_num == 0) goto <D.2124>; else goto <D.2125>;
10769 <D.2124>:
10770 // For UDRs this is UDR init or copy from var3.
10771 var2 = 0;
10772 goto <D.2126>;
10773 <D.2125>:
10774 var2 = rpriva[thread_num - 1];
10775 <D.2126>:
10776 ivar = 0;
10777 #pragma omp for _scantemp_(controlb) _scantemp_(controlp) \
10778 reduction(inscan,+:r) private(i)
10779 for (i = 0; i < n; i = i + 1)
10782 // For UDRs, this is r = var2; UDR merge (r, rprivb[ivar]);
10783 r = var2 + rprivb[ivar];
10786 // This is the scan phase from user code.
10787 use (r);
10788 // Plus a bump of the iterator.
10789 ivar = ivar + 1;
10791 } */
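/* The cross-thread combine over rpriva above is a work-efficient
   parallel inclusive prefix sum; done serially it would simply be (a
   sketch, not the generated GIMPLE):

     for (int t = 1; t < num_threads; t++)
       rpriva[t] += rpriva[t - 1];

   The generated loop instead runs O(log num_threads) barrier-separated
   rounds, each thread combining rpriva[l - k] into rpriva[l]. */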
10793 static void
10794 lower_omp_for_scan (gimple_seq *body_p, gimple_seq *dlist, gomp_for *stmt,
10795 struct omp_for_data *fd, omp_context *ctx)
10797 bool is_for_simd = gimple_omp_for_combined_p (stmt);
10798 gcc_assert (ctx->scan_inclusive || ctx->scan_exclusive);
10800 gimple_seq body = gimple_omp_body (stmt);
10801 gimple_stmt_iterator input1_gsi = gsi_none ();
10802 struct walk_stmt_info wi;
10803 memset (&wi, 0, sizeof (wi));
10804 wi.val_only = true;
10805 wi.info = (void *) &input1_gsi;
10806 walk_gimple_seq_mod (&body, omp_find_scan, NULL, &wi);
10807 gcc_assert (!gsi_end_p (input1_gsi));
10809 gimple *input_stmt1 = gsi_stmt (input1_gsi);
10810 gimple_stmt_iterator gsi = input1_gsi;
10811 gsi_next (&gsi);
10812 gimple_stmt_iterator scan1_gsi = gsi;
10813 gimple *scan_stmt1 = gsi_stmt (gsi);
10814 gcc_assert (scan_stmt1 && gimple_code (scan_stmt1) == GIMPLE_OMP_SCAN);
10816 gimple_seq input_body = gimple_omp_body (input_stmt1);
10817 gimple_seq scan_body = gimple_omp_body (scan_stmt1);
10818 gimple_omp_set_body (input_stmt1, NULL);
10819 gimple_omp_set_body (scan_stmt1, NULL);
10820 gimple_omp_set_body (stmt, NULL);
10822 gomp_for *new_stmt = as_a <gomp_for *> (gimple_copy (stmt));
10823 gimple_seq new_body = copy_gimple_seq_and_replace_locals (body);
10824 gimple_omp_set_body (stmt, body);
10825 gimple_omp_set_body (input_stmt1, input_body);
10827 gimple_stmt_iterator input2_gsi = gsi_none ();
10828 memset (&wi, 0, sizeof (wi));
10829 wi.val_only = true;
10830 wi.info = (void *) &input2_gsi;
10831 walk_gimple_seq_mod (&new_body, omp_find_scan, NULL, &wi);
10832 gcc_assert (!gsi_end_p (input2_gsi));
10834 gimple *input_stmt2 = gsi_stmt (input2_gsi);
10835 gsi = input2_gsi;
10836 gsi_next (&gsi);
10837 gimple_stmt_iterator scan2_gsi = gsi;
10838 gimple *scan_stmt2 = gsi_stmt (gsi);
10839 gcc_assert (scan_stmt2 && gimple_code (scan_stmt2) == GIMPLE_OMP_SCAN);
10840 gimple_omp_set_body (scan_stmt2, scan_body);
10842 gimple_stmt_iterator input3_gsi = gsi_none ();
10843 gimple_stmt_iterator scan3_gsi = gsi_none ();
10844 gimple_stmt_iterator input4_gsi = gsi_none ();
10845 gimple_stmt_iterator scan4_gsi = gsi_none ();
10846 gimple *input_stmt3 = NULL, *scan_stmt3 = NULL;
10847 gimple *input_stmt4 = NULL, *scan_stmt4 = NULL;
10848 omp_context *input_simd_ctx = NULL, *scan_simd_ctx = NULL;
10849 if (is_for_simd)
10851 memset (&wi, 0, sizeof (wi));
10852 wi.val_only = true;
10853 wi.info = (void *) &input3_gsi;
10854 walk_gimple_seq_mod (&input_body, omp_find_scan, NULL, &wi);
10855 gcc_assert (!gsi_end_p (input3_gsi));
10857 input_stmt3 = gsi_stmt (input3_gsi);
10858 gsi = input3_gsi;
10859 gsi_next (&gsi);
10860 scan3_gsi = gsi;
10861 scan_stmt3 = gsi_stmt (gsi);
10862 gcc_assert (scan_stmt3 && gimple_code (scan_stmt3) == GIMPLE_OMP_SCAN);
10864 memset (&wi, 0, sizeof (wi));
10865 wi.val_only = true;
10866 wi.info = (void *) &input4_gsi;
10867 walk_gimple_seq_mod (&scan_body, omp_find_scan, NULL, &wi);
10868 gcc_assert (!gsi_end_p (input4_gsi));
10870 input_stmt4 = gsi_stmt (input4_gsi);
10871 gsi = input4_gsi;
10872 gsi_next (&gsi);
10873 scan4_gsi = gsi;
10874 scan_stmt4 = gsi_stmt (gsi);
10875 gcc_assert (scan_stmt4 && gimple_code (scan_stmt4) == GIMPLE_OMP_SCAN);
10877 input_simd_ctx = maybe_lookup_ctx (input_stmt3)->outer;
10878 scan_simd_ctx = maybe_lookup_ctx (input_stmt4)->outer;
10881 tree num_threads = create_tmp_var (integer_type_node);
10882 tree thread_num = create_tmp_var (integer_type_node);
10883 tree nthreads_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS);
10884 tree threadnum_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
10885 gimple *g = gimple_build_call (nthreads_decl, 0);
10886 gimple_call_set_lhs (g, num_threads);
10887 gimple_seq_add_stmt (body_p, g);
10888 g = gimple_build_call (threadnum_decl, 0);
10889 gimple_call_set_lhs (g, thread_num);
10890 gimple_seq_add_stmt (body_p, g);
10892 tree ivar = create_tmp_var (sizetype);
10893 tree new_clauses1 = NULL_TREE, new_clauses2 = NULL_TREE;
10894 tree *cp1 = &new_clauses1, *cp2 = &new_clauses2;
10895 tree k = create_tmp_var (unsigned_type_node);
10896 tree l = create_tmp_var (unsigned_type_node);
10898 gimple_seq clist = NULL, mdlist = NULL;
10899 gimple_seq thr01_list = NULL, thrn1_list = NULL;
10900 gimple_seq thr02_list = NULL, thrn2_list = NULL;
10901 gimple_seq scan1_list = NULL, input2_list = NULL;
10902 gimple_seq last_list = NULL, reduc_list = NULL;
10903 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
10904 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
10905 && OMP_CLAUSE_REDUCTION_INSCAN (c))
10907 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
10908 tree var = OMP_CLAUSE_DECL (c);
10909 tree new_var = lookup_decl (var, ctx);
10910 tree var3 = NULL_TREE;
10911 tree new_vard = new_var;
10912 if (omp_privatize_by_reference (var))
10913 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
10914 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
10916 var3 = maybe_lookup_decl (new_vard, ctx);
10917 if (var3 == new_vard)
10918 var3 = NULL_TREE;
10921 tree ptype = build_pointer_type (TREE_TYPE (new_var));
10922 tree rpriva = create_tmp_var (ptype);
10923 tree nc = build_omp_clause (clause_loc, OMP_CLAUSE__SCANTEMP_);
10924 OMP_CLAUSE_DECL (nc) = rpriva;
10925 *cp1 = nc;
10926 cp1 = &OMP_CLAUSE_CHAIN (nc);
10928 tree rprivb = create_tmp_var (ptype);
10929 nc = build_omp_clause (clause_loc, OMP_CLAUSE__SCANTEMP_);
10930 OMP_CLAUSE_DECL (nc) = rprivb;
10931 OMP_CLAUSE__SCANTEMP__ALLOC (nc) = 1;
10932 *cp1 = nc;
10933 cp1 = &OMP_CLAUSE_CHAIN (nc);
10935 tree var2 = create_tmp_var_raw (TREE_TYPE (new_var));
10936 if (new_vard != new_var)
10937 TREE_ADDRESSABLE (var2) = 1;
10938 gimple_add_tmp_var (var2);
10940 tree x = fold_convert_loc (clause_loc, sizetype, thread_num);
10941 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
10942 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
10943 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
10944 tree rpriva_ref = build_simple_mem_ref_loc (clause_loc, x);
10946 x = fold_build2_loc (clause_loc, PLUS_EXPR, integer_type_node,
10947 thread_num, integer_minus_one_node);
10948 x = fold_convert_loc (clause_loc, sizetype, x);
10949 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
10950 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
10951 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
10952 tree rprivam1_ref = build_simple_mem_ref_loc (clause_loc, x);
10954 x = fold_convert_loc (clause_loc, sizetype, l);
10955 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
10956 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
10957 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
10958 tree rprival_ref = build_simple_mem_ref_loc (clause_loc, x);
10960 x = fold_build2_loc (clause_loc, MINUS_EXPR, unsigned_type_node, l, k);
10961 x = fold_convert_loc (clause_loc, sizetype, x);
10962 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
10963 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
10964 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
10965 tree rprivalmk_ref = build_simple_mem_ref_loc (clause_loc, x);
10967 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, ivar,
10968 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
10969 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rprivb), rprivb, x);
10970 tree rprivb_ref = build_simple_mem_ref_loc (clause_loc, x);
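/* In array notation the references built above are (a sketch; the
   actual accesses are scaled POINTER_PLUS_EXPRs):

     rpriva_ref    = rpriva[thread_num];
     rprivam1_ref  = rpriva[thread_num - 1];
     rprival_ref   = rpriva[l];
     rprivalmk_ref = rpriva[l - k];
     rprivb_ref    = rprivb[ivar];

   RPRIVA holds one accumulator per thread and is what the inter-thread
   scan operates on; RPRIVB holds one slot per iteration executed by
   the current thread, filled during the first pass and consumed during
   the second.  */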
10972 tree var4 = is_for_simd ? new_var : var2;
10973 tree var5 = NULL_TREE, var6 = NULL_TREE;
10974 if (is_for_simd)
10976 var5 = lookup_decl (var, input_simd_ctx);
10977 var6 = lookup_decl (var, scan_simd_ctx);
10978 if (new_vard != new_var)
10980 var5 = build_simple_mem_ref_loc (clause_loc, var5);
10981 var6 = build_simple_mem_ref_loc (clause_loc, var6);
10984 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
10986 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
10987 tree val = var2;
10989 x = lang_hooks.decls.omp_clause_default_ctor
10990 (c, var2, build_outer_var_ref (var, ctx));
10991 if (x)
10992 gimplify_and_add (x, &clist);
10994 x = build_outer_var_ref (var, ctx);
10995 x = lang_hooks.decls.omp_clause_assign_op (c, unshare_expr (var4),
10997 gimplify_and_add (x, &thr01_list);
10999 tree y = (DECL_HAS_VALUE_EXPR_P (new_vard)
11000 ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
11001 if (var3)
11003 x = unshare_expr (var4);
11004 x = lang_hooks.decls.omp_clause_assign_op (c, x, var3);
11005 gimplify_and_add (x, &thrn1_list);
11006 x = unshare_expr (var4);
11007 x = lang_hooks.decls.omp_clause_assign_op (c, x, var3);
11008 gimplify_and_add (x, &thr02_list);
11010 else if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
11012 /* Otherwise, assign to it the identity element. */
11013 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
11014 tseq = copy_gimple_seq_and_replace_locals (tseq);
11015 if (!is_for_simd)
11017 if (new_vard != new_var)
11018 val = build_fold_addr_expr_loc (clause_loc, val);
11019 SET_DECL_VALUE_EXPR (new_vard, val);
11020 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
11022 SET_DECL_VALUE_EXPR (placeholder, error_mark_node);
11023 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
11024 lower_omp (&tseq, ctx);
11025 gimple_seq_add_seq (&thrn1_list, tseq);
11026 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
11027 lower_omp (&tseq, ctx);
11028 gimple_seq_add_seq (&thr02_list, tseq);
11029 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
11030 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
11031 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
11032 if (y)
11033 SET_DECL_VALUE_EXPR (new_vard, y);
11034 else
11036 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
11037 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
11041 x = unshare_expr (var4);
11042 x = lang_hooks.decls.omp_clause_assign_op (c, x, rprivam1_ref);
11043 gimplify_and_add (x, &thrn2_list);
11045 if (is_for_simd)
11047 x = unshare_expr (rprivb_ref);
11048 x = lang_hooks.decls.omp_clause_assign_op (c, x, var5);
11049 gimplify_and_add (x, &scan1_list);
11051 else
11053 if (ctx->scan_exclusive)
11055 x = unshare_expr (rprivb_ref);
11056 x = lang_hooks.decls.omp_clause_assign_op (c, x, var2);
11057 gimplify_and_add (x, &scan1_list);
11060 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
11061 tseq = copy_gimple_seq_and_replace_locals (tseq);
11062 SET_DECL_VALUE_EXPR (placeholder, var2);
11063 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
11064 lower_omp (&tseq, ctx);
11065 gimple_seq_add_seq (&scan1_list, tseq);
11067 if (ctx->scan_inclusive)
11069 x = unshare_expr (rprivb_ref);
11070 x = lang_hooks.decls.omp_clause_assign_op (c, x, var2);
11071 gimplify_and_add (x, &scan1_list);
11075 x = unshare_expr (rpriva_ref);
11076 x = lang_hooks.decls.omp_clause_assign_op (c, x,
11077 unshare_expr (var4));
11078 gimplify_and_add (x, &mdlist);
11080 x = unshare_expr (is_for_simd ? var6 : new_var);
11081 x = lang_hooks.decls.omp_clause_assign_op (c, x, var4);
11082 gimplify_and_add (x, &input2_list);
11084 val = rprivb_ref;
11085 if (new_vard != new_var)
11086 val = build_fold_addr_expr_loc (clause_loc, val);
11088 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
11089 tseq = copy_gimple_seq_and_replace_locals (tseq);
11090 SET_DECL_VALUE_EXPR (new_vard, val);
11091 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
11092 if (is_for_simd)
11094 SET_DECL_VALUE_EXPR (placeholder, var6);
11095 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
11097 else
11098 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
11099 lower_omp (&tseq, ctx);
11100 if (y)
11101 SET_DECL_VALUE_EXPR (new_vard, y);
11102 else
11104 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
11105 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
11107 if (!is_for_simd)
11109 SET_DECL_VALUE_EXPR (placeholder, new_var);
11110 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
11111 lower_omp (&tseq, ctx);
11113 gimple_seq_add_seq (&input2_list, tseq);
11115 x = build_outer_var_ref (var, ctx);
11116 x = lang_hooks.decls.omp_clause_assign_op (c, x, rpriva_ref);
11117 gimplify_and_add (x, &last_list);
11119 x = lang_hooks.decls.omp_clause_assign_op (c, var2, rprivalmk_ref);
11120 gimplify_and_add (x, &reduc_list);
11121 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
11122 tseq = copy_gimple_seq_and_replace_locals (tseq);
11123 val = rprival_ref;
11124 if (new_vard != new_var)
11125 val = build_fold_addr_expr_loc (clause_loc, val);
11126 SET_DECL_VALUE_EXPR (new_vard, val);
11127 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
11128 SET_DECL_VALUE_EXPR (placeholder, var2);
11129 lower_omp (&tseq, ctx);
11130 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
11131 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
11132 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
11133 if (y)
11134 SET_DECL_VALUE_EXPR (new_vard, y);
11135 else
11137 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
11138 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
11140 gimple_seq_add_seq (&reduc_list, tseq);
11141 x = lang_hooks.decls.omp_clause_assign_op (c, rprival_ref, var2);
11142 gimplify_and_add (x, &reduc_list);
11144 x = lang_hooks.decls.omp_clause_dtor (c, var2);
11145 if (x)
11146 gimplify_and_add (x, dlist);
11148 else
11150 x = build_outer_var_ref (var, ctx);
11151 gimplify_assign (unshare_expr (var4), x, &thr01_list);
11153 x = omp_reduction_init (c, TREE_TYPE (new_var));
11154 gimplify_assign (unshare_expr (var4), unshare_expr (x),
11155 &thrn1_list);
11156 gimplify_assign (unshare_expr (var4), x, &thr02_list);
11158 gimplify_assign (unshare_expr (var4), rprivam1_ref, &thrn2_list);
11160 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
11161 if (code == MINUS_EXPR)
11162 code = PLUS_EXPR;
11164 if (is_for_simd)
11165 gimplify_assign (unshare_expr (rprivb_ref), var5, &scan1_list);
11166 else
11168 if (ctx->scan_exclusive)
11169 gimplify_assign (unshare_expr (rprivb_ref), var2,
11170 &scan1_list);
11171 x = build2 (code, TREE_TYPE (new_var), var2, new_var);
11172 gimplify_assign (var2, x, &scan1_list);
11173 if (ctx->scan_inclusive)
11174 gimplify_assign (unshare_expr (rprivb_ref), var2,
11175 &scan1_list);
11178 gimplify_assign (unshare_expr (rpriva_ref), unshare_expr (var4),
11179 &mdlist);
11181 x = build2 (code, TREE_TYPE (new_var), var4, rprivb_ref);
11182 gimplify_assign (is_for_simd ? var6 : new_var, x, &input2_list);
11184 gimplify_assign (build_outer_var_ref (var, ctx), rpriva_ref,
11185 &last_list);
11187 x = build2 (code, TREE_TYPE (new_var), rprivalmk_ref,
11188 unshare_expr (rprival_ref));
11189 gimplify_assign (rprival_ref, x, &reduc_list);
11193 g = gimple_build_assign (ivar, PLUS_EXPR, ivar, size_one_node);
11194 gimple_seq_add_stmt (&scan1_list, g);
11195 g = gimple_build_assign (ivar, PLUS_EXPR, ivar, size_one_node);
11196 gimple_seq_add_stmt (gimple_omp_body_ptr (is_for_simd
11197 ? scan_stmt4 : scan_stmt2), g);
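/* IVAR is the running per-thread iteration count: the two increments
   just added advance it once per iteration of the first-pass scan body
   and once per iteration of the second pass, and it is reset to zero
   before each pass below, so rprivb[ivar] always names the slot of the
   current iteration.  */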
11199 tree controlb = create_tmp_var (boolean_type_node);
11200 tree controlp = create_tmp_var (ptr_type_node);
11201 tree nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
11202 OMP_CLAUSE_DECL (nc) = controlb;
11203 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
11204 *cp1 = nc;
11205 cp1 = &OMP_CLAUSE_CHAIN (nc);
11206 nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
11207 OMP_CLAUSE_DECL (nc) = controlp;
11208 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
11209 *cp1 = nc;
11210 cp1 = &OMP_CLAUSE_CHAIN (nc);
11211 nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
11212 OMP_CLAUSE_DECL (nc) = controlb;
11213 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
11214 *cp2 = nc;
11215 cp2 = &OMP_CLAUSE_CHAIN (nc);
11216 nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
11217 OMP_CLAUSE_DECL (nc) = controlp;
11218 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
11219 *cp2 = nc;
11220 cp2 = &OMP_CLAUSE_CHAIN (nc);
11222 *cp1 = gimple_omp_for_clauses (stmt);
11223 gimple_omp_for_set_clauses (stmt, new_clauses1);
11224 *cp2 = gimple_omp_for_clauses (new_stmt);
11225 gimple_omp_for_set_clauses (new_stmt, new_clauses2);
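/* The _scantemp_ clauses accumulated in NEW_CLAUSES1 and NEW_CLAUSES2
   are prepended to the clause lists of the two passes: they direct the
   expansion pass to materialize rpriva and rprivb (the clause marked
   OMP_CLAUSE__SCANTEMP__ALLOC names the per-iteration buffer, which
   may require heap allocation), while CONTROLB and CONTROLP, marked
   OMP_CLAUSE__SCANTEMP__CONTROL and present on both passes, let the
   expansion share that buffer's allocation state between the two
   loops.  */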
11227 if (is_for_simd)
11229 gimple_seq_add_seq (gimple_omp_body_ptr (scan_stmt3), scan1_list);
11230 gimple_seq_add_seq (gimple_omp_body_ptr (input_stmt4), input2_list);
11232 gsi_insert_seq_after (&input3_gsi, gimple_omp_body (input_stmt3),
11233 GSI_SAME_STMT);
11234 gsi_remove (&input3_gsi, true);
11235 gsi_insert_seq_after (&scan3_gsi, gimple_omp_body (scan_stmt3),
11236 GSI_SAME_STMT);
11237 gsi_remove (&scan3_gsi, true);
11238 gsi_insert_seq_after (&input4_gsi, gimple_omp_body (input_stmt4),
11239 GSI_SAME_STMT);
11240 gsi_remove (&input4_gsi, true);
11241 gsi_insert_seq_after (&scan4_gsi, gimple_omp_body (scan_stmt4),
11242 GSI_SAME_STMT);
11243 gsi_remove (&scan4_gsi, true);
11245 else
11247 gimple_omp_set_body (scan_stmt1, scan1_list);
11248 gimple_omp_set_body (input_stmt2, input2_list);
11251 gsi_insert_seq_after (&input1_gsi, gimple_omp_body (input_stmt1),
11252 GSI_SAME_STMT);
11253 gsi_remove (&input1_gsi, true);
11254 gsi_insert_seq_after (&scan1_gsi, gimple_omp_body (scan_stmt1),
11255 GSI_SAME_STMT);
11256 gsi_remove (&scan1_gsi, true);
11257 gsi_insert_seq_after (&input2_gsi, gimple_omp_body (input_stmt2),
11258 GSI_SAME_STMT);
11259 gsi_remove (&input2_gsi, true);
11260 gsi_insert_seq_after (&scan2_gsi, gimple_omp_body (scan_stmt2),
11261 GSI_SAME_STMT);
11262 gsi_remove (&scan2_gsi, true);
11264 gimple_seq_add_seq (body_p, clist);
11266 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
11267 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
11268 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
11269 g = gimple_build_cond (EQ_EXPR, thread_num, integer_zero_node, lab1, lab2);
11270 gimple_seq_add_stmt (body_p, g);
11271 g = gimple_build_label (lab1);
11272 gimple_seq_add_stmt (body_p, g);
11273 gimple_seq_add_seq (body_p, thr01_list);
11274 g = gimple_build_goto (lab3);
11275 gimple_seq_add_stmt (body_p, g);
11276 g = gimple_build_label (lab2);
11277 gimple_seq_add_stmt (body_p, g);
11278 gimple_seq_add_seq (body_p, thrn1_list);
11279 g = gimple_build_label (lab3);
11280 gimple_seq_add_stmt (body_p, g);
11282 g = gimple_build_assign (ivar, size_zero_node);
11283 gimple_seq_add_stmt (body_p, g);
11285 gimple_seq_add_stmt (body_p, stmt);
11286 gimple_seq_add_seq (body_p, body);
11287 gimple_seq_add_stmt (body_p, gimple_build_omp_continue (fd->loop.v,
11288 fd->loop.v));
11290 g = gimple_build_omp_return (true);
11291 gimple_seq_add_stmt (body_p, g);
11292 gimple_seq_add_seq (body_p, mdlist);
11294 lab1 = create_artificial_label (UNKNOWN_LOCATION);
11295 lab2 = create_artificial_label (UNKNOWN_LOCATION);
11296 g = gimple_build_cond (GT_EXPR, num_threads, integer_one_node, lab1, lab2);
11297 gimple_seq_add_stmt (body_p, g);
11298 g = gimple_build_label (lab1);
11299 gimple_seq_add_stmt (body_p, g);
11301 g = omp_build_barrier (NULL);
11302 gimple_seq_add_stmt (body_p, g);
11304 tree down = create_tmp_var (unsigned_type_node);
11305 g = gimple_build_assign (down, build_zero_cst (unsigned_type_node));
11306 gimple_seq_add_stmt (body_p, g);
11308 g = gimple_build_assign (k, build_one_cst (unsigned_type_node));
11309 gimple_seq_add_stmt (body_p, g);
11311 tree num_threadsu = create_tmp_var (unsigned_type_node);
11312 g = gimple_build_assign (num_threadsu, NOP_EXPR, num_threads);
11313 gimple_seq_add_stmt (body_p, g);
11315 tree thread_numu = create_tmp_var (unsigned_type_node);
11316 g = gimple_build_assign (thread_numu, NOP_EXPR, thread_num);
11317 gimple_seq_add_stmt (body_p, g);
11319 tree thread_nump1 = create_tmp_var (unsigned_type_node);
11320 g = gimple_build_assign (thread_nump1, PLUS_EXPR, thread_numu,
11321 build_int_cst (unsigned_type_node, 1));
11322 gimple_seq_add_stmt (body_p, g);
11324 lab3 = create_artificial_label (UNKNOWN_LOCATION);
11325 g = gimple_build_label (lab3);
11326 gimple_seq_add_stmt (body_p, g);
11328 tree twok = create_tmp_var (unsigned_type_node);
11329 g = gimple_build_assign (twok, LSHIFT_EXPR, k, integer_one_node);
11330 gimple_seq_add_stmt (body_p, g);
11332 tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
11333 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
11334 tree lab6 = create_artificial_label (UNKNOWN_LOCATION);
11335 g = gimple_build_cond (GT_EXPR, twok, num_threadsu, lab4, lab5);
11336 gimple_seq_add_stmt (body_p, g);
11337 g = gimple_build_label (lab4);
11338 gimple_seq_add_stmt (body_p, g);
11339 g = gimple_build_assign (down, build_all_ones_cst (unsigned_type_node));
11340 gimple_seq_add_stmt (body_p, g);
11341 g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
11342 gimple_seq_add_stmt (body_p, g);
11344 g = gimple_build_cond (EQ_EXPR, k, num_threadsu, lab6, lab5);
11345 gimple_seq_add_stmt (body_p, g);
11346 g = gimple_build_label (lab6);
11347 gimple_seq_add_stmt (body_p, g);
11349 g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
11350 gimple_seq_add_stmt (body_p, g);
11352 g = gimple_build_label (lab5);
11353 gimple_seq_add_stmt (body_p, g);
11355 g = gimple_build_assign (twok, LSHIFT_EXPR, k, integer_one_node);
11356 gimple_seq_add_stmt (body_p, g);
11358 tree cplx = create_tmp_var (build_complex_type (unsigned_type_node, false));
11359 g = gimple_build_call_internal (IFN_MUL_OVERFLOW, 2, thread_nump1, twok);
11360 gimple_call_set_lhs (g, cplx);
11361 gimple_seq_add_stmt (body_p, g);
11362 tree mul = create_tmp_var (unsigned_type_node);
11363 g = gimple_build_assign (mul, REALPART_EXPR,
11364 build1 (REALPART_EXPR, unsigned_type_node, cplx));
11365 gimple_seq_add_stmt (body_p, g);
11366 tree ovf = create_tmp_var (unsigned_type_node);
11367 g = gimple_build_assign (ovf, IMAGPART_EXPR,
11368 build1 (IMAGPART_EXPR, unsigned_type_node, cplx));
11369 gimple_seq_add_stmt (body_p, g);
11371 tree lab7 = create_artificial_label (UNKNOWN_LOCATION);
11372 tree lab8 = create_artificial_label (UNKNOWN_LOCATION);
11373 g = gimple_build_cond (EQ_EXPR, ovf, build_zero_cst (unsigned_type_node),
11374 lab7, lab8);
11375 gimple_seq_add_stmt (body_p, g);
11376 g = gimple_build_label (lab7);
11377 gimple_seq_add_stmt (body_p, g);
11379 tree andv = create_tmp_var (unsigned_type_node);
11380 g = gimple_build_assign (andv, BIT_AND_EXPR, k, down);
11381 gimple_seq_add_stmt (body_p, g);
11382 tree andvm1 = create_tmp_var (unsigned_type_node);
11383 g = gimple_build_assign (andvm1, PLUS_EXPR, andv,
11384 build_minus_one_cst (unsigned_type_node));
11385 gimple_seq_add_stmt (body_p, g);
11387 g = gimple_build_assign (l, PLUS_EXPR, mul, andvm1);
11388 gimple_seq_add_stmt (body_p, g);
11390 tree lab9 = create_artificial_label (UNKNOWN_LOCATION);
11391 g = gimple_build_cond (LT_EXPR, l, num_threadsu, lab9, lab8);
11392 gimple_seq_add_stmt (body_p, g);
11393 g = gimple_build_label (lab9);
11394 gimple_seq_add_stmt (body_p, g);
11395 gimple_seq_add_seq (body_p, reduc_list);
11396 g = gimple_build_label (lab8);
11397 gimple_seq_add_stmt (body_p, g);
11399 tree lab10 = create_artificial_label (UNKNOWN_LOCATION);
11400 tree lab11 = create_artificial_label (UNKNOWN_LOCATION);
11401 tree lab12 = create_artificial_label (UNKNOWN_LOCATION);
11402 g = gimple_build_cond (EQ_EXPR, down, build_zero_cst (unsigned_type_node),
11403 lab10, lab11);
11404 gimple_seq_add_stmt (body_p, g);
11405 g = gimple_build_label (lab10);
11406 gimple_seq_add_stmt (body_p, g);
11407 g = gimple_build_assign (k, LSHIFT_EXPR, k, integer_one_node);
11408 gimple_seq_add_stmt (body_p, g);
11409 g = gimple_build_goto (lab12);
11410 gimple_seq_add_stmt (body_p, g);
11411 g = gimple_build_label (lab11);
11412 gimple_seq_add_stmt (body_p, g);
11413 g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
11414 gimple_seq_add_stmt (body_p, g);
11415 g = gimple_build_label (lab12);
11416 gimple_seq_add_stmt (body_p, g);
11418 g = omp_build_barrier (NULL);
11419 gimple_seq_add_stmt (body_p, g);
11421 g = gimple_build_cond (NE_EXPR, k, build_zero_cst (unsigned_type_node),
11422 lab3, lab2);
11423 gimple_seq_add_stmt (body_p, g);
11425 g = gimple_build_label (lab2);
11426 gimple_seq_add_stmt (body_p, g);
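/* The statements from LAB3 down to here implement a work-efficient,
   tree-shaped scan over the per-thread totals in rpriva, executed only
   when num_threads > 1.  A sequential model of what every thread runs
   in lockstep (a sketch: barrier () stands for the emitted barriers,
   and OP for the reduction operation; for simple reductions MINUS_EXPR
   was canonicalized to PLUS_EXPR above, and user-defined reductions
   run their combiner instead):

     unsigned down = 0, k = 1;
     do
       {
         unsigned twok = k << 1;
         if (twok > num_threads)      // top reached: start the down-sweep
           {
             down = ~0u;
             k >>= 1;
             if (k == num_threads)
               k >>= 1;
           }
         twok = k << 1;
         unsigned mul;
         if (!__builtin_mul_overflow (thread_num + 1u, twok, &mul))
           {
             unsigned l = mul + (k & down) - 1;
             if (l < num_threads)
               rpriva[l] = rpriva[l - k] OP rpriva[l];   // reduc_list
           }
         k = (down == 0) ? k << 1 : k >> 1;
         barrier ();
       }
     while (k != 0);  */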
11428 lab1 = create_artificial_label (UNKNOWN_LOCATION);
11429 lab2 = create_artificial_label (UNKNOWN_LOCATION);
11430 lab3 = create_artificial_label (UNKNOWN_LOCATION);
11431 g = gimple_build_cond (EQ_EXPR, thread_num, integer_zero_node, lab1, lab2);
11432 gimple_seq_add_stmt (body_p, g);
11433 g = gimple_build_label (lab1);
11434 gimple_seq_add_stmt (body_p, g);
11435 gimple_seq_add_seq (body_p, thr02_list);
11436 g = gimple_build_goto (lab3);
11437 gimple_seq_add_stmt (body_p, g);
11438 g = gimple_build_label (lab2);
11439 gimple_seq_add_stmt (body_p, g);
11440 gimple_seq_add_seq (body_p, thrn2_list);
11441 g = gimple_build_label (lab3);
11442 gimple_seq_add_stmt (body_p, g);
11444 g = gimple_build_assign (ivar, size_zero_node);
11445 gimple_seq_add_stmt (body_p, g);
11446 gimple_seq_add_stmt (body_p, new_stmt);
11447 gimple_seq_add_seq (body_p, new_body);
11449 gimple_seq new_dlist = NULL;
11450 lab1 = create_artificial_label (UNKNOWN_LOCATION);
11451 lab2 = create_artificial_label (UNKNOWN_LOCATION);
11452 tree num_threadsm1 = create_tmp_var (integer_type_node);
11453 g = gimple_build_assign (num_threadsm1, PLUS_EXPR, num_threads,
11454 integer_minus_one_node);
11455 gimple_seq_add_stmt (&new_dlist, g);
11456 g = gimple_build_cond (EQ_EXPR, thread_num, num_threadsm1, lab1, lab2);
11457 gimple_seq_add_stmt (&new_dlist, g);
11458 g = gimple_build_label (lab1);
11459 gimple_seq_add_stmt (&new_dlist, g);
11460 gimple_seq_add_seq (&new_dlist, last_list);
11461 g = gimple_build_label (lab2);
11462 gimple_seq_add_stmt (&new_dlist, g);
11463 gimple_seq_add_seq (&new_dlist, *dlist);
11464 *dlist = new_dlist;
11467 /* Build an internal UNIQUE function with type IFN_UNIQUE_OACC_PRIVATE listing
11468 the addresses of variables to be made private at the surrounding
11469 parallelism level. Such functions appear in the gimple code stream in two
11470 forms, e.g. for a partitioned loop:
11472 .data_dep.6 = .UNIQUE (OACC_HEAD_MARK, .data_dep.6, 1, 68);
11473 .data_dep.6 = .UNIQUE (OACC_PRIVATE, .data_dep.6, -1, &w);
11474 .data_dep.6 = .UNIQUE (OACC_FORK, .data_dep.6, -1);
11475 .data_dep.6 = .UNIQUE (OACC_HEAD_MARK, .data_dep.6);
11477 or alternatively, OACC_PRIVATE can appear at the top level of a parallel,
11478 not as part of a HEAD_MARK sequence:
11480 .UNIQUE (OACC_PRIVATE, 0, 0, &w);
11482 For such stand-alone appearances, the 3rd argument is always 0, denoting
11483 gang partitioning. */
11485 static gcall *
11486 lower_oacc_private_marker (omp_context *ctx)
11488 if (ctx->oacc_privatization_candidates.length () == 0)
11489 return NULL;
11491 auto_vec<tree, 5> args;
11493 args.quick_push (build_int_cst (integer_type_node, IFN_UNIQUE_OACC_PRIVATE));
11494 args.quick_push (integer_zero_node);
11495 args.quick_push (integer_minus_one_node);
11497 int i;
11498 tree decl;
11499 FOR_EACH_VEC_ELT (ctx->oacc_privatization_candidates, i, decl)
11501 for (omp_context *thisctx = ctx; thisctx; thisctx = thisctx->outer)
11503 tree inner_decl = maybe_lookup_decl (decl, thisctx);
11504 if (inner_decl)
11506 decl = inner_decl;
11507 break;
11510 gcc_checking_assert (decl);
11512 tree addr = build_fold_addr_expr (decl);
11513 args.safe_push (addr);
11516 return gimple_build_call_internal_vec (IFN_UNIQUE, args);
11519 /* Lower code for an OMP loop directive. */
11521 static void
11522 lower_omp_for (gimple_stmt_iterator *gsi_p, omp_context *ctx)
11524 tree *rhs_p, block;
11525 struct omp_for_data fd, *fdp = NULL;
11526 gomp_for *stmt = as_a <gomp_for *> (gsi_stmt (*gsi_p));
11527 gbind *new_stmt;
11528 gimple_seq omp_for_body, body, dlist, tred_ilist = NULL, tred_dlist = NULL;
11529 gimple_seq cnt_list = NULL, clist = NULL;
11530 gimple_seq oacc_head = NULL, oacc_tail = NULL;
11531 size_t i;
11533 push_gimplify_context ();
11535 if (is_gimple_omp_oacc (ctx->stmt))
11536 oacc_privatization_scan_clause_chain (ctx, gimple_omp_for_clauses (stmt));
11538 lower_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
11540 block = make_node (BLOCK);
11541 new_stmt = gimple_build_bind (NULL, NULL, block);
11542 /* Replace at gsi right away, so that 'stmt' is no longer a
11543 member of a sequence: we're going to add it to a different
11544 one below. */
11545 gsi_replace (gsi_p, new_stmt, true);
11547 /* Move declarations of temporaries in the loop body before we make
11548 it go away. */
11549 omp_for_body = gimple_omp_body (stmt);
11550 if (!gimple_seq_empty_p (omp_for_body)
11551 && gimple_code (gimple_seq_first_stmt (omp_for_body)) == GIMPLE_BIND)
11553 gbind *inner_bind
11554 = as_a <gbind *> (gimple_seq_first_stmt (omp_for_body));
11555 tree vars = gimple_bind_vars (inner_bind);
11556 if (is_gimple_omp_oacc (ctx->stmt))
11557 oacc_privatization_scan_decl_chain (ctx, vars);
11558 gimple_bind_append_vars (new_stmt, vars);
11559 /* bind_vars/BLOCK_VARS are being moved to new_stmt/block, don't
11560 keep them on the inner_bind and its block. */
11561 gimple_bind_set_vars (inner_bind, NULL_TREE);
11562 if (gimple_bind_block (inner_bind))
11563 BLOCK_VARS (gimple_bind_block (inner_bind)) = NULL_TREE;
11566 if (gimple_omp_for_combined_into_p (stmt))
11568 omp_extract_for_data (stmt, &fd, NULL);
11569 fdp = &fd;
11571 /* We need two temporaries with fd.loop.v type (istart/iend)
11572 and then (fd.collapse - 1) temporaries with the same
11573 type for count2 ... countN-1 vars if not constant. */
11574 size_t count = 2;
11575 tree type = fd.iter_type;
11576 if (fd.collapse > 1
11577 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
11578 count += fd.collapse - 1;
11579 size_t count2 = 0;
11580 tree type2 = NULL_TREE;
11581 bool taskreg_for
11582 = (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR
11583 || gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP);
11584 tree outerc = NULL, *pc = gimple_omp_for_clauses_ptr (stmt);
11585 tree simtc = NULL;
11586 tree clauses = *pc;
11587 if (fd.collapse > 1
11588 && fd.non_rect
11589 && fd.last_nonrect == fd.first_nonrect + 1
11590 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
11591 if (tree v = gimple_omp_for_index (stmt, fd.last_nonrect))
11592 if (!TYPE_UNSIGNED (TREE_TYPE (v)))
11594 v = gimple_omp_for_index (stmt, fd.first_nonrect);
11595 type2 = TREE_TYPE (v);
11596 count++;
11597 count2 = 3;
11599 if (taskreg_for)
11600 outerc
11601 = omp_find_clause (gimple_omp_taskreg_clauses (ctx->outer->stmt),
11602 OMP_CLAUSE__LOOPTEMP_);
11603 if (ctx->simt_stmt)
11604 simtc = omp_find_clause (gimple_omp_for_clauses (ctx->simt_stmt),
11605 OMP_CLAUSE__LOOPTEMP_);
11606 for (i = 0; i < count + count2; i++)
11608 tree temp;
11609 if (taskreg_for)
11611 gcc_assert (outerc);
11612 temp = lookup_decl (OMP_CLAUSE_DECL (outerc), ctx->outer);
11613 outerc = omp_find_clause (OMP_CLAUSE_CHAIN (outerc),
11614 OMP_CLAUSE__LOOPTEMP_);
11616 else
11618 /* If there are 2 adjacent SIMD stmts, one with a _simt_
11619 clause, another without, make sure they have the same
11620 decls in _looptemp_ clauses, because the outer stmt
11621 they are combined into will look up just one inner_stmt. */
11622 if (ctx->simt_stmt)
11623 temp = OMP_CLAUSE_DECL (simtc);
11624 else
11625 temp = create_tmp_var (i >= count ? type2 : type);
11626 insert_decl_map (&ctx->outer->cb, temp, temp);
11628 *pc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
11629 OMP_CLAUSE_DECL (*pc) = temp;
11630 pc = &OMP_CLAUSE_CHAIN (*pc);
11631 if (ctx->simt_stmt)
11632 simtc = omp_find_clause (OMP_CLAUSE_CHAIN (simtc),
11633 OMP_CLAUSE__LOOPTEMP_);
11635 *pc = clauses;
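/* As a sketch, for a combined construct such as:

     #pragma omp parallel for
     for (i = 0; i < n; i++) ...

   the istart/iend (and, for collapsed non-constant bounds, count)
   temporaries created here are recorded under _looptemp_ clauses on
   both the outer and the inner construct, so that the expansion of the
   inner loop can find the bounds computed by the enclosing parallel or
   taskloop.  */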
11638 /* The pre-body and input clauses go before the lowered GIMPLE_OMP_FOR. */
11639 dlist = NULL;
11640 body = NULL;
11641 tree rclauses
11642 = omp_task_reductions_find_first (gimple_omp_for_clauses (stmt), OMP_FOR,
11643 OMP_CLAUSE_REDUCTION);
11644 tree rtmp = NULL_TREE;
11645 if (rclauses)
11647 tree type = build_pointer_type (pointer_sized_int_node);
11648 tree temp = create_tmp_var (type);
11649 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
11650 OMP_CLAUSE_DECL (c) = temp;
11651 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (stmt);
11652 gimple_omp_for_set_clauses (stmt, c);
11653 lower_omp_task_reductions (ctx, OMP_FOR,
11654 gimple_omp_for_clauses (stmt),
11655 &tred_ilist, &tred_dlist);
11656 rclauses = c;
11657 rtmp = make_ssa_name (type);
11658 gimple_seq_add_stmt (&body, gimple_build_assign (rtmp, temp));
11661 lower_lastprivate_conditional_clauses (gimple_omp_for_clauses_ptr (stmt),
11662 ctx);
11664 lower_rec_input_clauses (gimple_omp_for_clauses (stmt), &body, &dlist, ctx,
11665 fdp);
11666 gimple_seq_add_seq (rclauses ? &tred_ilist : &body,
11667 gimple_omp_for_pre_body (stmt));
11669 lower_omp (gimple_omp_body_ptr (stmt), ctx);
11671 gcall *private_marker = NULL;
11672 if (is_gimple_omp_oacc (ctx->stmt)
11673 && !gimple_seq_empty_p (omp_for_body))
11674 private_marker = lower_oacc_private_marker (ctx);
11676 /* Lower the header expressions. At this point, we can assume that
11677 the header is of the form:
11679 #pragma omp for (V = VAL1; V {<|>|<=|>=} VAL2; V = V [+-] VAL3)
11681 We just need to make sure that VAL1, VAL2 and VAL3 are lowered
11682 using the .omp_data_s mapping, if needed. */
11683 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
11685 rhs_p = gimple_omp_for_initial_ptr (stmt, i);
11686 if (TREE_CODE (*rhs_p) == TREE_VEC)
11688 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 1)))
11689 TREE_VEC_ELT (*rhs_p, 1)
11690 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 1), &cnt_list);
11691 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 2)))
11692 TREE_VEC_ELT (*rhs_p, 2)
11693 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 2), &cnt_list);
11695 else if (!is_gimple_min_invariant (*rhs_p))
11696 *rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
11697 else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
11698 recompute_tree_invariant_for_addr_expr (*rhs_p);
11700 rhs_p = gimple_omp_for_final_ptr (stmt, i);
11701 if (TREE_CODE (*rhs_p) == TREE_VEC)
11703 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 1)))
11704 TREE_VEC_ELT (*rhs_p, 1)
11705 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 1), &cnt_list);
11706 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 2)))
11707 TREE_VEC_ELT (*rhs_p, 2)
11708 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 2), &cnt_list);
11710 else if (!is_gimple_min_invariant (*rhs_p))
11711 *rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
11712 else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
11713 recompute_tree_invariant_for_addr_expr (*rhs_p);
11715 rhs_p = &TREE_OPERAND (gimple_omp_for_incr (stmt, i), 1);
11716 if (!is_gimple_min_invariant (*rhs_p))
11717 *rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
11719 if (rclauses)
11720 gimple_seq_add_seq (&tred_ilist, cnt_list);
11721 else
11722 gimple_seq_add_seq (&body, cnt_list);
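/* As a sketch, in:

     #pragma omp for
     for (i = 0; i < b * c; i += s)

   each non-invariant header expression such as b * c is evaluated once
   into a temporary collected in CNT_LIST, emitted ahead of the
   construct (or into TRED_ILIST when task reductions are present), so
   that the region itself only refers to gimple values.  */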
11724 /* Once lowered, extract the bounds and clauses. */
11725 omp_extract_for_data (stmt, &fd, NULL);
11727 if (is_gimple_omp_oacc (ctx->stmt)
11728 && !ctx_in_oacc_kernels_region (ctx))
11729 lower_oacc_head_tail (gimple_location (stmt),
11730 gimple_omp_for_clauses (stmt), private_marker,
11731 &oacc_head, &oacc_tail, ctx);
11733 /* Add OpenACC partitioning and reduction markers just before the loop. */
11734 if (oacc_head)
11735 gimple_seq_add_seq (&body, oacc_head);
11737 lower_omp_for_lastprivate (&fd, &body, &dlist, &clist, ctx);
11739 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
11740 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
11741 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
11742 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
11744 OMP_CLAUSE_DECL (c) = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
11745 if (DECL_P (OMP_CLAUSE_LINEAR_STEP (c)))
11746 OMP_CLAUSE_LINEAR_STEP (c)
11747 = maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_LINEAR_STEP (c),
11748 ctx);
11751 if ((ctx->scan_inclusive || ctx->scan_exclusive)
11752 && gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
11753 lower_omp_for_scan (&body, &dlist, stmt, &fd, ctx);
11754 else
11756 gimple_seq_add_stmt (&body, stmt);
11757 gimple_seq_add_seq (&body, gimple_omp_body (stmt));
11760 gimple_seq_add_stmt (&body, gimple_build_omp_continue (fd.loop.v,
11761 fd.loop.v));
11763 /* After the loop, add exit clauses. */
11764 lower_reduction_clauses (gimple_omp_for_clauses (stmt), &body, &clist, ctx);
11766 if (clist)
11768 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START);
11769 gcall *g = gimple_build_call (fndecl, 0);
11770 gimple_seq_add_stmt (&body, g);
11771 gimple_seq_add_seq (&body, clist);
11772 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END);
11773 g = gimple_build_call (fndecl, 0);
11774 gimple_seq_add_stmt (&body, g);
11777 if (ctx->cancellable)
11778 gimple_seq_add_stmt (&body, gimple_build_label (ctx->cancel_label));
11780 gimple_seq_add_seq (&body, dlist);
11782 if (rclauses)
11784 gimple_seq_add_seq (&tred_ilist, body);
11785 body = tred_ilist;
11788 body = maybe_catch_exception (body);
11790 /* Region exit marker goes at the end of the loop body. */
11791 gimple *g = gimple_build_omp_return (fd.have_nowait);
11792 gimple_seq_add_stmt (&body, g);
11794 gimple_seq_add_seq (&body, tred_dlist);
11796 maybe_add_implicit_barrier_cancel (ctx, g, &body);
11798 if (rclauses)
11799 OMP_CLAUSE_DECL (rclauses) = rtmp;
11801 /* Add OpenACC joining and reduction markers just after the loop. */
11802 if (oacc_tail)
11803 gimple_seq_add_seq (&body, oacc_tail);
11805 pop_gimplify_context (new_stmt);
11807 gimple_bind_append_vars (new_stmt, ctx->block_vars);
11808 maybe_remove_omp_member_access_dummy_vars (new_stmt);
11809 BLOCK_VARS (block) = gimple_bind_vars (new_stmt);
11810 if (BLOCK_VARS (block))
11811 TREE_USED (block) = 1;
11813 gimple_bind_set_body (new_stmt, body);
11814 gimple_omp_set_body (stmt, NULL);
11815 gimple_omp_for_set_pre_body (stmt, NULL);
11818 /* Callback for walk_stmts. Check if the current statement only contains
11819 GIMPLE_OMP_FOR or GIMPLE_OMP_SECTIONS. */
11821 static tree
11822 check_combined_parallel (gimple_stmt_iterator *gsi_p,
11823 bool *handled_ops_p,
11824 struct walk_stmt_info *wi)
11826 int *info = (int *) wi->info;
11827 gimple *stmt = gsi_stmt (*gsi_p);
11829 *handled_ops_p = true;
11830 switch (gimple_code (stmt))
11832 WALK_SUBSTMTS;
11834 case GIMPLE_DEBUG:
11835 break;
11836 case GIMPLE_OMP_FOR:
11837 case GIMPLE_OMP_SECTIONS:
11838 *info = *info == 0 ? 1 : -1;
11839 break;
11840 default:
11841 *info = -1;
11842 break;
11844 return NULL;
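/* As a sketch:

     #pragma omp parallel
     #pragma omp for
     for (...) ...

   leaves *INFO at 1 (exactly one worksharing construct and nothing
   else besides debug stmts), which the caller uses to mark the
   parallel as combined; any other statement, or a second
   GIMPLE_OMP_FOR/GIMPLE_OMP_SECTIONS, forces *INFO to -1.  */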
11847 struct omp_taskcopy_context
11849 /* This field must be at the beginning, as we do "inheritance": Some
11850 callback functions for tree-inline.c (e.g., omp_copy_decl)
11851 receive a copy_body_data pointer that is up-casted to an
11852 omp_context pointer. */
11853 copy_body_data cb;
11854 omp_context *ctx;
11857 static tree
11858 task_copyfn_copy_decl (tree var, copy_body_data *cb)
11860 struct omp_taskcopy_context *tcctx = (struct omp_taskcopy_context *) cb;
11862 if (splay_tree_lookup (tcctx->ctx->sfield_map, (splay_tree_key) var))
11863 return create_tmp_var (TREE_TYPE (var));
11865 return var;
11868 static tree
11869 task_copyfn_remap_type (struct omp_taskcopy_context *tcctx, tree orig_type)
11871 tree name, new_fields = NULL, type, f;
11873 type = lang_hooks.types.make_type (RECORD_TYPE);
11874 name = DECL_NAME (TYPE_NAME (orig_type));
11875 name = build_decl (gimple_location (tcctx->ctx->stmt),
11876 TYPE_DECL, name, type);
11877 TYPE_NAME (type) = name;
11879 for (f = TYPE_FIELDS (orig_type); f ; f = TREE_CHAIN (f))
11881 tree new_f = copy_node (f);
11882 DECL_CONTEXT (new_f) = type;
11883 TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &tcctx->cb);
11884 TREE_CHAIN (new_f) = new_fields;
11885 walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &tcctx->cb, NULL);
11886 walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r, &tcctx->cb, NULL);
11887 walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
11888 &tcctx->cb, NULL);
11889 new_fields = new_f;
11890 tcctx->cb.decl_map->put (f, new_f);
11892 TYPE_FIELDS (type) = nreverse (new_fields);
11893 layout_type (type);
11894 return type;
11897 /* Create the task copyfn, which copy-constructs the task's firstprivate data from the sender block into the task's data block. */
11899 static void
11900 create_task_copyfn (gomp_task *task_stmt, omp_context *ctx)
11902 struct function *child_cfun;
11903 tree child_fn, t, c, src, dst, f, sf, arg, sarg, decl;
11904 tree record_type, srecord_type, bind, list;
11905 bool record_needs_remap = false, srecord_needs_remap = false;
11906 splay_tree_node n;
11907 struct omp_taskcopy_context tcctx;
11908 location_t loc = gimple_location (task_stmt);
11909 size_t looptempno = 0;
11911 child_fn = gimple_omp_task_copy_fn (task_stmt);
11912 child_cfun = DECL_STRUCT_FUNCTION (child_fn);
11913 gcc_assert (child_cfun->cfg == NULL);
11914 DECL_SAVED_TREE (child_fn) = alloc_stmt_list ();
11916 /* Reset DECL_CONTEXT on function arguments. */
11917 for (t = DECL_ARGUMENTS (child_fn); t; t = DECL_CHAIN (t))
11918 DECL_CONTEXT (t) = child_fn;
11920 /* Populate the function. */
11921 push_gimplify_context ();
11922 push_cfun (child_cfun);
11924 bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
11925 TREE_SIDE_EFFECTS (bind) = 1;
11926 list = NULL;
11927 DECL_SAVED_TREE (child_fn) = bind;
11928 DECL_SOURCE_LOCATION (child_fn) = gimple_location (task_stmt);
11930 /* Remap src and dst argument types if needed. */
11931 record_type = ctx->record_type;
11932 srecord_type = ctx->srecord_type;
11933 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
11934 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
11936 record_needs_remap = true;
11937 break;
11939 for (f = TYPE_FIELDS (srecord_type); f ; f = DECL_CHAIN (f))
11940 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
11942 srecord_needs_remap = true;
11943 break;
11946 if (record_needs_remap || srecord_needs_remap)
11948 memset (&tcctx, '\0', sizeof (tcctx));
11949 tcctx.cb.src_fn = ctx->cb.src_fn;
11950 tcctx.cb.dst_fn = child_fn;
11951 tcctx.cb.src_node = cgraph_node::get (tcctx.cb.src_fn);
11952 gcc_checking_assert (tcctx.cb.src_node);
11953 tcctx.cb.dst_node = tcctx.cb.src_node;
11954 tcctx.cb.src_cfun = ctx->cb.src_cfun;
11955 tcctx.cb.copy_decl = task_copyfn_copy_decl;
11956 tcctx.cb.eh_lp_nr = 0;
11957 tcctx.cb.transform_call_graph_edges = CB_CGE_MOVE;
11958 tcctx.cb.decl_map = new hash_map<tree, tree>;
11959 tcctx.ctx = ctx;
11961 if (record_needs_remap)
11962 record_type = task_copyfn_remap_type (&tcctx, record_type);
11963 if (srecord_needs_remap)
11964 srecord_type = task_copyfn_remap_type (&tcctx, srecord_type);
11966 else
11967 tcctx.cb.decl_map = NULL;
11969 arg = DECL_ARGUMENTS (child_fn);
11970 TREE_TYPE (arg) = build_pointer_type (record_type);
11971 sarg = DECL_CHAIN (arg);
11972 TREE_TYPE (sarg) = build_pointer_type (srecord_type);
11974 /* First pass: initialize temporaries used in record_type and srecord_type
11975 sizes and field offsets. */
11976 if (tcctx.cb.decl_map)
11977 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
11978 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
11980 tree *p;
11982 decl = OMP_CLAUSE_DECL (c);
11983 p = tcctx.cb.decl_map->get (decl);
11984 if (p == NULL)
11985 continue;
11986 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
11987 sf = (tree) n->value;
11988 sf = *tcctx.cb.decl_map->get (sf);
11989 src = build_simple_mem_ref_loc (loc, sarg);
11990 src = omp_build_component_ref (src, sf);
11991 t = build2 (MODIFY_EXPR, TREE_TYPE (*p), *p, src);
11992 append_to_statement_list (t, &list);
11995 /* Second pass: copy shared var pointers and copy construct non-VLA
11996 firstprivate vars. */
11997 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
11998 switch (OMP_CLAUSE_CODE (c))
12000 splay_tree_key key;
12001 case OMP_CLAUSE_SHARED:
12002 decl = OMP_CLAUSE_DECL (c);
12003 key = (splay_tree_key) decl;
12004 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
12005 key = (splay_tree_key) &DECL_UID (decl);
12006 n = splay_tree_lookup (ctx->field_map, key);
12007 if (n == NULL)
12008 break;
12009 f = (tree) n->value;
12010 if (tcctx.cb.decl_map)
12011 f = *tcctx.cb.decl_map->get (f);
12012 n = splay_tree_lookup (ctx->sfield_map, key);
12013 sf = (tree) n->value;
12014 if (tcctx.cb.decl_map)
12015 sf = *tcctx.cb.decl_map->get (sf);
12016 src = build_simple_mem_ref_loc (loc, sarg);
12017 src = omp_build_component_ref (src, sf);
12018 dst = build_simple_mem_ref_loc (loc, arg);
12019 dst = omp_build_component_ref (dst, f);
12020 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
12021 append_to_statement_list (t, &list);
12022 break;
12023 case OMP_CLAUSE_REDUCTION:
12024 case OMP_CLAUSE_IN_REDUCTION:
12025 decl = OMP_CLAUSE_DECL (c);
12026 if (TREE_CODE (decl) == MEM_REF)
12028 decl = TREE_OPERAND (decl, 0);
12029 if (TREE_CODE (decl) == POINTER_PLUS_EXPR)
12030 decl = TREE_OPERAND (decl, 0);
12031 if (TREE_CODE (decl) == INDIRECT_REF
12032 || TREE_CODE (decl) == ADDR_EXPR)
12033 decl = TREE_OPERAND (decl, 0);
12035 key = (splay_tree_key) decl;
12036 n = splay_tree_lookup (ctx->field_map, key);
12037 if (n == NULL)
12038 break;
12039 f = (tree) n->value;
12040 if (tcctx.cb.decl_map)
12041 f = *tcctx.cb.decl_map->get (f);
12042 n = splay_tree_lookup (ctx->sfield_map, key);
12043 sf = (tree) n->value;
12044 if (tcctx.cb.decl_map)
12045 sf = *tcctx.cb.decl_map->get (sf);
12046 src = build_simple_mem_ref_loc (loc, sarg);
12047 src = omp_build_component_ref (src, sf);
12048 if (decl != OMP_CLAUSE_DECL (c)
12049 && TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
12050 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
12051 src = build_simple_mem_ref_loc (loc, src);
12052 dst = build_simple_mem_ref_loc (loc, arg);
12053 dst = omp_build_component_ref (dst, f);
12054 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
12055 append_to_statement_list (t, &list);
12056 break;
12057 case OMP_CLAUSE__LOOPTEMP_:
12058 /* Fields for the first two _looptemp_ clauses are initialized by
12059 GOMP_taskloop*, the rest are handled like firstprivate. */
12060 if (looptempno < 2)
12062 looptempno++;
12063 break;
12065 /* FALLTHRU */
12066 case OMP_CLAUSE__REDUCTEMP_:
12067 case OMP_CLAUSE_FIRSTPRIVATE:
12068 decl = OMP_CLAUSE_DECL (c);
12069 if (is_variable_sized (decl))
12070 break;
12071 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
12072 if (n == NULL)
12073 break;
12074 f = (tree) n->value;
12075 if (tcctx.cb.decl_map)
12076 f = *tcctx.cb.decl_map->get (f);
12077 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
12078 if (n != NULL)
12080 sf = (tree) n->value;
12081 if (tcctx.cb.decl_map)
12082 sf = *tcctx.cb.decl_map->get (sf);
12083 src = build_simple_mem_ref_loc (loc, sarg);
12084 src = omp_build_component_ref (src, sf);
12085 if (use_pointer_for_field (decl, NULL)
12086 || omp_privatize_by_reference (decl))
12087 src = build_simple_mem_ref_loc (loc, src);
12089 else
12090 src = decl;
12091 dst = build_simple_mem_ref_loc (loc, arg);
12092 dst = omp_build_component_ref (dst, f);
12093 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
12094 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
12095 else
12097 if (ctx->allocate_map)
12098 if (tree *allocatorp = ctx->allocate_map->get (decl))
12100 tree allocator = *allocatorp;
12101 HOST_WIDE_INT ialign = 0;
12102 if (TREE_CODE (allocator) == TREE_LIST)
12104 ialign = tree_to_uhwi (TREE_VALUE (allocator));
12105 allocator = TREE_PURPOSE (allocator);
12107 if (TREE_CODE (allocator) != INTEGER_CST)
12109 n = splay_tree_lookup (ctx->sfield_map,
12110 (splay_tree_key) allocator);
12111 allocator = (tree) n->value;
12112 if (tcctx.cb.decl_map)
12113 allocator = *tcctx.cb.decl_map->get (allocator);
12114 tree a = build_simple_mem_ref_loc (loc, sarg);
12115 allocator = omp_build_component_ref (a, allocator);
12117 allocator = fold_convert (pointer_sized_int_node, allocator);
12118 tree a = builtin_decl_explicit (BUILT_IN_GOMP_ALLOC);
12119 tree align = build_int_cst (size_type_node,
12120 MAX (ialign,
12121 DECL_ALIGN_UNIT (decl)));
12122 tree sz = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (dst)));
12123 tree ptr = build_call_expr_loc (loc, a, 3, align, sz,
12124 allocator);
12125 ptr = fold_convert (TREE_TYPE (dst), ptr);
12126 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, ptr);
12127 append_to_statement_list (t, &list);
12128 dst = build_simple_mem_ref_loc (loc, dst);
12130 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
12132 append_to_statement_list (t, &list);
12133 break;
12134 case OMP_CLAUSE_PRIVATE:
12135 if (! OMP_CLAUSE_PRIVATE_OUTER_REF (c))
12136 break;
12137 decl = OMP_CLAUSE_DECL (c);
12138 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
12139 f = (tree) n->value;
12140 if (tcctx.cb.decl_map)
12141 f = *tcctx.cb.decl_map->get (f);
12142 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
12143 if (n != NULL)
12145 sf = (tree) n->value;
12146 if (tcctx.cb.decl_map)
12147 sf = *tcctx.cb.decl_map->get (sf);
12148 src = build_simple_mem_ref_loc (loc, sarg);
12149 src = omp_build_component_ref (src, sf);
12150 if (use_pointer_for_field (decl, NULL))
12151 src = build_simple_mem_ref_loc (loc, src);
12153 else
12154 src = decl;
12155 dst = build_simple_mem_ref_loc (loc, arg);
12156 dst = omp_build_component_ref (dst, f);
12157 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
12158 append_to_statement_list (t, &list);
12159 break;
12160 default:
12161 break;
12164 /* Last pass: handle VLA firstprivates. */
12165 if (tcctx.cb.decl_map)
12166 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
12167 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
12169 tree ind, ptr, df;
12171 decl = OMP_CLAUSE_DECL (c);
12172 if (!is_variable_sized (decl))
12173 continue;
12174 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
12175 if (n == NULL)
12176 continue;
12177 f = (tree) n->value;
12178 f = *tcctx.cb.decl_map->get (f);
12179 gcc_assert (DECL_HAS_VALUE_EXPR_P (decl));
12180 ind = DECL_VALUE_EXPR (decl);
12181 gcc_assert (TREE_CODE (ind) == INDIRECT_REF);
12182 gcc_assert (DECL_P (TREE_OPERAND (ind, 0)));
12183 n = splay_tree_lookup (ctx->sfield_map,
12184 (splay_tree_key) TREE_OPERAND (ind, 0));
12185 sf = (tree) n->value;
12186 sf = *tcctx.cb.decl_map->get (sf);
12187 src = build_simple_mem_ref_loc (loc, sarg);
12188 src = omp_build_component_ref (src, sf);
12189 src = build_simple_mem_ref_loc (loc, src);
12190 dst = build_simple_mem_ref_loc (loc, arg);
12191 dst = omp_build_component_ref (dst, f);
12192 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
12193 append_to_statement_list (t, &list);
12194 n = splay_tree_lookup (ctx->field_map,
12195 (splay_tree_key) TREE_OPERAND (ind, 0));
12196 df = (tree) n->value;
12197 df = *tcctx.cb.decl_map->get (df);
12198 ptr = build_simple_mem_ref_loc (loc, arg);
12199 ptr = omp_build_component_ref (ptr, df);
12200 t = build2 (MODIFY_EXPR, TREE_TYPE (ptr), ptr,
12201 build_fold_addr_expr_loc (loc, dst));
12202 append_to_statement_list (t, &list);
12205 t = build1 (RETURN_EXPR, void_type_node, NULL);
12206 append_to_statement_list (t, &list);
12208 if (tcctx.cb.decl_map)
12209 delete tcctx.cb.decl_map;
12210 pop_gimplify_context (NULL);
12211 BIND_EXPR_BODY (bind) = list;
12212 pop_cfun ();
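/* As a sketch, for:

     void foo (int n)
     {
       int vla[n];
       #pragma omp task firstprivate (vla)
       use (vla);
     }

   the copyfn built above receives the task's destination data block
   and the sender block, copies shared pointers and copy-constructs
   ordinary firstprivate fields, and in the last pass copies the VLA
   bytes and stores the copy's address into the pointer field that the
   VLA's DECL_VALUE_EXPR dereferences.  */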
12215 static void
12216 lower_depend_clauses (tree *pclauses, gimple_seq *iseq, gimple_seq *oseq)
12218 tree c, clauses;
12219 gimple *g;
12220 size_t cnt[4] = { 0, 0, 0, 0 }, idx = 2, i;
12222 clauses = omp_find_clause (*pclauses, OMP_CLAUSE_DEPEND);
12223 gcc_assert (clauses);
12224 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
12225 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
12226 switch (OMP_CLAUSE_DEPEND_KIND (c))
12228 case OMP_CLAUSE_DEPEND_LAST:
12229 /* Lowering already done at gimplification. */
12230 return;
12231 case OMP_CLAUSE_DEPEND_IN:
12232 cnt[2]++;
12233 break;
12234 case OMP_CLAUSE_DEPEND_OUT:
12235 case OMP_CLAUSE_DEPEND_INOUT:
12236 cnt[0]++;
12237 break;
12238 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
12239 cnt[1]++;
12240 break;
12241 case OMP_CLAUSE_DEPEND_DEPOBJ:
12242 cnt[3]++;
12243 break;
12244 case OMP_CLAUSE_DEPEND_SOURCE:
12245 case OMP_CLAUSE_DEPEND_SINK:
12246 /* FALLTHRU */
12247 default:
12248 gcc_unreachable ();
12250 if (cnt[1] || cnt[3])
12251 idx = 5;
12252 size_t total = cnt[0] + cnt[1] + cnt[2] + cnt[3];
12253 tree type = build_array_type_nelts (ptr_type_node, total + idx);
12254 tree array = create_tmp_var (type);
12255 TREE_ADDRESSABLE (array) = 1;
12256 tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
12257 NULL_TREE);
12258 if (idx == 5)
12260 g = gimple_build_assign (r, build_int_cst (ptr_type_node, 0));
12261 gimple_seq_add_stmt (iseq, g);
12262 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
12263 NULL_TREE);
12265 g = gimple_build_assign (r, build_int_cst (ptr_type_node, total));
12266 gimple_seq_add_stmt (iseq, g);
12267 for (i = 0; i < (idx == 5 ? 3 : 1); i++)
12269 r = build4 (ARRAY_REF, ptr_type_node, array,
12270 size_int (i + 1 + (idx == 5)), NULL_TREE, NULL_TREE);
12271 g = gimple_build_assign (r, build_int_cst (ptr_type_node, cnt[i]));
12272 gimple_seq_add_stmt (iseq, g);
12274 for (i = 0; i < 4; i++)
12276 if (cnt[i] == 0)
12277 continue;
12278 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
12279 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
12280 continue;
12281 else
12283 switch (OMP_CLAUSE_DEPEND_KIND (c))
12285 case OMP_CLAUSE_DEPEND_IN:
12286 if (i != 2)
12287 continue;
12288 break;
12289 case OMP_CLAUSE_DEPEND_OUT:
12290 case OMP_CLAUSE_DEPEND_INOUT:
12291 if (i != 0)
12292 continue;
12293 break;
12294 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
12295 if (i != 1)
12296 continue;
12297 break;
12298 case OMP_CLAUSE_DEPEND_DEPOBJ:
12299 if (i != 3)
12300 continue;
12301 break;
12302 default:
12303 gcc_unreachable ();
12305 tree t = OMP_CLAUSE_DECL (c);
12306 t = fold_convert (ptr_type_node, t);
12307 gimplify_expr (&t, iseq, NULL, is_gimple_val, fb_rvalue);
12308 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (idx++),
12309 NULL_TREE, NULL_TREE);
12310 g = gimple_build_assign (r, t);
12311 gimple_seq_add_stmt (iseq, g);
12314 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
12315 OMP_CLAUSE_DEPEND_KIND (c) = OMP_CLAUSE_DEPEND_LAST;
12316 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
12317 OMP_CLAUSE_CHAIN (c) = *pclauses;
12318 *pclauses = c;
12319 tree clobber = build_clobber (type);
12320 g = gimple_build_assign (array, clobber);
12321 gimple_seq_add_stmt (oseq, g);
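/* Layout of the dependence array built above, read off the code (a
   sketch):

   without mutexinoutset/depobj dependences (idx == 2):
     array[0]    total number of addresses
     array[1]    number of out/inout addresses
     array[2..]  the addresses, out/inout first, then in

   with them (idx == 5):
     array[0]    0 (distinguishes the extended format)
     array[1]    total number of addresses
     array[2]    out/inout count
     array[3]    mutexinoutset count
     array[4]    in count
     array[5..]  the addresses, grouped out/inout, mutexinoutset, in,
                 then depobj  */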
12324 /* Lower the OpenMP parallel or task directive in the current statement
12325 in GSI_P. CTX holds context information for the directive. */
12327 static void
12328 lower_omp_taskreg (gimple_stmt_iterator *gsi_p, omp_context *ctx)
12330 tree clauses;
12331 tree child_fn, t;
12332 gimple *stmt = gsi_stmt (*gsi_p);
12333 gbind *par_bind, *bind, *dep_bind = NULL;
12334 gimple_seq par_body;
12335 location_t loc = gimple_location (stmt);
12337 clauses = gimple_omp_taskreg_clauses (stmt);
12338 if (gimple_code (stmt) == GIMPLE_OMP_TASK
12339 && gimple_omp_task_taskwait_p (stmt))
12341 par_bind = NULL;
12342 par_body = NULL;
12344 else
12346 par_bind
12347 = as_a <gbind *> (gimple_seq_first_stmt (gimple_omp_body (stmt)));
12348 par_body = gimple_bind_body (par_bind);
12350 child_fn = ctx->cb.dst_fn;
12351 if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL
12352 && !gimple_omp_parallel_combined_p (stmt))
12354 struct walk_stmt_info wi;
12355 int ws_num = 0;
12357 memset (&wi, 0, sizeof (wi));
12358 wi.info = &ws_num;
12359 wi.val_only = true;
12360 walk_gimple_seq (par_body, check_combined_parallel, NULL, &wi);
12361 if (ws_num == 1)
12362 gimple_omp_parallel_set_combined_p (stmt, true);
12364 gimple_seq dep_ilist = NULL;
12365 gimple_seq dep_olist = NULL;
12366 if (gimple_code (stmt) == GIMPLE_OMP_TASK
12367 && omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
12369 push_gimplify_context ();
12370 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
12371 lower_depend_clauses (gimple_omp_task_clauses_ptr (stmt),
12372 &dep_ilist, &dep_olist);
12375 if (gimple_code (stmt) == GIMPLE_OMP_TASK
12376 && gimple_omp_task_taskwait_p (stmt))
12378 if (dep_bind)
12380 gsi_replace (gsi_p, dep_bind, true);
12381 gimple_bind_add_seq (dep_bind, dep_ilist);
12382 gimple_bind_add_stmt (dep_bind, stmt);
12383 gimple_bind_add_seq (dep_bind, dep_olist);
12384 pop_gimplify_context (dep_bind);
12386 return;
12389 if (ctx->srecord_type)
12390 create_task_copyfn (as_a <gomp_task *> (stmt), ctx);
12392 gimple_seq tskred_ilist = NULL;
12393 gimple_seq tskred_olist = NULL;
12394 if ((is_task_ctx (ctx)
12395 && gimple_omp_task_taskloop_p (ctx->stmt)
12396 && omp_find_clause (gimple_omp_task_clauses (ctx->stmt),
12397 OMP_CLAUSE_REDUCTION))
12398 || (is_parallel_ctx (ctx)
12399 && omp_find_clause (gimple_omp_parallel_clauses (stmt),
12400 OMP_CLAUSE__REDUCTEMP_)))
12402 if (dep_bind == NULL)
12404 push_gimplify_context ();
12405 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
12407 lower_omp_task_reductions (ctx, is_task_ctx (ctx) ? OMP_TASKLOOP
12408 : OMP_PARALLEL,
12409 gimple_omp_taskreg_clauses (ctx->stmt),
12410 &tskred_ilist, &tskred_olist);
12413 push_gimplify_context ();
12415 gimple_seq par_olist = NULL;
12416 gimple_seq par_ilist = NULL;
12417 gimple_seq par_rlist = NULL;
12418 lower_rec_input_clauses (clauses, &par_ilist, &par_olist, ctx, NULL);
12419 lower_omp (&par_body, ctx);
12420 if (gimple_code (stmt) != GIMPLE_OMP_TASK)
12421 lower_reduction_clauses (clauses, &par_rlist, NULL, ctx);
12423 /* Declare all the variables created by mapping and the variables
12424 declared in the scope of the parallel body. */
12425 record_vars_into (ctx->block_vars, child_fn);
12426 maybe_remove_omp_member_access_dummy_vars (par_bind);
12427 record_vars_into (gimple_bind_vars (par_bind), child_fn);
12429 if (ctx->record_type)
12431 ctx->sender_decl
12432 = create_tmp_var (ctx->srecord_type ? ctx->srecord_type
12433 : ctx->record_type, ".omp_data_o");
12434 DECL_NAMELESS (ctx->sender_decl) = 1;
12435 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
12436 gimple_omp_taskreg_set_data_arg (stmt, ctx->sender_decl);
12439 gimple_seq olist = NULL;
12440 gimple_seq ilist = NULL;
12441 lower_send_clauses (clauses, &ilist, &olist, ctx);
12442 lower_send_shared_vars (&ilist, &olist, ctx);
12444 if (ctx->record_type)
12446 tree clobber = build_clobber (TREE_TYPE (ctx->sender_decl));
12447 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
12448 clobber));
12451 /* Once all the expansions are done, sequence all the different
12452 fragments inside gimple_omp_body. */
12454 gimple_seq new_body = NULL;
12456 if (ctx->record_type)
12458 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
12459 /* fixup_child_record_type might have changed receiver_decl's type. */
12460 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
12461 gimple_seq_add_stmt (&new_body,
12462 gimple_build_assign (ctx->receiver_decl, t));
12465 gimple_seq_add_seq (&new_body, par_ilist);
12466 gimple_seq_add_seq (&new_body, par_body);
12467 gimple_seq_add_seq (&new_body, par_rlist);
12468 if (ctx->cancellable)
12469 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
12470 gimple_seq_add_seq (&new_body, par_olist);
12471 new_body = maybe_catch_exception (new_body);
12472 if (gimple_code (stmt) == GIMPLE_OMP_TASK)
12473 gimple_seq_add_stmt (&new_body,
12474 gimple_build_omp_continue (integer_zero_node,
12475 integer_zero_node));
12476 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
12477 gimple_omp_set_body (stmt, new_body);
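/* At this point the construct body has the shape (a sketch):

     .omp_data_i = (struct .omp_data_s *) &.omp_data_o;  // if mapped data
     <par_ilist>   copy-in / privatization
     <par_body>
     <par_rlist>   reductions (not for tasks)
     <par_olist>   copy-back
     OMP_CONTINUE  (tasks only)
     OMP_RETURN

   while ILIST and OLIST, added around the directive itself below, fill
   in and then clobber the .omp_data_o sender block in the parent.  */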
12479 if (dep_bind && gimple_bind_block (par_bind) == NULL_TREE)
12480 bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
12481 else
12482 bind = gimple_build_bind (NULL, NULL, gimple_bind_block (par_bind));
12483 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
12484 gimple_bind_add_seq (bind, ilist);
12485 gimple_bind_add_stmt (bind, stmt);
12486 gimple_bind_add_seq (bind, olist);
12488 pop_gimplify_context (NULL);
12490 if (dep_bind)
12492 gimple_bind_add_seq (dep_bind, dep_ilist);
12493 gimple_bind_add_seq (dep_bind, tskred_ilist);
12494 gimple_bind_add_stmt (dep_bind, bind);
12495 gimple_bind_add_seq (dep_bind, tskred_olist);
12496 gimple_bind_add_seq (dep_bind, dep_olist);
12497 pop_gimplify_context (dep_bind);
12501 /* Lower the GIMPLE_OMP_TARGET in the current statement
12502 in GSI_P. CTX holds context information for the directive. */
12504 static void
12505 lower_omp_target (gimple_stmt_iterator *gsi_p, omp_context *ctx)
12507 tree clauses;
12508 tree child_fn, t, c;
12509 gomp_target *stmt = as_a <gomp_target *> (gsi_stmt (*gsi_p));
12510 gbind *tgt_bind, *bind, *dep_bind = NULL;
12511 gimple_seq tgt_body, olist, ilist, fplist, new_body;
12512 location_t loc = gimple_location (stmt);
12513 bool offloaded, data_region;
12514 unsigned int map_cnt = 0;
12515 tree in_reduction_clauses = NULL_TREE;
12517 offloaded = is_gimple_omp_offloaded (stmt);
12518 switch (gimple_omp_target_kind (stmt))
12520 case GF_OMP_TARGET_KIND_REGION:
12521 tree *p, *q;
12522 q = &in_reduction_clauses;
12523 for (p = gimple_omp_target_clauses_ptr (stmt); *p; )
12524 if (OMP_CLAUSE_CODE (*p) == OMP_CLAUSE_IN_REDUCTION)
12526 *q = *p;
12527 q = &OMP_CLAUSE_CHAIN (*q);
12528 *p = OMP_CLAUSE_CHAIN (*p);
12530 else
12531 p = &OMP_CLAUSE_CHAIN (*p);
12532 *q = NULL_TREE;
12533 *p = in_reduction_clauses;
12534 /* FALLTHRU */
12535 case GF_OMP_TARGET_KIND_UPDATE:
12536 case GF_OMP_TARGET_KIND_ENTER_DATA:
12537 case GF_OMP_TARGET_KIND_EXIT_DATA:
12538 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
12539 case GF_OMP_TARGET_KIND_OACC_KERNELS:
12540 case GF_OMP_TARGET_KIND_OACC_SERIAL:
12541 case GF_OMP_TARGET_KIND_OACC_UPDATE:
12542 case GF_OMP_TARGET_KIND_OACC_ENTER_DATA:
12543 case GF_OMP_TARGET_KIND_OACC_EXIT_DATA:
12544 case GF_OMP_TARGET_KIND_OACC_DECLARE:
12545 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED:
12546 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE:
12547 data_region = false;
12548 break;
12549 case GF_OMP_TARGET_KIND_DATA:
12550 case GF_OMP_TARGET_KIND_OACC_DATA:
12551 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
12552 case GF_OMP_TARGET_KIND_OACC_DATA_KERNELS:
12553 data_region = true;
12554 break;
12555 default:
12556 gcc_unreachable ();
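/* I.e., '#pragma omp target' regions, standalone enter/exit data and
   update directives, and the OpenACC compute and update/enter/exit
   data constructs are handled with DATA_REGION false, while
   '#pragma omp target data' and the OpenACC data/host_data constructs,
   which only manage mappings around a body, set DATA_REGION.  */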
12559 clauses = gimple_omp_target_clauses (stmt);
12561 gimple_seq dep_ilist = NULL;
12562 gimple_seq dep_olist = NULL;
12563 bool has_depend = omp_find_clause (clauses, OMP_CLAUSE_DEPEND) != NULL_TREE;
12564 if (has_depend || in_reduction_clauses)
12566 push_gimplify_context ();
12567 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
12568 if (has_depend)
12569 lower_depend_clauses (gimple_omp_target_clauses_ptr (stmt),
12570 &dep_ilist, &dep_olist);
12571 if (in_reduction_clauses)
12572 lower_rec_input_clauses (in_reduction_clauses, &dep_ilist, &dep_olist,
12573 ctx, NULL);
12576 tgt_bind = NULL;
12577 tgt_body = NULL;
12578 if (offloaded)
12580 tgt_bind = gimple_seq_first_stmt_as_a_bind (gimple_omp_body (stmt));
12581 tgt_body = gimple_bind_body (tgt_bind);
12583 else if (data_region)
12584 tgt_body = gimple_omp_body (stmt);
12585 child_fn = ctx->cb.dst_fn;
12587 push_gimplify_context ();
12588 fplist = NULL;
12590 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
12591 switch (OMP_CLAUSE_CODE (c))
12593 tree var, x;
12595 default:
12596 break;
12597 case OMP_CLAUSE_MAP:
12598 #if CHECKING_P
12599 /* First check what we're prepared to handle in the following. */
12600 switch (OMP_CLAUSE_MAP_KIND (c))
12602 case GOMP_MAP_ALLOC:
12603 case GOMP_MAP_TO:
12604 case GOMP_MAP_FROM:
12605 case GOMP_MAP_TOFROM:
12606 case GOMP_MAP_POINTER:
12607 case GOMP_MAP_TO_PSET:
12608 case GOMP_MAP_DELETE:
12609 case GOMP_MAP_RELEASE:
12610 case GOMP_MAP_ALWAYS_TO:
12611 case GOMP_MAP_ALWAYS_FROM:
12612 case GOMP_MAP_ALWAYS_TOFROM:
12613 case GOMP_MAP_FIRSTPRIVATE_POINTER:
12614 case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
12615 case GOMP_MAP_STRUCT:
12616 case GOMP_MAP_ALWAYS_POINTER:
12617 case GOMP_MAP_ATTACH:
12618 case GOMP_MAP_DETACH:
12619 break;
12620 case GOMP_MAP_IF_PRESENT:
12621 case GOMP_MAP_FORCE_ALLOC:
12622 case GOMP_MAP_FORCE_TO:
12623 case GOMP_MAP_FORCE_FROM:
12624 case GOMP_MAP_FORCE_TOFROM:
12625 case GOMP_MAP_FORCE_PRESENT:
12626 case GOMP_MAP_FORCE_DEVICEPTR:
12627 case GOMP_MAP_DEVICE_RESIDENT:
12628 case GOMP_MAP_LINK:
12629 case GOMP_MAP_FORCE_DETACH:
12630 gcc_assert (is_gimple_omp_oacc (stmt));
12631 break;
12632 default:
12633 gcc_unreachable ();
12635 #endif
12636 /* FALLTHRU */
12637 case OMP_CLAUSE_TO:
12638 case OMP_CLAUSE_FROM:
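/* For OpenACC, 'firstprivate' clauses are lowered like mapping clauses;
the OMP_CLAUSE_FIRSTPRIVATE case below jumps to this label. */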
12639 oacc_firstprivate:
12640 var = OMP_CLAUSE_DECL (c);
12641 if (!DECL_P (var))
12643 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP
12644 || (!OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
12645 && (OMP_CLAUSE_MAP_KIND (c)
12646 != GOMP_MAP_FIRSTPRIVATE_POINTER)))
12647 map_cnt++;
12648 continue;
12651 if (DECL_SIZE (var)
12652 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
12654 tree var2 = DECL_VALUE_EXPR (var);
12655 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
12656 var2 = TREE_OPERAND (var2, 0);
12657 gcc_assert (DECL_P (var2));
12658 var = var2;
12661 if (offloaded
12662 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12663 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
12664 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
12666 if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
12668 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx))
12669 && varpool_node::get_create (var)->offloadable)
12670 continue;
12672 tree type = build_pointer_type (TREE_TYPE (var));
12673 tree new_var = lookup_decl (var, ctx);
12674 x = create_tmp_var_raw (type, get_name (new_var));
12675 gimple_add_tmp_var (x);
12676 x = build_simple_mem_ref (x);
12677 SET_DECL_VALUE_EXPR (new_var, x);
12678 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12680 continue;
12683 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12684 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
12685 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)
12686 && is_omp_target (stmt))
12688 gcc_assert (maybe_lookup_field (c, ctx));
12689 map_cnt++;
12690 continue;
12693 if (!maybe_lookup_field (var, ctx))
12694 continue;
12696 /* Don't remap compute constructs' reduction variables, because the
12697 intermediate result must be local to each gang. */
12698 if (offloaded && !(OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12699 && is_gimple_omp_oacc (ctx->stmt)
12700 && OMP_CLAUSE_MAP_IN_REDUCTION (c)))
12702 x = build_receiver_ref (var, true, ctx);
12703 tree new_var = lookup_decl (var, ctx);
12705 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12706 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
12707 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
12708 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
12709 x = build_simple_mem_ref (x);
12710 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
12712 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
12713 if (omp_privatize_by_reference (new_var)
12714 && (TREE_CODE (TREE_TYPE (new_var)) != POINTER_TYPE
12715 || DECL_BY_REFERENCE (var)))
12717 /* Create a local object to hold the instance
12718 value. */
12719 tree type = TREE_TYPE (TREE_TYPE (new_var));
12720 const char *id = IDENTIFIER_POINTER (DECL_NAME (new_var));
12721 tree inst = create_tmp_var (type, id);
12722 gimplify_assign (inst, fold_indirect_ref (x), &fplist);
12723 x = build_fold_addr_expr (inst);
12725 gimplify_assign (new_var, x, &fplist);
12727 else if (DECL_P (new_var))
12729 SET_DECL_VALUE_EXPR (new_var, x);
12730 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12732 else
12733 gcc_unreachable ();
12735 map_cnt++;
12736 break;
12738 case OMP_CLAUSE_FIRSTPRIVATE:
12739 gcc_checking_assert (offloaded);
12740 if (is_gimple_omp_oacc (ctx->stmt))
12742 /* No 'firstprivate' clauses on OpenACC 'kernels'. */
12743 gcc_checking_assert (!is_oacc_kernels (ctx));
12744 /* Likewise, on OpenACC 'kernels' decomposed parts. */
12745 gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx));
12747 goto oacc_firstprivate;
12749 map_cnt++;
12750 var = OMP_CLAUSE_DECL (c);
12751 if (!omp_privatize_by_reference (var)
12752 && !is_gimple_reg_type (TREE_TYPE (var)))
12754 tree new_var = lookup_decl (var, ctx);
12755 if (is_variable_sized (var))
12757 tree pvar = DECL_VALUE_EXPR (var);
12758 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
12759 pvar = TREE_OPERAND (pvar, 0);
12760 gcc_assert (DECL_P (pvar));
12761 tree new_pvar = lookup_decl (pvar, ctx);
12762 x = build_fold_indirect_ref (new_pvar);
12763 TREE_THIS_NOTRAP (x) = 1;
12765 else
12766 x = build_receiver_ref (var, true, ctx);
12767 SET_DECL_VALUE_EXPR (new_var, x);
12768 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12770 break;
12772 case OMP_CLAUSE_PRIVATE:
12773 gcc_checking_assert (offloaded);
12774 if (is_gimple_omp_oacc (ctx->stmt))
12776 /* No 'private' clauses on OpenACC 'kernels'. */
12777 gcc_checking_assert (!is_oacc_kernels (ctx));
12778 /* Likewise, on OpenACC 'kernels' decomposed parts. */
12779 gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx));
12781 break;
12783 var = OMP_CLAUSE_DECL (c);
12784 if (is_variable_sized (var))
12786 tree new_var = lookup_decl (var, ctx);
12787 tree pvar = DECL_VALUE_EXPR (var);
12788 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
12789 pvar = TREE_OPERAND (pvar, 0);
12790 gcc_assert (DECL_P (pvar));
12791 tree new_pvar = lookup_decl (pvar, ctx);
12792 x = build_fold_indirect_ref (new_pvar);
12793 TREE_THIS_NOTRAP (x) = 1;
12794 SET_DECL_VALUE_EXPR (new_var, x);
12795 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12797 break;
12799 case OMP_CLAUSE_USE_DEVICE_PTR:
12800 case OMP_CLAUSE_USE_DEVICE_ADDR:
12801 case OMP_CLAUSE_IS_DEVICE_PTR:
12802 var = OMP_CLAUSE_DECL (c);
12803 map_cnt++;
12804 if (is_variable_sized (var))
12806 tree new_var = lookup_decl (var, ctx);
12807 tree pvar = DECL_VALUE_EXPR (var);
12808 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
12809 pvar = TREE_OPERAND (pvar, 0);
12810 gcc_assert (DECL_P (pvar));
12811 tree new_pvar = lookup_decl (pvar, ctx);
12812 x = build_fold_indirect_ref (new_pvar);
12813 TREE_THIS_NOTRAP (x) = 1;
12814 SET_DECL_VALUE_EXPR (new_var, x);
12815 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12817 else if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
12818 && !omp_privatize_by_reference (var)
12819 && !omp_is_allocatable_or_ptr (var)
12820 && !lang_hooks.decls.omp_array_data (var, true))
12821 || TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
12823 tree new_var = lookup_decl (var, ctx);
12824 tree type = build_pointer_type (TREE_TYPE (var));
12825 x = create_tmp_var_raw (type, get_name (new_var));
12826 gimple_add_tmp_var (x);
12827 x = build_simple_mem_ref (x);
12828 SET_DECL_VALUE_EXPR (new_var, x);
12829 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12831 else
12833 tree new_var = lookup_decl (var, ctx);
12834 x = create_tmp_var_raw (TREE_TYPE (new_var), get_name (new_var));
12835 gimple_add_tmp_var (x);
12836 SET_DECL_VALUE_EXPR (new_var, x);
12837 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12839 break;
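/* Lower the body of the construct. TARGET_NESTING_LEVEL lets lower_omp
know to fold statements whose folding was deferred during gimplification
inside offloaded regions. */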
12842 if (offloaded)
12844 target_nesting_level++;
12845 lower_omp (&tgt_body, ctx);
12846 target_nesting_level--;
12848 else if (data_region)
12849 lower_omp (&tgt_body, ctx);
12851 if (offloaded)
12853 /* Declare all the variables created by mapping and the variables
12854 declared in the scope of the target body. */
12855 record_vars_into (ctx->block_vars, child_fn);
12856 maybe_remove_omp_member_access_dummy_vars (tgt_bind);
12857 record_vars_into (gimple_bind_vars (tgt_bind), child_fn);
12860 olist = NULL;
12861 ilist = NULL;
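/* If anything was mapped, build the sender triple handed to the libgomp
runtime: in outline, .omp_data_arr (a record holding the addresses or
values of the mapped variables), .omp_data_sizes (one size per mapping),
and .omp_data_kinds (one map kind per mapping, with the log2 alignment
packed into its upper bits). */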
12862 if (ctx->record_type)
12864 ctx->sender_decl
12865 = create_tmp_var (ctx->record_type, ".omp_data_arr");
12866 DECL_NAMELESS (ctx->sender_decl) = 1;
12867 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
12868 t = make_tree_vec (3);
12869 TREE_VEC_ELT (t, 0) = ctx->sender_decl;
12870 TREE_VEC_ELT (t, 1)
12871 = create_tmp_var (build_array_type_nelts (size_type_node, map_cnt),
12872 ".omp_data_sizes");
12873 DECL_NAMELESS (TREE_VEC_ELT (t, 1)) = 1;
12874 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 1)) = 1;
12875 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 1;
12876 tree tkind_type = short_unsigned_type_node;
12877 int talign_shift = 8;
12878 TREE_VEC_ELT (t, 2)
12879 = create_tmp_var (build_array_type_nelts (tkind_type, map_cnt),
12880 ".omp_data_kinds");
12881 DECL_NAMELESS (TREE_VEC_ELT (t, 2)) = 1;
12882 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 2)) = 1;
12883 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 1;
12884 gimple_omp_target_set_data_arg (stmt, t);
12886 vec<constructor_elt, va_gc> *vsize;
12887 vec<constructor_elt, va_gc> *vkind;
12888 vec_alloc (vsize, map_cnt);
12889 vec_alloc (vkind, map_cnt);
12890 unsigned int map_idx = 0;
12892 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
12893 switch (OMP_CLAUSE_CODE (c))
12895 tree ovar, nc, s, purpose, var, x, type;
12896 unsigned int talign;
12898 default:
12899 break;
12901 case OMP_CLAUSE_MAP:
12902 case OMP_CLAUSE_TO:
12903 case OMP_CLAUSE_FROM:
12904 oacc_firstprivate_map:
12905 nc = c;
12906 ovar = OMP_CLAUSE_DECL (c);
12907 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12908 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
12909 || (OMP_CLAUSE_MAP_KIND (c)
12910 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
12911 break;
12912 if (!DECL_P (ovar))
12914 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12915 && OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
12917 nc = OMP_CLAUSE_CHAIN (c);
12918 gcc_checking_assert (OMP_CLAUSE_DECL (nc)
12919 == get_base_address (ovar));
12920 ovar = OMP_CLAUSE_DECL (nc);
12922 else
12924 tree x = build_sender_ref (ovar, ctx);
12925 tree v = ovar;
12926 if (in_reduction_clauses
12927 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12928 && OMP_CLAUSE_MAP_IN_REDUCTION (c))
12930 v = unshare_expr (v);
12931 tree *p = &v;
12932 while (handled_component_p (*p)
12933 || TREE_CODE (*p) == INDIRECT_REF
12934 || TREE_CODE (*p) == ADDR_EXPR
12935 || TREE_CODE (*p) == MEM_REF
12936 || TREE_CODE (*p) == NON_LVALUE_EXPR)
12937 p = &TREE_OPERAND (*p, 0);
12938 tree d = *p;
12939 if (is_variable_sized (d))
12941 gcc_assert (DECL_HAS_VALUE_EXPR_P (d));
12942 d = DECL_VALUE_EXPR (d);
12943 gcc_assert (TREE_CODE (d) == INDIRECT_REF);
12944 d = TREE_OPERAND (d, 0);
12945 gcc_assert (DECL_P (d));
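/* The decl to substitute for D in the in-reduction case was recorded in
CTX->field_map keyed by the address of D's DECL_CONTEXT field; retrieve
it from there. */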
12947 splay_tree_key key
12948 = (splay_tree_key) &DECL_CONTEXT (d);
12949 tree nd = (tree) splay_tree_lookup (ctx->field_map,
12950 key)->value;
12951 if (d == *p)
12952 *p = nd;
12953 else
12954 *p = build_fold_indirect_ref (nd);
12956 v = build_fold_addr_expr_with_type (v, ptr_type_node);
12957 gimplify_assign (x, v, &ilist);
12958 nc = NULL_TREE;
12961 else
12963 if (DECL_SIZE (ovar)
12964 && TREE_CODE (DECL_SIZE (ovar)) != INTEGER_CST)
12966 tree ovar2 = DECL_VALUE_EXPR (ovar);
12967 gcc_assert (TREE_CODE (ovar2) == INDIRECT_REF);
12968 ovar2 = TREE_OPERAND (ovar2, 0);
12969 gcc_assert (DECL_P (ovar2));
12970 ovar = ovar2;
12972 if (!maybe_lookup_field (ovar, ctx)
12973 && !(OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12974 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
12975 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)))
12976 continue;
12979 talign = TYPE_ALIGN_UNIT (TREE_TYPE (ovar));
12980 if (DECL_P (ovar) && DECL_ALIGN_UNIT (ovar) > talign)
12981 talign = DECL_ALIGN_UNIT (ovar);
12983 var = NULL_TREE;
12984 if (nc)
12986 if (in_reduction_clauses
12987 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12988 && OMP_CLAUSE_MAP_IN_REDUCTION (c))
12990 tree d = ovar;
12991 if (is_variable_sized (d))
12993 gcc_assert (DECL_HAS_VALUE_EXPR_P (d));
12994 d = DECL_VALUE_EXPR (d);
12995 gcc_assert (TREE_CODE (d) == INDIRECT_REF);
12996 d = TREE_OPERAND (d, 0);
12997 gcc_assert (DECL_P (d));
12999 splay_tree_key key
13000 = (splay_tree_key) &DECL_CONTEXT (d);
13001 tree nd = (tree) splay_tree_lookup (ctx->field_map,
13002 key)->value;
13003 if (d == ovar)
13004 var = nd;
13005 else
13006 var = build_fold_indirect_ref (nd);
13008 else
13009 var = lookup_decl_in_outer_ctx (ovar, ctx);
13011 if (nc
13012 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
13013 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
13014 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)
13015 && is_omp_target (stmt))
13017 x = build_sender_ref (c, ctx);
13018 gimplify_assign (x, build_fold_addr_expr (var), &ilist);
13020 else if (nc)
13022 x = build_sender_ref (ovar, ctx);
13024 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
13025 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
13026 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
13027 && TREE_CODE (TREE_TYPE (ovar)) == ARRAY_TYPE)
13029 gcc_assert (offloaded);
13030 tree avar
13031 = create_tmp_var (TREE_TYPE (TREE_TYPE (x)));
13032 mark_addressable (avar);
13033 gimplify_assign (avar, build_fold_addr_expr (var), &ilist);
13034 talign = DECL_ALIGN_UNIT (avar);
13035 avar = build_fold_addr_expr (avar);
13036 gimplify_assign (x, avar, &ilist);
13038 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
13040 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
13041 if (!omp_privatize_by_reference (var))
13043 if (is_gimple_reg (var)
13044 && OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
13045 suppress_warning (var);
13046 var = build_fold_addr_expr (var);
13048 else
13049 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
13050 gimplify_assign (x, var, &ilist);
13052 else if (is_gimple_reg (var))
13054 gcc_assert (offloaded);
13055 tree avar = create_tmp_var (TREE_TYPE (var));
13056 mark_addressable (avar);
13057 enum gomp_map_kind map_kind = OMP_CLAUSE_MAP_KIND (c);
13058 if (GOMP_MAP_COPY_TO_P (map_kind)
13059 || map_kind == GOMP_MAP_POINTER
13060 || map_kind == GOMP_MAP_TO_PSET
13061 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
13063 /* If we need to initialize a temporary
13064 with VAR because it is not addressable, and
13065 the variable hasn't been initialized yet, then
13066 we'll get a warning for the store to avar.
13067 Don't warn in that case; the mapping might
13068 be implicit. */
13069 suppress_warning (var, OPT_Wuninitialized);
13070 gimplify_assign (avar, var, &ilist);
13072 avar = build_fold_addr_expr (avar);
13073 gimplify_assign (x, avar, &ilist);
13074 if ((GOMP_MAP_COPY_FROM_P (map_kind)
13075 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
13076 && !TYPE_READONLY (TREE_TYPE (var)))
13078 x = unshare_expr (x);
13079 x = build_simple_mem_ref (x);
13080 gimplify_assign (var, x, &olist);
13083 else
13085 /* While MAP is handled explicitly by the FE,
13086 for 'target update', only the identifier is passed. */
13087 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FROM
13088 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TO)
13089 && (omp_is_allocatable_or_ptr (var)
13090 && omp_check_optional_argument (var, false)))
13091 var = build_fold_indirect_ref (var);
13092 else if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FROM
13093 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_TO)
13094 || (!omp_is_allocatable_or_ptr (var)
13095 && !omp_check_optional_argument (var, false)))
13096 var = build_fold_addr_expr (var);
13097 gimplify_assign (x, var, &ilist);
13100 s = NULL_TREE;
13101 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
13103 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
13104 s = TREE_TYPE (ovar);
13105 if (TREE_CODE (s) == REFERENCE_TYPE
13106 || omp_check_optional_argument (ovar, false))
13107 s = TREE_TYPE (s);
13108 s = TYPE_SIZE_UNIT (s);
13110 else
13111 s = OMP_CLAUSE_SIZE (c);
13112 if (s == NULL_TREE)
13113 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
13114 s = fold_convert (size_type_node, s);
13115 purpose = size_int (map_idx++);
13116 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
13117 if (TREE_CODE (s) != INTEGER_CST)
13118 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
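/* Compute the map kind for this entry. TKIND_ZERO is the variant used if
the mapped length turns out to be zero; when the two differ and the size
isn't known at compile time, a COND_EXPR selecting between them is
emitted below. */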
13120 unsigned HOST_WIDE_INT tkind, tkind_zero;
13121 switch (OMP_CLAUSE_CODE (c))
13123 case OMP_CLAUSE_MAP:
13124 tkind = OMP_CLAUSE_MAP_KIND (c);
13125 tkind_zero = tkind;
13126 if (OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c))
13127 switch (tkind)
13129 case GOMP_MAP_ALLOC:
13130 case GOMP_MAP_IF_PRESENT:
13131 case GOMP_MAP_TO:
13132 case GOMP_MAP_FROM:
13133 case GOMP_MAP_TOFROM:
13134 case GOMP_MAP_ALWAYS_TO:
13135 case GOMP_MAP_ALWAYS_FROM:
13136 case GOMP_MAP_ALWAYS_TOFROM:
13137 case GOMP_MAP_RELEASE:
13138 case GOMP_MAP_FORCE_TO:
13139 case GOMP_MAP_FORCE_FROM:
13140 case GOMP_MAP_FORCE_TOFROM:
13141 case GOMP_MAP_FORCE_PRESENT:
13142 tkind_zero = GOMP_MAP_ZERO_LEN_ARRAY_SECTION;
13143 break;
13144 case GOMP_MAP_DELETE:
13145 tkind_zero = GOMP_MAP_DELETE_ZERO_LEN_ARRAY_SECTION;
13146 default:
13147 break;
13149 if (tkind_zero != tkind)
13151 if (integer_zerop (s))
13152 tkind = tkind_zero;
13153 else if (integer_nonzerop (s))
13154 tkind_zero = tkind;
13156 break;
13157 case OMP_CLAUSE_FIRSTPRIVATE:
13158 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
13159 tkind = GOMP_MAP_TO;
13160 tkind_zero = tkind;
13161 break;
13162 case OMP_CLAUSE_TO:
13163 tkind = GOMP_MAP_TO;
13164 tkind_zero = tkind;
13165 break;
13166 case OMP_CLAUSE_FROM:
13167 tkind = GOMP_MAP_FROM;
13168 tkind_zero = tkind;
13169 break;
13170 default:
13171 gcc_unreachable ();
13173 gcc_checking_assert (tkind
13174 < (HOST_WIDE_INT_C (1U) << talign_shift));
13175 gcc_checking_assert (tkind_zero
13176 < (HOST_WIDE_INT_C (1U) << talign_shift));
13177 talign = ceil_log2 (talign);
13178 tkind |= talign << talign_shift;
13179 tkind_zero |= talign << talign_shift;
13180 gcc_checking_assert (tkind
13181 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
13182 gcc_checking_assert (tkind_zero
13183 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
13184 if (tkind == tkind_zero)
13185 x = build_int_cstu (tkind_type, tkind);
13186 else
13188 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 0;
13189 x = build3 (COND_EXPR, tkind_type,
13190 fold_build2 (EQ_EXPR, boolean_type_node,
13191 unshare_expr (s), size_zero_node),
13192 build_int_cstu (tkind_type, tkind_zero),
13193 build_int_cstu (tkind_type, tkind));
13195 CONSTRUCTOR_APPEND_ELT (vkind, purpose, x);
13196 if (nc && nc != c)
13197 c = nc;
13198 break;
13200 case OMP_CLAUSE_FIRSTPRIVATE:
13201 if (is_gimple_omp_oacc (ctx->stmt))
13202 goto oacc_firstprivate_map;
13203 ovar = OMP_CLAUSE_DECL (c);
13204 if (omp_privatize_by_reference (ovar))
13205 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
13206 else
13207 talign = DECL_ALIGN_UNIT (ovar);
13208 var = lookup_decl_in_outer_ctx (ovar, ctx);
13209 x = build_sender_ref (ovar, ctx);
13210 tkind = GOMP_MAP_FIRSTPRIVATE;
13211 type = TREE_TYPE (ovar);
13212 if (omp_privatize_by_reference (ovar))
13213 type = TREE_TYPE (type);
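/* Small scalars and pointers are passed by value, smuggled through the
pointer-sized slot itself (GOMP_MAP_FIRSTPRIVATE_INT); in outline,
x = (void *) (uintptr_t) var. */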
13214 if ((INTEGRAL_TYPE_P (type)
13215 && TYPE_PRECISION (type) <= POINTER_SIZE)
13216 || TREE_CODE (type) == POINTER_TYPE)
13218 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
13219 tree t = var;
13220 if (omp_privatize_by_reference (var))
13221 t = build_simple_mem_ref (var);
13222 else if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
13223 suppress_warning (var);
13224 if (TREE_CODE (type) != POINTER_TYPE)
13225 t = fold_convert (pointer_sized_int_node, t);
13226 t = fold_convert (TREE_TYPE (x), t);
13227 gimplify_assign (x, t, &ilist);
13229 else if (omp_privatize_by_reference (var))
13230 gimplify_assign (x, var, &ilist);
13231 else if (is_gimple_reg (var))
13233 tree avar = create_tmp_var (TREE_TYPE (var));
13234 mark_addressable (avar);
13235 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
13236 suppress_warning (var);
13237 gimplify_assign (avar, var, &ilist);
13238 avar = build_fold_addr_expr (avar);
13239 gimplify_assign (x, avar, &ilist);
13241 else
13243 var = build_fold_addr_expr (var);
13244 gimplify_assign (x, var, &ilist);
13246 if (tkind == GOMP_MAP_FIRSTPRIVATE_INT)
13247 s = size_int (0);
13248 else if (omp_privatize_by_reference (ovar))
13249 s = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
13250 else
13251 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
13252 s = fold_convert (size_type_node, s);
13253 purpose = size_int (map_idx++);
13254 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
13255 if (TREE_CODE (s) != INTEGER_CST)
13256 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
13258 gcc_checking_assert (tkind
13259 < (HOST_WIDE_INT_C (1U) << talign_shift));
13260 talign = ceil_log2 (talign);
13261 tkind |= talign << talign_shift;
13262 gcc_checking_assert (tkind
13263 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
13264 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
13265 build_int_cstu (tkind_type, tkind));
13266 break;
13268 case OMP_CLAUSE_USE_DEVICE_PTR:
13269 case OMP_CLAUSE_USE_DEVICE_ADDR:
13270 case OMP_CLAUSE_IS_DEVICE_PTR:
13271 ovar = OMP_CLAUSE_DECL (c);
13272 var = lookup_decl_in_outer_ctx (ovar, ctx);
13274 if (lang_hooks.decls.omp_array_data (ovar, true))
13276 tkind = (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR
13277 ? GOMP_MAP_USE_DEVICE_PTR : GOMP_MAP_FIRSTPRIVATE_INT);
13278 x = build_sender_ref ((splay_tree_key) &DECL_NAME (ovar), ctx);
13280 else if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR)
13282 tkind = GOMP_MAP_USE_DEVICE_PTR;
13283 x = build_sender_ref ((splay_tree_key) &DECL_UID (ovar), ctx);
13285 else
13287 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
13288 x = build_sender_ref (ovar, ctx);
13291 if (is_gimple_omp_oacc (ctx->stmt))
13293 gcc_assert (tkind == GOMP_MAP_USE_DEVICE_PTR);
13295 if (OMP_CLAUSE_USE_DEVICE_PTR_IF_PRESENT (c))
13296 tkind = GOMP_MAP_USE_DEVICE_PTR_IF_PRESENT;
13299 type = TREE_TYPE (ovar);
13300 if (lang_hooks.decls.omp_array_data (ovar, true))
13301 var = lang_hooks.decls.omp_array_data (ovar, false);
13302 else if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
13303 && !omp_privatize_by_reference (ovar)
13304 && !omp_is_allocatable_or_ptr (ovar))
13305 || TREE_CODE (type) == ARRAY_TYPE)
13306 var = build_fold_addr_expr (var);
13307 else
13309 if (omp_privatize_by_reference (ovar)
13310 || omp_check_optional_argument (ovar, false)
13311 || omp_is_allocatable_or_ptr (ovar))
13313 type = TREE_TYPE (type);
13314 if (POINTER_TYPE_P (type)
13315 && TREE_CODE (type) != ARRAY_TYPE
13316 && ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_USE_DEVICE_ADDR
13317 && !omp_is_allocatable_or_ptr (ovar))
13318 || (omp_privatize_by_reference (ovar)
13319 && omp_is_allocatable_or_ptr (ovar))))
13320 var = build_simple_mem_ref (var);
13321 var = fold_convert (TREE_TYPE (x), var);
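/* For optional arguments (e.g. Fortran OPTIONAL dummies), only read VAR
when it is actually present; in outline, x = present ? var : NULL. */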
13324 tree present;
13325 present = omp_check_optional_argument (ovar, true);
13326 if (present)
13328 tree null_label = create_artificial_label (UNKNOWN_LOCATION);
13329 tree notnull_label = create_artificial_label (UNKNOWN_LOCATION);
13330 tree opt_arg_label = create_artificial_label (UNKNOWN_LOCATION);
13331 tree new_x = unshare_expr (x);
13332 gimplify_expr (&present, &ilist, NULL, is_gimple_val,
13333 fb_rvalue);
13334 gcond *cond = gimple_build_cond_from_tree (present,
13335 notnull_label,
13336 null_label);
13337 gimple_seq_add_stmt (&ilist, cond);
13338 gimple_seq_add_stmt (&ilist, gimple_build_label (null_label));
13339 gimplify_assign (new_x, null_pointer_node, &ilist);
13340 gimple_seq_add_stmt (&ilist, gimple_build_goto (opt_arg_label));
13341 gimple_seq_add_stmt (&ilist,
13342 gimple_build_label (notnull_label));
13343 gimplify_assign (x, var, &ilist);
13344 gimple_seq_add_stmt (&ilist,
13345 gimple_build_label (opt_arg_label));
13347 else
13348 gimplify_assign (x, var, &ilist);
13349 s = size_int (0);
13350 purpose = size_int (map_idx++);
13351 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
13352 gcc_checking_assert (tkind
13353 < (HOST_WIDE_INT_C (1U) << talign_shift));
13354 gcc_checking_assert (tkind
13355 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
13356 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
13357 build_int_cstu (tkind_type, tkind));
13358 break;
13361 gcc_assert (map_idx == map_cnt);
13363 DECL_INITIAL (TREE_VEC_ELT (t, 1))
13364 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 1)), vsize);
13365 DECL_INITIAL (TREE_VEC_ELT (t, 2))
13366 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 2)), vkind);
13367 for (int i = 1; i <= 2; i++)
13368 if (!TREE_STATIC (TREE_VEC_ELT (t, i)))
13370 gimple_seq initlist = NULL;
13371 force_gimple_operand (build1 (DECL_EXPR, void_type_node,
13372 TREE_VEC_ELT (t, i)),
13373 &initlist, true, NULL_TREE);
13374 gimple_seq_add_seq (&ilist, initlist);
13376 tree clobber = build_clobber (TREE_TYPE (TREE_VEC_ELT (t, i)));
13377 gimple_seq_add_stmt (&olist,
13378 gimple_build_assign (TREE_VEC_ELT (t, i),
13379 clobber));
13381 else if (omp_maybe_offloaded_ctx (ctx->outer))
13383 tree id = get_identifier ("omp declare target");
13384 tree decl = TREE_VEC_ELT (t, i);
13385 DECL_ATTRIBUTES (decl)
13386 = tree_cons (id, NULL_TREE, DECL_ATTRIBUTES (decl));
13387 varpool_node *node = varpool_node::get (decl);
13388 if (node)
13390 node->offloadable = 1;
13391 if (ENABLE_OFFLOADING)
13393 g->have_offload = true;
13394 vec_safe_push (offload_vars, t);
13399 tree clobber = build_clobber (ctx->record_type);
13400 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
13401 clobber));
13404 /* Once all the expansions are done, sequence all the different
13405 fragments inside gimple_omp_body. */
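/* In outline: receiver_decl = &sender_decl; <firstprivate inits>;
<fork>; <lowered body>; <join>; OMP_RETURN, all wrapped (together with
ILIST and OLIST) in a bind built below. */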
13407 new_body = NULL;
13409 if (offloaded
13410 && ctx->record_type)
13412 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
13413 /* fixup_child_record_type might have changed receiver_decl's type. */
13414 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
13415 gimple_seq_add_stmt (&new_body,
13416 gimple_build_assign (ctx->receiver_decl, t));
13418 gimple_seq_add_seq (&new_body, fplist);
13420 if (offloaded || data_region)
13422 tree prev = NULL_TREE;
13423 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
13424 switch (OMP_CLAUSE_CODE (c))
13426 tree var, x;
13427 default:
13428 break;
13429 case OMP_CLAUSE_FIRSTPRIVATE:
13430 if (is_gimple_omp_oacc (ctx->stmt))
13431 break;
13432 var = OMP_CLAUSE_DECL (c);
13433 if (omp_privatize_by_reference (var)
13434 || is_gimple_reg_type (TREE_TYPE (var)))
13436 tree new_var = lookup_decl (var, ctx);
13437 tree type;
13438 type = TREE_TYPE (var);
13439 if (omp_privatize_by_reference (var))
13440 type = TREE_TYPE (type);
13441 if ((INTEGRAL_TYPE_P (type)
13442 && TYPE_PRECISION (type) <= POINTER_SIZE)
13443 || TREE_CODE (type) == POINTER_TYPE)
13445 x = build_receiver_ref (var, false, ctx);
13446 if (TREE_CODE (type) != POINTER_TYPE)
13447 x = fold_convert (pointer_sized_int_node, x);
13448 x = fold_convert (type, x);
13449 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
13450 fb_rvalue);
13451 if (omp_privatize_by_reference (var))
13453 tree v = create_tmp_var_raw (type, get_name (var));
13454 gimple_add_tmp_var (v);
13455 TREE_ADDRESSABLE (v) = 1;
13456 gimple_seq_add_stmt (&new_body,
13457 gimple_build_assign (v, x));
13458 x = build_fold_addr_expr (v);
13460 gimple_seq_add_stmt (&new_body,
13461 gimple_build_assign (new_var, x));
13463 else
13465 bool by_ref = !omp_privatize_by_reference (var);
13466 x = build_receiver_ref (var, by_ref, ctx);
13467 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
13468 fb_rvalue);
13469 gimple_seq_add_stmt (&new_body,
13470 gimple_build_assign (new_var, x));
13473 else if (is_variable_sized (var))
13475 tree pvar = DECL_VALUE_EXPR (var);
13476 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
13477 pvar = TREE_OPERAND (pvar, 0);
13478 gcc_assert (DECL_P (pvar));
13479 tree new_var = lookup_decl (pvar, ctx);
13480 x = build_receiver_ref (var, false, ctx);
13481 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
13482 gimple_seq_add_stmt (&new_body,
13483 gimple_build_assign (new_var, x));
13485 break;
13486 case OMP_CLAUSE_PRIVATE:
13487 if (is_gimple_omp_oacc (ctx->stmt))
13488 break;
13489 var = OMP_CLAUSE_DECL (c);
13490 if (omp_privatize_by_reference (var))
13492 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
13493 tree new_var = lookup_decl (var, ctx);
13494 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
13495 if (TREE_CONSTANT (x))
13497 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
13498 get_name (var));
13499 gimple_add_tmp_var (x);
13500 TREE_ADDRESSABLE (x) = 1;
13501 x = build_fold_addr_expr_loc (clause_loc, x);
13503 else
13504 break;
13506 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
13507 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
13508 gimple_seq_add_stmt (&new_body,
13509 gimple_build_assign (new_var, x));
13511 break;
13512 case OMP_CLAUSE_USE_DEVICE_PTR:
13513 case OMP_CLAUSE_USE_DEVICE_ADDR:
13514 case OMP_CLAUSE_IS_DEVICE_PTR:
13515 tree new_var;
13516 gimple_seq assign_body;
13517 bool is_array_data;
13518 bool do_optional_check;
13519 assign_body = NULL;
13520 do_optional_check = false;
13521 var = OMP_CLAUSE_DECL (c);
13522 is_array_data = lang_hooks.decls.omp_array_data (var, true) != NULL;
13524 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR)
13525 x = build_sender_ref (is_array_data
13526 ? (splay_tree_key) &DECL_NAME (var)
13527 : (splay_tree_key) &DECL_UID (var), ctx);
13528 else
13529 x = build_receiver_ref (var, false, ctx);
13531 if (is_array_data)
13533 bool is_ref = omp_privatize_by_reference (var);
13534 do_optional_check = true;
13535 /* First, we copy the descriptor data from the host; then
13536 we update its data to point to the target address. */
13537 new_var = lookup_decl (var, ctx);
13538 new_var = DECL_VALUE_EXPR (new_var);
13539 tree v = new_var;
13541 if (is_ref)
13543 var = build_fold_indirect_ref (var);
13544 gimplify_expr (&var, &assign_body, NULL, is_gimple_val,
13545 fb_rvalue);
13546 v = create_tmp_var_raw (TREE_TYPE (var), get_name (var));
13547 gimple_add_tmp_var (v);
13548 TREE_ADDRESSABLE (v) = 1;
13549 gimple_seq_add_stmt (&assign_body,
13550 gimple_build_assign (v, var));
13551 tree rhs = build_fold_addr_expr (v);
13552 gimple_seq_add_stmt (&assign_body,
13553 gimple_build_assign (new_var, rhs));
13555 else
13556 gimple_seq_add_stmt (&assign_body,
13557 gimple_build_assign (new_var, var));
13559 tree v2 = lang_hooks.decls.omp_array_data (unshare_expr (v), false);
13560 gcc_assert (v2);
13561 gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
13562 gimple_seq_add_stmt (&assign_body,
13563 gimple_build_assign (v2, x));
13565 else if (is_variable_sized (var))
13567 tree pvar = DECL_VALUE_EXPR (var);
13568 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
13569 pvar = TREE_OPERAND (pvar, 0);
13570 gcc_assert (DECL_P (pvar));
13571 new_var = lookup_decl (pvar, ctx);
13572 gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
13573 gimple_seq_add_stmt (&assign_body,
13574 gimple_build_assign (new_var, x));
13576 else if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
13577 && !omp_privatize_by_reference (var)
13578 && !omp_is_allocatable_or_ptr (var))
13579 || TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
13581 new_var = lookup_decl (var, ctx);
13582 new_var = DECL_VALUE_EXPR (new_var);
13583 gcc_assert (TREE_CODE (new_var) == MEM_REF);
13584 new_var = TREE_OPERAND (new_var, 0);
13585 gcc_assert (DECL_P (new_var));
13586 gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
13587 gimple_seq_add_stmt (&assign_body,
13588 gimple_build_assign (new_var, x));
13590 else
13592 tree type = TREE_TYPE (var);
13593 new_var = lookup_decl (var, ctx);
13594 if (omp_privatize_by_reference (var))
13596 type = TREE_TYPE (type);
13597 if (POINTER_TYPE_P (type)
13598 && TREE_CODE (type) != ARRAY_TYPE
13599 && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_USE_DEVICE_ADDR
13600 || (omp_privatize_by_reference (var)
13601 && omp_is_allocatable_or_ptr (var))))
13603 tree v = create_tmp_var_raw (type, get_name (var));
13604 gimple_add_tmp_var (v);
13605 TREE_ADDRESSABLE (v) = 1;
13606 x = fold_convert (type, x);
13607 gimplify_expr (&x, &assign_body, NULL, is_gimple_val,
13608 fb_rvalue);
13609 gimple_seq_add_stmt (&assign_body,
13610 gimple_build_assign (v, x));
13611 x = build_fold_addr_expr (v);
13612 do_optional_check = true;
13615 new_var = DECL_VALUE_EXPR (new_var);
13616 x = fold_convert (TREE_TYPE (new_var), x);
13617 gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
13618 gimple_seq_add_stmt (&assign_body,
13619 gimple_build_assign (new_var, x));
13621 tree present;
13622 present = (do_optional_check
13623 ? omp_check_optional_argument (OMP_CLAUSE_DECL (c), true)
13624 : NULL_TREE);
13625 if (present)
13627 tree null_label = create_artificial_label (UNKNOWN_LOCATION);
13628 tree notnull_label = create_artificial_label (UNKNOWN_LOCATION);
13629 tree opt_arg_label = create_artificial_label (UNKNOWN_LOCATION);
13630 glabel *null_glabel = gimple_build_label (null_label);
13631 glabel *notnull_glabel = gimple_build_label (notnull_label);
13632 ggoto *opt_arg_ggoto = gimple_build_goto (opt_arg_label);
13633 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
13634 fb_rvalue);
13635 gimplify_expr (&present, &new_body, NULL, is_gimple_val,
13636 fb_rvalue);
13637 gcond *cond = gimple_build_cond_from_tree (present,
13638 notnull_label,
13639 null_label);
13640 gimple_seq_add_stmt (&new_body, cond);
13641 gimple_seq_add_stmt (&new_body, null_glabel);
13642 gimplify_assign (new_var, null_pointer_node, &new_body);
13643 gimple_seq_add_stmt (&new_body, opt_arg_ggoto);
13644 gimple_seq_add_stmt (&new_body, notnull_glabel);
13645 gimple_seq_add_seq (&new_body, assign_body);
13646 gimple_seq_add_stmt (&new_body,
13647 gimple_build_label (opt_arg_label));
13649 else
13650 gimple_seq_add_seq (&new_body, assign_body);
13651 break;
13653 /* Handle GOMP_MAP_FIRSTPRIVATE_{POINTER,REFERENCE} in a second pass,
13654 so that any firstprivate vars that hold OMP_CLAUSE_SIZE have
13655 already been handled; likewise OMP_CLAUSE_PRIVATE for VLAs
13656 or references to VLAs. */
13657 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
13658 switch (OMP_CLAUSE_CODE (c))
13660 tree var;
13661 default:
13662 break;
13663 case OMP_CLAUSE_MAP:
13664 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
13665 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
13667 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
13668 poly_int64 offset = 0;
13669 gcc_assert (prev);
13670 var = OMP_CLAUSE_DECL (c);
13671 if (DECL_P (var)
13672 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
13673 && is_global_var (maybe_lookup_decl_in_outer_ctx (var,
13674 ctx))
13675 && varpool_node::get_create (var)->offloadable)
13676 break;
13677 if (TREE_CODE (var) == INDIRECT_REF
13678 && TREE_CODE (TREE_OPERAND (var, 0)) == COMPONENT_REF)
13679 var = TREE_OPERAND (var, 0);
13680 if (TREE_CODE (var) == COMPONENT_REF)
13682 var = get_addr_base_and_unit_offset (var, &offset);
13683 gcc_assert (var != NULL_TREE && DECL_P (var));
13685 else if (DECL_SIZE (var)
13686 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
13688 tree var2 = DECL_VALUE_EXPR (var);
13689 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
13690 var2 = TREE_OPERAND (var2, 0);
13691 gcc_assert (DECL_P (var2));
13692 var = var2;
13694 tree new_var = lookup_decl (var, ctx), x;
13695 tree type = TREE_TYPE (new_var);
13696 bool is_ref;
13697 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == INDIRECT_REF
13698 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
13699 == COMPONENT_REF))
13701 type = TREE_TYPE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0));
13702 is_ref = true;
13703 new_var = build2 (MEM_REF, type,
13704 build_fold_addr_expr (new_var),
13705 build_int_cst (build_pointer_type (type),
13706 offset));
13708 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPONENT_REF)
13710 type = TREE_TYPE (OMP_CLAUSE_DECL (c));
13711 is_ref = TREE_CODE (type) == REFERENCE_TYPE;
13712 new_var = build2 (MEM_REF, type,
13713 build_fold_addr_expr (new_var),
13714 build_int_cst (build_pointer_type (type),
13715 offset));
13717 else
13718 is_ref = omp_privatize_by_reference (var);
13719 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
13720 is_ref = false;
13721 bool ref_to_array = false;
13722 if (is_ref)
13724 type = TREE_TYPE (type);
13725 if (TREE_CODE (type) == ARRAY_TYPE)
13727 type = build_pointer_type (type);
13728 ref_to_array = true;
13731 else if (TREE_CODE (type) == ARRAY_TYPE)
13733 tree decl2 = DECL_VALUE_EXPR (new_var);
13734 gcc_assert (TREE_CODE (decl2) == MEM_REF);
13735 decl2 = TREE_OPERAND (decl2, 0);
13736 gcc_assert (DECL_P (decl2));
13737 new_var = decl2;
13738 type = TREE_TYPE (new_var);
13740 x = build_receiver_ref (OMP_CLAUSE_DECL (prev), false, ctx);
13741 x = fold_convert_loc (clause_loc, type, x);
13742 if (!integer_zerop (OMP_CLAUSE_SIZE (c)))
13744 tree bias = OMP_CLAUSE_SIZE (c);
13745 if (DECL_P (bias))
13746 bias = lookup_decl (bias, ctx);
13747 bias = fold_convert_loc (clause_loc, sizetype, bias);
13748 bias = fold_build1_loc (clause_loc, NEGATE_EXPR, sizetype,
13749 bias);
13750 x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
13751 TREE_TYPE (x), x, bias);
13753 if (ref_to_array)
13754 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
13755 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
13756 if (is_ref && !ref_to_array)
13758 tree t = create_tmp_var_raw (type, get_name (var));
13759 gimple_add_tmp_var (t);
13760 TREE_ADDRESSABLE (t) = 1;
13761 gimple_seq_add_stmt (&new_body,
13762 gimple_build_assign (t, x));
13763 x = build_fold_addr_expr_loc (clause_loc, t);
13765 gimple_seq_add_stmt (&new_body,
13766 gimple_build_assign (new_var, x));
13767 prev = NULL_TREE;
13769 else if (OMP_CLAUSE_CHAIN (c)
13770 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c))
13771 == OMP_CLAUSE_MAP
13772 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
13773 == GOMP_MAP_FIRSTPRIVATE_POINTER
13774 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
13775 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
13776 prev = c;
13777 break;
13778 case OMP_CLAUSE_PRIVATE:
13779 var = OMP_CLAUSE_DECL (c);
13780 if (is_variable_sized (var))
13782 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
13783 tree new_var = lookup_decl (var, ctx);
13784 tree pvar = DECL_VALUE_EXPR (var);
13785 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
13786 pvar = TREE_OPERAND (pvar, 0);
13787 gcc_assert (DECL_P (pvar));
13788 tree new_pvar = lookup_decl (pvar, ctx);
13789 tree atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
13790 tree al = size_int (DECL_ALIGN (var));
13791 tree x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
13792 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
13793 x = fold_convert_loc (clause_loc, TREE_TYPE (new_pvar), x);
13794 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
13795 gimple_seq_add_stmt (&new_body,
13796 gimple_build_assign (new_pvar, x));
13798 else if (omp_privatize_by_reference (var)
13799 && !is_gimple_omp_oacc (ctx->stmt))
13801 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
13802 tree new_var = lookup_decl (var, ctx);
13803 tree x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
13804 if (TREE_CONSTANT (x))
13805 break;
13806 else
13808 tree atmp
13809 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
13810 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
13811 tree al = size_int (TYPE_ALIGN (rtype));
13812 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
13815 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
13816 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
13817 gimple_seq_add_stmt (&new_body,
13818 gimple_build_assign (new_var, x));
13820 break;
13823 gimple_seq fork_seq = NULL;
13824 gimple_seq join_seq = NULL;
13826 if (offloaded && is_gimple_omp_oacc (ctx->stmt))
13828 /* If there are reductions on the offloaded region itself, treat
13829 them as a dummy GANG loop. */
13830 tree level = build_int_cst (integer_type_node, GOMP_DIM_GANG);
13832 gcall *private_marker = lower_oacc_private_marker (ctx);
13834 if (private_marker)
13835 gimple_call_set_arg (private_marker, 2, level);
13837 lower_oacc_reductions (gimple_location (ctx->stmt), clauses, level,
13838 false, NULL, private_marker, NULL, &fork_seq,
13839 &join_seq, ctx);
13842 gimple_seq_add_seq (&new_body, fork_seq);
13843 gimple_seq_add_seq (&new_body, tgt_body);
13844 gimple_seq_add_seq (&new_body, join_seq);
13846 if (offloaded)
13848 new_body = maybe_catch_exception (new_body);
13849 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
13851 gimple_omp_set_body (stmt, new_body);
13854 bind = gimple_build_bind (NULL, NULL,
13855 tgt_bind ? gimple_bind_block (tgt_bind)
13856 : NULL_TREE);
13857 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
13858 gimple_bind_add_seq (bind, ilist);
13859 gimple_bind_add_stmt (bind, stmt);
13860 gimple_bind_add_seq (bind, olist);
13862 pop_gimplify_context (NULL);
13864 if (dep_bind)
13866 gimple_bind_add_seq (dep_bind, dep_ilist);
13867 gimple_bind_add_stmt (dep_bind, bind);
13868 gimple_bind_add_seq (dep_bind, dep_olist);
13869 pop_gimplify_context (dep_bind);
13873 /* Expand code for an OpenMP teams directive. */
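/* In outline, a teams construct is lowered to

  <clause setup>
  GIMPLE_OMP_TEAMS
  __builtin_GOMP_teams (num_teams, thread_limit);
  <body> <reduction updates> <destructors>
  OMP_RETURN

inside a fresh GIMPLE_BIND. */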
13875 static void
13876 lower_omp_teams (gimple_stmt_iterator *gsi_p, omp_context *ctx)
13878 gomp_teams *teams_stmt = as_a <gomp_teams *> (gsi_stmt (*gsi_p));
13879 push_gimplify_context ();
13881 tree block = make_node (BLOCK);
13882 gbind *bind = gimple_build_bind (NULL, NULL, block);
13883 gsi_replace (gsi_p, bind, true);
13884 gimple_seq bind_body = NULL;
13885 gimple_seq dlist = NULL;
13886 gimple_seq olist = NULL;
13888 tree num_teams = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
13889 OMP_CLAUSE_NUM_TEAMS);
13890 if (num_teams == NULL_TREE)
13891 num_teams = build_int_cst (unsigned_type_node, 0);
13892 else
13894 num_teams = OMP_CLAUSE_NUM_TEAMS_EXPR (num_teams);
13895 num_teams = fold_convert (unsigned_type_node, num_teams);
13896 gimplify_expr (&num_teams, &bind_body, NULL, is_gimple_val, fb_rvalue);
13898 tree thread_limit = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
13899 OMP_CLAUSE_THREAD_LIMIT);
13900 if (thread_limit == NULL_TREE)
13901 thread_limit = build_int_cst (unsigned_type_node, 0);
13902 else
13904 thread_limit = OMP_CLAUSE_THREAD_LIMIT_EXPR (thread_limit);
13905 thread_limit = fold_convert (unsigned_type_node, thread_limit);
13906 gimplify_expr (&thread_limit, &bind_body, NULL, is_gimple_val,
13907 fb_rvalue);
13910 lower_rec_input_clauses (gimple_omp_teams_clauses (teams_stmt),
13911 &bind_body, &dlist, ctx, NULL);
13912 lower_omp (gimple_omp_body_ptr (teams_stmt), ctx);
13913 lower_reduction_clauses (gimple_omp_teams_clauses (teams_stmt), &olist,
13914 NULL, ctx);
13915 gimple_seq_add_stmt (&bind_body, teams_stmt);
13917 location_t loc = gimple_location (teams_stmt);
13918 tree decl = builtin_decl_explicit (BUILT_IN_GOMP_TEAMS);
13919 gimple *call = gimple_build_call (decl, 2, num_teams, thread_limit);
13920 gimple_set_location (call, loc);
13921 gimple_seq_add_stmt (&bind_body, call);
13923 gimple_seq_add_seq (&bind_body, gimple_omp_body (teams_stmt));
13924 gimple_omp_set_body (teams_stmt, NULL);
13925 gimple_seq_add_seq (&bind_body, olist);
13926 gimple_seq_add_seq (&bind_body, dlist);
13927 gimple_seq_add_stmt (&bind_body, gimple_build_omp_return (true));
13928 gimple_bind_set_body (bind, bind_body);
13930 pop_gimplify_context (bind);
13932 gimple_bind_append_vars (bind, ctx->block_vars);
13933 BLOCK_VARS (block) = ctx->block_vars;
13934 if (BLOCK_VARS (block))
13935 TREE_USED (block) = 1;
13938 /* Callback for lower_omp_1. Return non-NULL if *tp needs to be
13939 regimplified. If DATA is non-NULL, lower_omp_1 is outside
13940 of OMP context, but with task_shared_vars set. */
13942 static tree
13943 lower_omp_regimplify_p (tree *tp, int *walk_subtrees,
13944 void *data)
13946 tree t = *tp;
13948 /* Any variable with DECL_VALUE_EXPR needs to be regimplified. */
13949 if ((VAR_P (t) || TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
13950 && data == NULL
13951 && DECL_HAS_VALUE_EXPR_P (t))
13952 return t;
13954 if (task_shared_vars
13955 && DECL_P (t)
13956 && bitmap_bit_p (task_shared_vars, DECL_UID (t)))
13957 return t;
13959 /* If a global variable has been privatized, TREE_CONSTANT on
13960 ADDR_EXPR might be wrong. */
13961 if (data == NULL && TREE_CODE (t) == ADDR_EXPR)
13962 recompute_tree_invariant_for_addr_expr (t);
13964 *walk_subtrees = !IS_TYPE_OR_DECL_P (t);
13965 return NULL_TREE;
13968 /* Data to be communicated between lower_omp_regimplify_operands and
13969 lower_omp_regimplify_operands_p. */
13971 struct lower_omp_regimplify_operands_data
13973 omp_context *ctx;
13974 vec<tree> *decls;
13977 /* Helper function for lower_omp_regimplify_operands. Find
13978 omp_member_access_dummy_var vars and adjust temporarily their
13979 DECL_VALUE_EXPRs if needed. */
13981 static tree
13982 lower_omp_regimplify_operands_p (tree *tp, int *walk_subtrees,
13983 void *data)
13985 tree t = omp_member_access_dummy_var (*tp);
13986 if (t)
13988 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
13989 lower_omp_regimplify_operands_data *ldata
13990 = (lower_omp_regimplify_operands_data *) wi->info;
13991 tree o = maybe_lookup_decl (t, ldata->ctx);
13992 if (o != t)
13994 ldata->decls->safe_push (DECL_VALUE_EXPR (*tp));
13995 ldata->decls->safe_push (*tp);
13996 tree v = unshare_and_remap (DECL_VALUE_EXPR (*tp), t, o);
13997 SET_DECL_VALUE_EXPR (*tp, v);
14000 *walk_subtrees = !IS_TYPE_OR_DECL_P (*tp);
14001 return NULL_TREE;
14004 /* Wrapper around gimple_regimplify_operands that adjusts DECL_VALUE_EXPRs
14005 of omp_member_access_dummy_var vars during regimplification. */
14007 static void
14008 lower_omp_regimplify_operands (omp_context *ctx, gimple *stmt,
14009 gimple_stmt_iterator *gsi_p)
14011 auto_vec<tree, 10> decls;
14012 if (ctx)
14014 struct walk_stmt_info wi;
14015 memset (&wi, '\0', sizeof (wi));
14016 struct lower_omp_regimplify_operands_data data;
14017 data.ctx = ctx;
14018 data.decls = &decls;
14019 wi.info = &data;
14020 walk_gimple_op (stmt, lower_omp_regimplify_operands_p, &wi);
14022 gimple_regimplify_operands (stmt, gsi_p);
14023 while (!decls.is_empty ())
14025 tree t = decls.pop ();
14026 tree v = decls.pop ();
14027 SET_DECL_VALUE_EXPR (t, v);
14031 static void
14032 lower_omp_1 (gimple_stmt_iterator *gsi_p, omp_context *ctx)
14034 gimple *stmt = gsi_stmt (*gsi_p);
14035 struct walk_stmt_info wi;
14036 gcall *call_stmt;
14038 if (gimple_has_location (stmt))
14039 input_location = gimple_location (stmt);
14041 if (task_shared_vars)
14042 memset (&wi, '\0', sizeof (wi));
14044 /* If we have issued syntax errors, avoid doing any heavy lifting.
14045 Just replace the OMP directives with a NOP to avoid
14046 confusing RTL expansion. */
14047 if (seen_error () && is_gimple_omp (stmt))
14049 gsi_replace (gsi_p, gimple_build_nop (), true);
14050 return;
14053 switch (gimple_code (stmt))
14055 case GIMPLE_COND:
14057 gcond *cond_stmt = as_a <gcond *> (stmt);
14058 if ((ctx || task_shared_vars)
14059 && (walk_tree (gimple_cond_lhs_ptr (cond_stmt),
14060 lower_omp_regimplify_p,
14061 ctx ? NULL : &wi, NULL)
14062 || walk_tree (gimple_cond_rhs_ptr (cond_stmt),
14063 lower_omp_regimplify_p,
14064 ctx ? NULL : &wi, NULL)))
14065 lower_omp_regimplify_operands (ctx, cond_stmt, gsi_p);
14067 break;
14068 case GIMPLE_CATCH:
14069 lower_omp (gimple_catch_handler_ptr (as_a <gcatch *> (stmt)), ctx);
14070 break;
14071 case GIMPLE_EH_FILTER:
14072 lower_omp (gimple_eh_filter_failure_ptr (stmt), ctx);
14073 break;
14074 case GIMPLE_TRY:
14075 lower_omp (gimple_try_eval_ptr (stmt), ctx);
14076 lower_omp (gimple_try_cleanup_ptr (stmt), ctx);
14077 break;
14078 case GIMPLE_TRANSACTION:
14079 lower_omp (gimple_transaction_body_ptr (as_a <gtransaction *> (stmt)),
14080 ctx);
14081 break;
14082 case GIMPLE_BIND:
14083 if (ctx && is_gimple_omp_oacc (ctx->stmt))
14085 tree vars = gimple_bind_vars (as_a <gbind *> (stmt));
14086 oacc_privatization_scan_decl_chain (ctx, vars);
14088 lower_omp (gimple_bind_body_ptr (as_a <gbind *> (stmt)), ctx);
14089 maybe_remove_omp_member_access_dummy_vars (as_a <gbind *> (stmt));
14090 break;
14091 case GIMPLE_OMP_PARALLEL:
14092 case GIMPLE_OMP_TASK:
14093 ctx = maybe_lookup_ctx (stmt);
14094 gcc_assert (ctx);
14095 if (ctx->cancellable)
14096 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
14097 lower_omp_taskreg (gsi_p, ctx);
14098 break;
14099 case GIMPLE_OMP_FOR:
14100 ctx = maybe_lookup_ctx (stmt);
14101 gcc_assert (ctx);
14102 if (ctx->cancellable)
14103 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
14104 lower_omp_for (gsi_p, ctx);
14105 break;
14106 case GIMPLE_OMP_SECTIONS:
14107 ctx = maybe_lookup_ctx (stmt);
14108 gcc_assert (ctx);
14109 if (ctx->cancellable)
14110 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
14111 lower_omp_sections (gsi_p, ctx);
14112 break;
14113 case GIMPLE_OMP_SCOPE:
14114 ctx = maybe_lookup_ctx (stmt);
14115 gcc_assert (ctx);
14116 lower_omp_scope (gsi_p, ctx);
14117 break;
14118 case GIMPLE_OMP_SINGLE:
14119 ctx = maybe_lookup_ctx (stmt);
14120 gcc_assert (ctx);
14121 lower_omp_single (gsi_p, ctx);
14122 break;
14123 case GIMPLE_OMP_MASTER:
14124 case GIMPLE_OMP_MASKED:
14125 ctx = maybe_lookup_ctx (stmt);
14126 gcc_assert (ctx);
14127 lower_omp_master (gsi_p, ctx);
14128 break;
14129 case GIMPLE_OMP_TASKGROUP:
14130 ctx = maybe_lookup_ctx (stmt);
14131 gcc_assert (ctx);
14132 lower_omp_taskgroup (gsi_p, ctx);
14133 break;
14134 case GIMPLE_OMP_ORDERED:
14135 ctx = maybe_lookup_ctx (stmt);
14136 gcc_assert (ctx);
14137 lower_omp_ordered (gsi_p, ctx);
14138 break;
14139 case GIMPLE_OMP_SCAN:
14140 ctx = maybe_lookup_ctx (stmt);
14141 gcc_assert (ctx);
14142 lower_omp_scan (gsi_p, ctx);
14143 break;
14144 case GIMPLE_OMP_CRITICAL:
14145 ctx = maybe_lookup_ctx (stmt);
14146 gcc_assert (ctx);
14147 lower_omp_critical (gsi_p, ctx);
14148 break;
14149 case GIMPLE_OMP_ATOMIC_LOAD:
14150 if ((ctx || task_shared_vars)
14151 && walk_tree (gimple_omp_atomic_load_rhs_ptr (
14152 as_a <gomp_atomic_load *> (stmt)),
14153 lower_omp_regimplify_p, ctx ? NULL : &wi, NULL))
14154 lower_omp_regimplify_operands (ctx, stmt, gsi_p);
14155 break;
14156 case GIMPLE_OMP_TARGET:
14157 ctx = maybe_lookup_ctx (stmt);
14158 gcc_assert (ctx);
14159 lower_omp_target (gsi_p, ctx);
14160 break;
14161 case GIMPLE_OMP_TEAMS:
14162 ctx = maybe_lookup_ctx (stmt);
14163 gcc_assert (ctx);
14164 if (gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
14165 lower_omp_taskreg (gsi_p, ctx);
14166 else
14167 lower_omp_teams (gsi_p, ctx);
14168 break;
14169 case GIMPLE_CALL:
14170 tree fndecl;
14171 call_stmt = as_a <gcall *> (stmt);
14172 fndecl = gimple_call_fndecl (call_stmt);
14173 if (fndecl
14174 && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
14175 switch (DECL_FUNCTION_CODE (fndecl))
14177 case BUILT_IN_GOMP_BARRIER:
14178 if (ctx == NULL)
14179 break;
14180 /* FALLTHRU */
14181 case BUILT_IN_GOMP_CANCEL:
14182 case BUILT_IN_GOMP_CANCELLATION_POINT:
14183 omp_context *cctx;
14184 cctx = ctx;
14185 if (gimple_code (cctx->stmt) == GIMPLE_OMP_SECTION)
14186 cctx = cctx->outer;
14187 gcc_assert (gimple_call_lhs (call_stmt) == NULL_TREE);
14188 if (!cctx->cancellable)
14190 if (DECL_FUNCTION_CODE (fndecl)
14191 == BUILT_IN_GOMP_CANCELLATION_POINT)
14193 stmt = gimple_build_nop ();
14194 gsi_replace (gsi_p, stmt, false);
14196 break;
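/* In a cancellable region, give the call a result and test it:
GOMP_barrier becomes GOMP_barrier_cancel, and a nonzero return value
branches to the region's cancel label; in outline,
lhs = __builtin_GOMP_barrier_cancel ();
if (lhs != 0) goto <cancel>; else goto <fallthru>; */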
14198 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_GOMP_BARRIER)
14200 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_BARRIER_CANCEL);
14201 gimple_call_set_fndecl (call_stmt, fndecl);
14202 gimple_call_set_fntype (call_stmt, TREE_TYPE (fndecl));
14204 tree lhs;
14205 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (fndecl)));
14206 gimple_call_set_lhs (call_stmt, lhs);
14207 tree fallthru_label;
14208 fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
14209 gimple *g;
14210 g = gimple_build_label (fallthru_label);
14211 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
14212 g = gimple_build_cond (NE_EXPR, lhs,
14213 fold_convert (TREE_TYPE (lhs),
14214 boolean_false_node),
14215 cctx->cancel_label, fallthru_label);
14216 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
14217 break;
14218 default:
14219 break;
14221 goto regimplify;
14223 case GIMPLE_ASSIGN:
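/* For conditional lastprivate, note that the privatized variable was
written in this iteration by copying the iteration counter from the
_condtemp_ clause into the tracking temporary. */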
14224 for (omp_context *up = ctx; up; up = up->outer)
14226 if (gimple_code (up->stmt) == GIMPLE_OMP_ORDERED
14227 || gimple_code (up->stmt) == GIMPLE_OMP_CRITICAL
14228 || gimple_code (up->stmt) == GIMPLE_OMP_TASKGROUP
14229 || gimple_code (up->stmt) == GIMPLE_OMP_SCOPE
14230 || gimple_code (up->stmt) == GIMPLE_OMP_SECTION
14231 || gimple_code (up->stmt) == GIMPLE_OMP_SCAN
14232 || (gimple_code (up->stmt) == GIMPLE_OMP_TARGET
14233 && (gimple_omp_target_kind (up->stmt)
14234 == GF_OMP_TARGET_KIND_DATA)))
14235 continue;
14236 else if (!up->lastprivate_conditional_map)
14237 break;
14238 tree lhs = get_base_address (gimple_assign_lhs (stmt));
14239 if (TREE_CODE (lhs) == MEM_REF
14240 && DECL_P (TREE_OPERAND (lhs, 0))
14241 && TREE_CODE (TREE_TYPE (TREE_OPERAND (lhs,
14242 0))) == REFERENCE_TYPE)
14243 lhs = TREE_OPERAND (lhs, 0);
14244 if (DECL_P (lhs))
14245 if (tree *v = up->lastprivate_conditional_map->get (lhs))
14247 tree clauses;
14248 if (up->combined_into_simd_safelen1)
14250 up = up->outer;
14251 if (gimple_code (up->stmt) == GIMPLE_OMP_SCAN)
14252 up = up->outer;
14254 if (gimple_code (up->stmt) == GIMPLE_OMP_FOR)
14255 clauses = gimple_omp_for_clauses (up->stmt);
14256 else
14257 clauses = gimple_omp_sections_clauses (up->stmt);
14258 tree c = omp_find_clause (clauses, OMP_CLAUSE__CONDTEMP_);
14259 if (!OMP_CLAUSE__CONDTEMP__ITER (c))
14260 c = omp_find_clause (OMP_CLAUSE_CHAIN (c),
14261 OMP_CLAUSE__CONDTEMP_);
14262 gcc_assert (OMP_CLAUSE__CONDTEMP__ITER (c));
14263 gimple *g = gimple_build_assign (*v, OMP_CLAUSE_DECL (c));
14264 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
14267 /* FALLTHRU */
14269 default:
14270 regimplify:
14271 if ((ctx || task_shared_vars)
14272 && walk_gimple_op (stmt, lower_omp_regimplify_p,
14273 ctx ? NULL : &wi))
14275 /* Just remove clobbers; this should happen only if we have
14276 "privatized" local addressable variables in SIMD regions.
14277 The clobber isn't needed in that case, and gimplifying the
14278 address of the ARRAY_REF into a pointer and creating a
14279 MEM_REF-based clobber would create worse code than we get
14280 with the clobber dropped. */
14281 if (gimple_clobber_p (stmt))
14283 gsi_replace (gsi_p, gimple_build_nop (), true);
14284 break;
14286 lower_omp_regimplify_operands (ctx, stmt, gsi_p);
14288 break;
14292 static void
14293 lower_omp (gimple_seq *body, omp_context *ctx)
14295 location_t saved_location = input_location;
14296 gimple_stmt_iterator gsi;
14297 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
14298 lower_omp_1 (&gsi, ctx);
14299 /* During gimplification, we haven't folded statements inside offloading
14300 or taskreg regions (gimplify.c:maybe_fold_stmt); do that now. */
14301 if (target_nesting_level || taskreg_nesting_level)
14302 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
14303 fold_stmt (&gsi);
14304 input_location = saved_location;
14307 /* Main entry point. */
14309 static unsigned int
14310 execute_lower_omp (void)
14312 gimple_seq body;
14313 int i;
14314 omp_context *ctx;
14316 /* This pass always runs, to provide PROP_gimple_lomp.
14317 But often, there is nothing to do. */
14318 if (flag_openacc == 0 && flag_openmp == 0
14319 && flag_openmp_simd == 0)
14320 return 0;
14322 all_contexts = splay_tree_new (splay_tree_compare_pointers, 0,
14323 delete_omp_context);
14325 body = gimple_body (current_function_decl);
14327 scan_omp (&body, NULL);
14328 gcc_assert (taskreg_nesting_level == 0);
14329 FOR_EACH_VEC_ELT (taskreg_contexts, i, ctx)
14330 finish_taskreg_scan (ctx);
14331 taskreg_contexts.release ();
14333 if (all_contexts->root)
14335 if (task_shared_vars)
14336 push_gimplify_context ();
14337 lower_omp (&body, NULL);
14338 if (task_shared_vars)
14339 pop_gimplify_context (NULL);
14342 if (all_contexts)
14344 splay_tree_delete (all_contexts);
14345 all_contexts = NULL;
14347 BITMAP_FREE (task_shared_vars);
14348 BITMAP_FREE (global_nonaddressable_vars);
14350 /* If the current function is a method, remove the artificial dummy
14351 VAR_DECLs created for non-static data member privatization; they aren't
14352 needed for debuginfo or anything else, have already been replaced
14353 everywhere in the IL, and cause problems with LTO. */
14354 if (DECL_ARGUMENTS (current_function_decl)
14355 && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
14356 && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
14357 == POINTER_TYPE))
14358 remove_member_access_dummy_vars (DECL_INITIAL (current_function_decl));
14359 return 0;
14362 namespace {
14364 const pass_data pass_data_lower_omp =
14366 GIMPLE_PASS, /* type */
14367 "omplower", /* name */
14368 OPTGROUP_OMP, /* optinfo_flags */
14369 TV_NONE, /* tv_id */
14370 PROP_gimple_any, /* properties_required */
14371 PROP_gimple_lomp | PROP_gimple_lomp_dev, /* properties_provided */
14372 0, /* properties_destroyed */
14373 0, /* todo_flags_start */
14374 0, /* todo_flags_finish */
14377 class pass_lower_omp : public gimple_opt_pass
14379 public:
14380 pass_lower_omp (gcc::context *ctxt)
14381 : gimple_opt_pass (pass_data_lower_omp, ctxt)
14384 /* opt_pass methods: */
14385 virtual unsigned int execute (function *) { return execute_lower_omp (); }
14387 }; // class pass_lower_omp
14389 } // anon namespace
14391 gimple_opt_pass *
14392 make_pass_lower_omp (gcc::context *ctxt)
14394 return new pass_lower_omp (ctxt);
14397 /* The following is a utility to diagnose structured block violations.
14398 It is not part of the "omplower" pass, as that's invoked too late. It
14399 should be invoked by the respective front ends after gimplification. */
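/* As a hedged illustration (example invented for this comment, not taken
   from the GCC testsuite), the diagnostic rejects a jump into a structured
   block, e.g.

       goto l;		<-- invalid entry
     #pragma omp parallel
       {
       l:;
       }

   and, symmetrically, a jump from inside the block to a label outside it
   (an invalid exit).  */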
14401 static splay_tree all_labels;
14403 /* Check for mismatched contexts and generate an error if needed. Return
14404 true if an error is detected. */
14406 static bool
14407 diagnose_sb_0 (gimple_stmt_iterator *gsi_p,
14408 gimple *branch_ctx, gimple *label_ctx)
14410 gcc_checking_assert (!branch_ctx || is_gimple_omp (branch_ctx));
14411 gcc_checking_assert (!label_ctx || is_gimple_omp (label_ctx));
14413 if (label_ctx == branch_ctx)
14414 return false;
14416 const char *kind = NULL;
14418 if (flag_openacc)
14420 if ((branch_ctx && is_gimple_omp_oacc (branch_ctx))
14421 || (label_ctx && is_gimple_omp_oacc (label_ctx)))
14423 gcc_checking_assert (kind == NULL);
14424 kind = "OpenACC";
14427 if (kind == NULL)
14429 gcc_checking_assert (flag_openmp || flag_openmp_simd);
14430 kind = "OpenMP";
14433 /* Previously we kept track of the label's entire context in diagnose_sb_[12]
14434 so we could traverse it and issue a correct "exit" or "enter" error
14435 message upon a structured block violation.
14437 We built the context as a list via tree_cons, but there is
14438 no easy counterpart in gimple tuples. It seems like far too much work
14439 for issuing exit/enter error messages. If someone really misses the
14440 distinct error message... patches welcome. */
14442 #if 0
14443 /* Try to avoid confusing the user by producing an error message
14444 with correct "exit" or "enter" verbiage. We prefer "exit"
14445 unless we can show that LABEL_CTX is nested within BRANCH_CTX. */
14446 if (branch_ctx == NULL)
14447 exit_p = false;
14448 else
14450 while (label_ctx)
14452 if (TREE_VALUE (label_ctx) == branch_ctx)
14454 exit_p = false;
14455 break;
14457 label_ctx = TREE_CHAIN (label_ctx);
14461 if (exit_p)
14462 error ("invalid exit from %s structured block", kind);
14463 else
14464 error ("invalid entry to %s structured block", kind);
14465 #endif
14467 /* If it's obvious we have an invalid entry, be specific about the error. */
14468 if (branch_ctx == NULL)
14469 error ("invalid entry to %s structured block", kind);
14470 else
14472 /* Otherwise, be vague and lazy, but efficient. */
14473 error ("invalid branch to/from %s structured block", kind);
14476 gsi_replace (gsi_p, gimple_build_nop (), false);
14477 return true;
14480 /* Pass 1: Create a minimal tree of structured blocks, and record
14481 where each label is found. */
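/* For instance (illustrative), given

       #pragma omp parallel
       { l1:; }
       l2:;

   pass 1 records L1 with the GIMPLE_OMP_PARALLEL statement as its context
   and L2 with a NULL context in ALL_LABELS.  */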
14483 static tree
14484 diagnose_sb_1 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
14485 struct walk_stmt_info *wi)
14487 gimple *context = (gimple *) wi->info;
14488 gimple *inner_context;
14489 gimple *stmt = gsi_stmt (*gsi_p);
14491 *handled_ops_p = true;
14493 switch (gimple_code (stmt))
14495 WALK_SUBSTMTS;
14497 case GIMPLE_OMP_PARALLEL:
14498 case GIMPLE_OMP_TASK:
14499 case GIMPLE_OMP_SCOPE:
14500 case GIMPLE_OMP_SECTIONS:
14501 case GIMPLE_OMP_SINGLE:
14502 case GIMPLE_OMP_SECTION:
14503 case GIMPLE_OMP_MASTER:
14504 case GIMPLE_OMP_MASKED:
14505 case GIMPLE_OMP_ORDERED:
14506 case GIMPLE_OMP_SCAN:
14507 case GIMPLE_OMP_CRITICAL:
14508 case GIMPLE_OMP_TARGET:
14509 case GIMPLE_OMP_TEAMS:
14510 case GIMPLE_OMP_TASKGROUP:
14511 /* The minimal context here is just the current OMP construct. */
14512 inner_context = stmt;
14513 wi->info = inner_context;
14514 walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
14515 wi->info = context;
14516 break;
14518 case GIMPLE_OMP_FOR:
14519 inner_context = stmt;
14520 wi->info = inner_context;
14521 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
14522 walk them. */
14523 walk_gimple_seq (gimple_omp_for_pre_body (stmt),
14524 diagnose_sb_1, NULL, wi);
14525 walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
14526 wi->info = context;
14527 break;
14529 case GIMPLE_LABEL:
14530 splay_tree_insert (all_labels,
14531 (splay_tree_key) gimple_label_label (
14532 as_a <glabel *> (stmt)),
14533 (splay_tree_value) context);
14534 break;
14536 default:
14537 break;
14540 return NULL_TREE;
14543 /* Pass 2: Check each branch and see if its context differs from that of
14544 the destination label's context. */
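/* Sketch of the check (illustrative): when pass 2 reaches a branch such as

       if (cond) goto l1; else goto l2;

   with WI->INFO pointing at the innermost enclosing OMP statement, both
   labels are looked up in ALL_LABELS and diagnose_sb_0 reports an error
   whenever a label's recorded context differs from the branch's.  */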
14546 static tree
14547 diagnose_sb_2 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
14548 struct walk_stmt_info *wi)
14550 gimple *context = (gimple *) wi->info;
14551 splay_tree_node n;
14552 gimple *stmt = gsi_stmt (*gsi_p);
14554 *handled_ops_p = true;
14556 switch (gimple_code (stmt))
14558 WALK_SUBSTMTS;
14560 case GIMPLE_OMP_PARALLEL:
14561 case GIMPLE_OMP_TASK:
14562 case GIMPLE_OMP_SCOPE:
14563 case GIMPLE_OMP_SECTIONS:
14564 case GIMPLE_OMP_SINGLE:
14565 case GIMPLE_OMP_SECTION:
14566 case GIMPLE_OMP_MASTER:
14567 case GIMPLE_OMP_MASKED:
14568 case GIMPLE_OMP_ORDERED:
14569 case GIMPLE_OMP_SCAN:
14570 case GIMPLE_OMP_CRITICAL:
14571 case GIMPLE_OMP_TARGET:
14572 case GIMPLE_OMP_TEAMS:
14573 case GIMPLE_OMP_TASKGROUP:
14574 wi->info = stmt;
14575 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
14576 wi->info = context;
14577 break;
14579 case GIMPLE_OMP_FOR:
14580 wi->info = stmt;
14581 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
14582 walk them. */
14583 walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt),
14584 diagnose_sb_2, NULL, wi);
14585 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
14586 wi->info = context;
14587 break;
14589 case GIMPLE_COND:
14591 gcond *cond_stmt = as_a <gcond *> (stmt);
14592 tree lab = gimple_cond_true_label (cond_stmt);
14593 if (lab)
14595 n = splay_tree_lookup (all_labels,
14596 (splay_tree_key) lab);
14597 diagnose_sb_0 (gsi_p, context,
14598 n ? (gimple *) n->value : NULL);
14600 lab = gimple_cond_false_label (cond_stmt);
14601 if (lab)
14603 n = splay_tree_lookup (all_labels,
14604 (splay_tree_key) lab);
14605 diagnose_sb_0 (gsi_p, context,
14606 n ? (gimple *) n->value : NULL);
14609 break;
14611 case GIMPLE_GOTO:
14613 tree lab = gimple_goto_dest (stmt);
14614 if (TREE_CODE (lab) != LABEL_DECL)
14615 break;
14617 n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
14618 diagnose_sb_0 (gsi_p, context, n ? (gimple *) n->value : NULL);
14620 break;
14622 case GIMPLE_SWITCH:
14624 gswitch *switch_stmt = as_a <gswitch *> (stmt);
14625 unsigned int i;
14626 for (i = 0; i < gimple_switch_num_labels (switch_stmt); ++i)
14628 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
14629 n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
14630 if (n && diagnose_sb_0 (gsi_p, context, (gimple *) n->value))
14631 break;
14634 break;
14636 case GIMPLE_RETURN:
14637 diagnose_sb_0 (gsi_p, context, NULL);
14638 break;
14640 default:
14641 break;
14644 return NULL_TREE;
14647 static unsigned int
14648 diagnose_omp_structured_block_errors (void)
14650 struct walk_stmt_info wi;
14651 gimple_seq body = gimple_body (current_function_decl);
14653 all_labels = splay_tree_new (splay_tree_compare_pointers, 0, 0);
14655 memset (&wi, 0, sizeof (wi));
14656 walk_gimple_seq (body, diagnose_sb_1, NULL, &wi);
14658 memset (&wi, 0, sizeof (wi));
14659 wi.want_locations = true;
14660 walk_gimple_seq_mod (&body, diagnose_sb_2, NULL, &wi);
14662 gimple_set_body (current_function_decl, body);
14664 splay_tree_delete (all_labels);
14665 all_labels = NULL;
14667 return 0;
14670 namespace {
14672 const pass_data pass_data_diagnose_omp_blocks =
14674 GIMPLE_PASS, /* type */
14675 "*diagnose_omp_blocks", /* name */
14676 OPTGROUP_OMP, /* optinfo_flags */
14677 TV_NONE, /* tv_id */
14678 PROP_gimple_any, /* properties_required */
14679 0, /* properties_provided */
14680 0, /* properties_destroyed */
14681 0, /* todo_flags_start */
14682 0, /* todo_flags_finish */
14685 class pass_diagnose_omp_blocks : public gimple_opt_pass
14687 public:
14688 pass_diagnose_omp_blocks (gcc::context *ctxt)
14689 : gimple_opt_pass (pass_data_diagnose_omp_blocks, ctxt)
14692 /* opt_pass methods: */
14693 virtual bool gate (function *)
14695 return flag_openacc || flag_openmp || flag_openmp_simd;
14697 virtual unsigned int execute (function *)
14699 return diagnose_omp_structured_block_errors ();
14702 }; // class pass_diagnose_omp_blocks
14704 } // anon namespace
14706 gimple_opt_pass *
14707 make_pass_diagnose_omp_blocks (gcc::context *ctxt)
14709 return new pass_diagnose_omp_blocks (ctxt);
14713 #include "gt-omp-low.h"