For obj-c stage-final re-use the checksum from the previous stage
[official-gcc.git] / gcc / omp-low.c
blob2d5cdf671ebd365d6276f20fbd71cd0393ef64bc
1 /* Lowering pass for OMP directives. Converts OMP directives into explicit
2 calls to the runtime library (libgomp), data marshalling to implement data
3 sharing and copying clauses, offloading to accelerators, and more.
5 Contributed by Diego Novillo <dnovillo@redhat.com>
7 Copyright (C) 2005-2021 Free Software Foundation, Inc.
9 This file is part of GCC.
11 GCC is free software; you can redistribute it and/or modify it under
12 the terms of the GNU General Public License as published by the Free
13 Software Foundation; either version 3, or (at your option) any later
14 version.
16 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
17 WARRANTY; without even the implied warranty of MERCHANTABILITY or
18 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19 for more details.
21 You should have received a copy of the GNU General Public License
22 along with GCC; see the file COPYING3. If not see
23 <http://www.gnu.org/licenses/>. */
25 #include "config.h"
26 #include "system.h"
27 #include "coretypes.h"
28 #include "backend.h"
29 #include "target.h"
30 #include "tree.h"
31 #include "gimple.h"
32 #include "tree-pass.h"
33 #include "ssa.h"
34 #include "cgraph.h"
35 #include "pretty-print.h"
36 #include "diagnostic-core.h"
37 #include "fold-const.h"
38 #include "stor-layout.h"
39 #include "internal-fn.h"
40 #include "gimple-fold.h"
41 #include "gimplify.h"
42 #include "gimple-iterator.h"
43 #include "gimplify-me.h"
44 #include "gimple-walk.h"
45 #include "tree-iterator.h"
46 #include "tree-inline.h"
47 #include "langhooks.h"
48 #include "tree-dfa.h"
49 #include "tree-ssa.h"
50 #include "splay-tree.h"
51 #include "omp-general.h"
52 #include "omp-low.h"
53 #include "gimple-low.h"
54 #include "alloc-pool.h"
55 #include "symbol-summary.h"
56 #include "tree-nested.h"
57 #include "context.h"
58 #include "gomp-constants.h"
59 #include "gimple-pretty-print.h"
60 #include "stringpool.h"
61 #include "attribs.h"
62 #include "omp-offload.h"
64 /* Lowering of OMP parallel and workshare constructs proceeds in two
65 phases. The first phase scans the function looking for OMP statements
66 and then for variables that must be replaced to satisfy data sharing
67 clauses. The second phase expands code for the constructs, as well as
68 re-gimplifying things when variables have been replaced with complex
69 expressions.
71 Final code generation is done by pass_expand_omp. The flowgraph is
72 scanned for regions which are then moved to a new
73 function, to be invoked by the thread library, or offloaded. */
/* Context structure.  Used to store information about each parallel
   directive in the code.  One omp_context is created per OMP construct
   encountered during scanning; contexts form a tree mirroring construct
   nesting.  */

struct omp_context
{
  /* This field must be at the beginning, as we do "inheritance": Some
     callback functions for tree-inline.c (e.g., omp_copy_decl)
     receive a copy_body_data pointer that is up-casted to an
     omp_context pointer.  */
  copy_body_data cb;

  /* The tree of contexts corresponding to the encountered constructs.  */
  struct omp_context *outer;
  gimple *stmt;

  /* Map variables to fields in a structure that allows communication
     between sending and receiving threads.  */
  splay_tree field_map;
  tree record_type;
  tree sender_decl;
  tree receiver_decl;

  /* These are used just by task contexts, if task firstprivate fn is
     needed.  srecord_type is used to communicate from the thread
     that encountered the task construct to task firstprivate fn,
     record_type is allocated by GOMP_task, initialized by task firstprivate
     fn and passed to the task body fn.  */
  splay_tree sfield_map;
  tree srecord_type;

  /* A chain of variables to add to the top-level block surrounding the
     construct.  In the case of a parallel, this is in the child function.  */
  tree block_vars;

  /* Label to which GOMP_cancel{,llation_point} and explicit and implicit
     barriers should jump to during omplower pass.  */
  tree cancel_label;

  /* The sibling GIMPLE_OMP_FOR simd with _simt_ clause or NULL
     otherwise.  */
  gimple *simt_stmt;

  /* For task reductions registered in this context, a vector containing
     the length of the private copies block (if constant, otherwise NULL)
     and then offsets (if constant, otherwise NULL) for each entry.  */
  vec<tree> task_reductions;

  /* A hash map from the reduction clauses to the registered array
     elts.  */
  hash_map<tree, unsigned> *task_reduction_map;

  /* And a hash map from the lastprivate(conditional:) variables to their
     corresponding tracking loop iteration variables.  */
  hash_map<tree, tree> *lastprivate_conditional_map;

  /* And a hash map from the allocate variables to their corresponding
     allocators.  */
  hash_map<tree, tree> *allocate_map;

  /* A tree_list of the reduction clauses in this context.  This is
     only used for checking the consistency of OpenACC reduction
     clauses in scan_omp_for and is not guaranteed to contain a valid
     value outside of this function.  */
  tree local_reduction_clauses;

  /* A tree_list of the reduction clauses in outer contexts.  This is
     only used for checking the consistency of OpenACC reduction
     clauses in scan_omp_for and is not guaranteed to contain a valid
     value outside of this function.  */
  tree outer_reduction_clauses;

  /* Nesting depth of this context.  Used to beautify error messages re
     invalid gotos.  The outermost ctx is depth 1, with depth 0 being
     reserved for the main body of the function.  */
  int depth;

  /* True if this parallel directive is nested within another.  */
  bool is_nested;

  /* True if this construct can be cancelled.  */
  bool cancellable;

  /* True if lower_omp_1 should look up lastprivate conditional in parent
     context.  */
  bool combined_into_simd_safelen1;

  /* True if there is nested scan context with inclusive clause.  */
  bool scan_inclusive;

  /* True if there is nested scan context with exclusive clause.  */
  bool scan_exclusive;

  /* True in the second simd loop of for simd with inscan reductions.  */
  bool for_simd_scan_phase;

  /* True if there is order(concurrent) clause on the construct.  */
  bool order_concurrent;

  /* True if there is bind clause on the construct (i.e. a loop construct).  */
  bool loop_p;

  /* Only used for omp target contexts.  True if a teams construct is
     strictly nested in it.  */
  bool teams_nested_p;

  /* Only used for omp target contexts.  True if an OpenMP construct other
     than teams is strictly nested in it.  */
  bool nonteams_nested_p;

  /* Candidates for adjusting OpenACC privatization level.  */
  vec<tree> oacc_privatization_candidates;
};
188 static splay_tree all_contexts;
189 static int taskreg_nesting_level;
190 static int target_nesting_level;
191 static bitmap task_shared_vars;
192 static bitmap global_nonaddressable_vars;
193 static vec<omp_context *> taskreg_contexts;
195 static void scan_omp (gimple_seq *, omp_context *);
196 static tree scan_omp_1_op (tree *, int *, void *);
198 #define WALK_SUBSTMTS \
199 case GIMPLE_BIND: \
200 case GIMPLE_TRY: \
201 case GIMPLE_CATCH: \
202 case GIMPLE_EH_FILTER: \
203 case GIMPLE_TRANSACTION: \
204 /* The sub-statements for these should be walked. */ \
205 *handled_ops_p = false; \
206 break;
208 /* Return whether CTX represents an OpenACC 'parallel' or 'serial' construct.
209 (This doesn't include OpenACC 'kernels' decomposed parts.) */
211 static bool
212 is_oacc_parallel_or_serial (omp_context *ctx)
214 enum gimple_code outer_type = gimple_code (ctx->stmt);
215 return ((outer_type == GIMPLE_OMP_TARGET)
216 && ((gimple_omp_target_kind (ctx->stmt)
217 == GF_OMP_TARGET_KIND_OACC_PARALLEL)
218 || (gimple_omp_target_kind (ctx->stmt)
219 == GF_OMP_TARGET_KIND_OACC_SERIAL)));
222 /* Return whether CTX represents an OpenACC 'kernels' construct.
223 (This doesn't include OpenACC 'kernels' decomposed parts.) */
225 static bool
226 is_oacc_kernels (omp_context *ctx)
228 enum gimple_code outer_type = gimple_code (ctx->stmt);
229 return ((outer_type == GIMPLE_OMP_TARGET)
230 && (gimple_omp_target_kind (ctx->stmt)
231 == GF_OMP_TARGET_KIND_OACC_KERNELS));
234 /* Return whether CTX represents an OpenACC 'kernels' decomposed part. */
236 static bool
237 is_oacc_kernels_decomposed_part (omp_context *ctx)
239 enum gimple_code outer_type = gimple_code (ctx->stmt);
240 return ((outer_type == GIMPLE_OMP_TARGET)
241 && ((gimple_omp_target_kind (ctx->stmt)
242 == GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED)
243 || (gimple_omp_target_kind (ctx->stmt)
244 == GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE)
245 || (gimple_omp_target_kind (ctx->stmt)
246 == GF_OMP_TARGET_KIND_OACC_DATA_KERNELS)));
249 /* Return true if STMT corresponds to an OpenMP target region. */
250 static bool
251 is_omp_target (gimple *stmt)
253 if (gimple_code (stmt) == GIMPLE_OMP_TARGET)
255 int kind = gimple_omp_target_kind (stmt);
256 return (kind == GF_OMP_TARGET_KIND_REGION
257 || kind == GF_OMP_TARGET_KIND_DATA
258 || kind == GF_OMP_TARGET_KIND_ENTER_DATA
259 || kind == GF_OMP_TARGET_KIND_EXIT_DATA);
261 return false;
/* If DECL is the artificial dummy VAR_DECL created for non-static
   data member privatization, return the underlying "this" parameter,
   otherwise return NULL.

   The dummy var is recognized by being an artificial, ignored VAR_DECL
   whose DECL_VALUE_EXPR is a COMPONENT_REF; we then strip component
   refs, dereferences, conversions and pointer arithmetic to find the
   base, which must be an artificial pointer PARM_DECL of the current
   function (i.e. "this").  */

tree
omp_member_access_dummy_var (tree decl)
{
  if (!VAR_P (decl)
      || !DECL_ARTIFICIAL (decl)
      || !DECL_IGNORED_P (decl)
      || !DECL_HAS_VALUE_EXPR_P (decl)
      || !lang_hooks.decls.omp_disregard_value_expr (decl, false))
    return NULL_TREE;

  tree v = DECL_VALUE_EXPR (decl);
  if (TREE_CODE (v) != COMPONENT_REF)
    return NULL_TREE;

  while (1)
    switch (TREE_CODE (v))
      {
      case COMPONENT_REF:
      case MEM_REF:
      case INDIRECT_REF:
      CASE_CONVERT:
      case POINTER_PLUS_EXPR:
	/* Keep stripping wrappers until we reach the base object.  */
	v = TREE_OPERAND (v, 0);
	continue;
      case PARM_DECL:
	if (DECL_CONTEXT (v) == current_function_decl
	    && DECL_ARTIFICIAL (v)
	    && TREE_CODE (TREE_TYPE (v)) == POINTER_TYPE)
	  return v;
	return NULL_TREE;
      default:
	return NULL_TREE;
      }
}
303 /* Helper for unshare_and_remap, called through walk_tree. */
305 static tree
306 unshare_and_remap_1 (tree *tp, int *walk_subtrees, void *data)
308 tree *pair = (tree *) data;
309 if (*tp == pair[0])
311 *tp = unshare_expr (pair[1]);
312 *walk_subtrees = 0;
314 else if (IS_TYPE_OR_DECL_P (*tp))
315 *walk_subtrees = 0;
316 return NULL_TREE;
319 /* Return unshare_expr (X) with all occurrences of FROM
320 replaced with TO. */
322 static tree
323 unshare_and_remap (tree x, tree from, tree to)
325 tree pair[2] = { from, to };
326 x = unshare_expr (x);
327 walk_tree (&x, unshare_and_remap_1, pair, NULL);
328 return x;
331 /* Convenience function for calling scan_omp_1_op on tree operands. */
333 static inline tree
334 scan_omp_op (tree *tp, omp_context *ctx)
336 struct walk_stmt_info wi;
338 memset (&wi, 0, sizeof (wi));
339 wi.info = ctx;
340 wi.want_locations = true;
342 return walk_tree (tp, scan_omp_1_op, &wi, NULL);
345 static void lower_omp (gimple_seq *, omp_context *);
346 static tree lookup_decl_in_outer_ctx (tree, omp_context *);
347 static tree maybe_lookup_decl_in_outer_ctx (tree, omp_context *);
349 /* Return true if CTX is for an omp parallel. */
351 static inline bool
352 is_parallel_ctx (omp_context *ctx)
354 return gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL;
358 /* Return true if CTX is for an omp task. */
360 static inline bool
361 is_task_ctx (omp_context *ctx)
363 return gimple_code (ctx->stmt) == GIMPLE_OMP_TASK;
367 /* Return true if CTX is for an omp taskloop. */
369 static inline bool
370 is_taskloop_ctx (omp_context *ctx)
372 return gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
373 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP;
377 /* Return true if CTX is for a host omp teams. */
379 static inline bool
380 is_host_teams_ctx (omp_context *ctx)
382 return gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
383 && gimple_omp_teams_host (as_a <gomp_teams *> (ctx->stmt));
386 /* Return true if CTX is for an omp parallel or omp task or host omp teams
387 (the last one is strictly not a task region in OpenMP speak, but we
388 need to treat it similarly). */
390 static inline bool
391 is_taskreg_ctx (omp_context *ctx)
393 return is_parallel_ctx (ctx) || is_task_ctx (ctx) || is_host_teams_ctx (ctx);
396 /* Return true if EXPR is variable sized. */
398 static inline bool
399 is_variable_sized (const_tree expr)
401 return !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
404 /* Lookup variables. The "maybe" form
405 allows for the variable form to not have been entered, otherwise we
406 assert that the variable must have been entered. */
408 static inline tree
409 lookup_decl (tree var, omp_context *ctx)
411 tree *n = ctx->cb.decl_map->get (var);
412 return *n;
415 static inline tree
416 maybe_lookup_decl (const_tree var, omp_context *ctx)
418 tree *n = ctx->cb.decl_map->get (const_cast<tree> (var));
419 return n ? *n : NULL_TREE;
422 static inline tree
423 lookup_field (tree var, omp_context *ctx)
425 splay_tree_node n;
426 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) var);
427 return (tree) n->value;
430 static inline tree
431 lookup_sfield (splay_tree_key key, omp_context *ctx)
433 splay_tree_node n;
434 n = splay_tree_lookup (ctx->sfield_map
435 ? ctx->sfield_map : ctx->field_map, key);
436 return (tree) n->value;
439 static inline tree
440 lookup_sfield (tree var, omp_context *ctx)
442 return lookup_sfield ((splay_tree_key) var, ctx);
445 static inline tree
446 maybe_lookup_field (splay_tree_key key, omp_context *ctx)
448 splay_tree_node n;
449 n = splay_tree_lookup (ctx->field_map, key);
450 return n ? (tree) n->value : NULL_TREE;
453 static inline tree
454 maybe_lookup_field (tree var, omp_context *ctx)
456 return maybe_lookup_field ((splay_tree_key) var, ctx);
/* Return true if DECL should be copied by pointer.  SHARED_CTX is
   the parallel context if DECL is to be shared; when NULL, only the
   aggregate/atomic test is applied.  May mark the outer copy of DECL
   addressable (and record it in task_shared_vars) as a side effect.  */

static bool
use_pointer_for_field (tree decl, omp_context *shared_ctx)
{
  /* Aggregates and atomics are always passed by reference.  */
  if (AGGREGATE_TYPE_P (TREE_TYPE (decl))
      || TYPE_ATOMIC (TREE_TYPE (decl)))
    return true;

  /* We can only use copy-in/copy-out semantics for shared variables
     when we know the value is not accessible from an outer scope.  */
  if (shared_ctx)
    {
      gcc_assert (!is_gimple_omp_oacc (shared_ctx->stmt));

      /* ??? Trivially accessible from anywhere.  But why would we even
	 be passing an address in this case?  Should we simply assert
	 this to be false, or should we have a cleanup pass that removes
	 these from the list of mappings?  */
      if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, shared_ctx)))
	return true;

      /* For variables with DECL_HAS_VALUE_EXPR_P set, we cannot tell
	 without analyzing the expression whether or not its location
	 is accessible to anyone else.  In the case of nested parallel
	 regions it certainly may be.  */
      if (TREE_CODE (decl) != RESULT_DECL && DECL_HAS_VALUE_EXPR_P (decl))
	return true;

      /* Do not use copy-in/copy-out for variables that have their
	 address taken.  */
      if (is_global_var (decl))
	{
	  /* For file scope vars, track whether we've seen them as
	     non-addressable initially and in that case, keep the same
	     answer for the duration of the pass, even when they are made
	     addressable later on e.g. through reduction expansion.  Global
	     variables which weren't addressable before the pass will not
	     have their privatized copies address taken.  See PR91216.  */
	  if (!TREE_ADDRESSABLE (decl))
	    {
	      if (!global_nonaddressable_vars)
		global_nonaddressable_vars = BITMAP_ALLOC (NULL);
	      bitmap_set_bit (global_nonaddressable_vars, DECL_UID (decl));
	    }
	  else if (!global_nonaddressable_vars
		   || !bitmap_bit_p (global_nonaddressable_vars,
				     DECL_UID (decl)))
	    return true;
	}
      else if (TREE_ADDRESSABLE (decl))
	return true;

      /* lower_send_shared_vars only uses copy-in, but not copy-out
	 for these.  */
      if (TREE_READONLY (decl)
	  || ((TREE_CODE (decl) == RESULT_DECL
	       || TREE_CODE (decl) == PARM_DECL)
	      && DECL_BY_REFERENCE (decl)))
	return false;

      /* Disallow copy-in/out in nested parallel if
	 decl is shared in outer parallel, otherwise
	 each thread could store the shared variable
	 in its own copy-in location, making the
	 variable no longer really shared.  */
      if (shared_ctx->is_nested)
	{
	  omp_context *up;

	  /* Find the innermost enclosing task region (or offloaded
	     target) that already has a mapping for DECL.  */
	  for (up = shared_ctx->outer; up; up = up->outer)
	    if ((is_taskreg_ctx (up)
		 || (gimple_code (up->stmt) == GIMPLE_OMP_TARGET
		     && is_gimple_omp_offloaded (up->stmt)))
		&& maybe_lookup_decl (decl, up))
	      break;

	  if (up)
	    {
	      tree c;

	      /* Check whether DECL is actually mapped/shared on that
		 enclosing construct via an explicit clause.  */
	      if (gimple_code (up->stmt) == GIMPLE_OMP_TARGET)
		{
		  for (c = gimple_omp_target_clauses (up->stmt);
		       c; c = OMP_CLAUSE_CHAIN (c))
		    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
			&& OMP_CLAUSE_DECL (c) == decl)
		      break;
		}
	      else
		for (c = gimple_omp_taskreg_clauses (up->stmt);
		     c; c = OMP_CLAUSE_CHAIN (c))
		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
		      && OMP_CLAUSE_DECL (c) == decl)
		    break;

	      if (c)
		goto maybe_mark_addressable_and_ret;
	    }
	}

      /* For tasks avoid using copy-in/out.  As tasks can be
	 deferred or executed in different thread, when GOMP_task
	 returns, the task hasn't necessarily terminated.  */
      if (is_task_ctx (shared_ctx))
	{
	  tree outer;
	maybe_mark_addressable_and_ret:
	  outer = maybe_lookup_decl_in_outer_ctx (decl, shared_ctx);
	  if (is_gimple_reg (outer) && !omp_member_access_dummy_var (outer))
	    {
	      /* Taking address of OUTER in lower_send_shared_vars
		 might need regimplification of everything that uses the
		 variable.  */
	      if (!task_shared_vars)
		task_shared_vars = BITMAP_ALLOC (NULL);
	      bitmap_set_bit (task_shared_vars, DECL_UID (outer));
	      TREE_ADDRESSABLE (outer) = 1;
	    }
	  return true;
	}
    }

  return false;
}
586 /* Construct a new automatic decl similar to VAR. */
588 static tree
589 omp_copy_decl_2 (tree var, tree name, tree type, omp_context *ctx)
591 tree copy = copy_var_decl (var, name, type);
593 DECL_CONTEXT (copy) = current_function_decl;
594 DECL_CHAIN (copy) = ctx->block_vars;
595 /* If VAR is listed in task_shared_vars, it means it wasn't
596 originally addressable and is just because task needs to take
597 it's address. But we don't need to take address of privatizations
598 from that var. */
599 if (TREE_ADDRESSABLE (var)
600 && ((task_shared_vars
601 && bitmap_bit_p (task_shared_vars, DECL_UID (var)))
602 || (global_nonaddressable_vars
603 && bitmap_bit_p (global_nonaddressable_vars, DECL_UID (var)))))
604 TREE_ADDRESSABLE (copy) = 0;
605 ctx->block_vars = copy;
607 return copy;
610 static tree
611 omp_copy_decl_1 (tree var, omp_context *ctx)
613 return omp_copy_decl_2 (var, DECL_NAME (var), TREE_TYPE (var), ctx);
616 /* Build COMPONENT_REF and set TREE_THIS_VOLATILE and TREE_READONLY on it
617 as appropriate. */
618 static tree
619 omp_build_component_ref (tree obj, tree field)
621 tree ret = build3 (COMPONENT_REF, TREE_TYPE (field), obj, field, NULL);
622 if (TREE_THIS_VOLATILE (field))
623 TREE_THIS_VOLATILE (ret) |= 1;
624 if (TREE_READONLY (field))
625 TREE_READONLY (ret) |= 1;
626 return ret;
629 /* Build tree nodes to access the field for VAR on the receiver side. */
631 static tree
632 build_receiver_ref (tree var, bool by_ref, omp_context *ctx)
634 tree x, field = lookup_field (var, ctx);
636 /* If the receiver record type was remapped in the child function,
637 remap the field into the new record type. */
638 x = maybe_lookup_field (field, ctx);
639 if (x != NULL)
640 field = x;
642 x = build_simple_mem_ref (ctx->receiver_decl);
643 TREE_THIS_NOTRAP (x) = 1;
644 x = omp_build_component_ref (x, field);
645 if (by_ref)
647 x = build_simple_mem_ref (x);
648 TREE_THIS_NOTRAP (x) = 1;
651 return x;
/* Build tree nodes to access VAR in the scope outer to CTX.  In the case
   of a parallel, this is a component reference; for workshare constructs
   this is some variable.  CODE is the clause code the reference is built
   for (OMP_CLAUSE_ERROR when not clause-specific).  */

static tree
build_outer_var_ref (tree var, omp_context *ctx,
		     enum omp_clause_code code = OMP_CLAUSE_ERROR)
{
  tree x;
  omp_context *outer = ctx->outer;
  /* Taskgroups are transparent for outer-variable lookup.  */
  while (outer && gimple_code (outer->stmt) == GIMPLE_OMP_TASKGROUP)
    outer = outer->outer;

  if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
    x = var;
  else if (is_variable_sized (var))
    {
      /* For VLAs, chase the pointer stored in the DECL_VALUE_EXPR and
	 dereference its outer reference instead.  */
      x = TREE_OPERAND (DECL_VALUE_EXPR (var), 0);
      x = build_outer_var_ref (x, ctx, code);
      x = build_simple_mem_ref (x);
    }
  else if (is_taskreg_ctx (ctx))
    {
      bool by_ref = use_pointer_for_field (var, NULL);
      x = build_receiver_ref (var, by_ref, ctx);
    }
  else if ((gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
	    && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
	   || ctx->loop_p
	   || (code == OMP_CLAUSE_PRIVATE
	       && (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
		   || gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS
		   || gimple_code (ctx->stmt) == GIMPLE_OMP_SINGLE)))
    {
      /* #pragma omp simd isn't a worksharing construct, and can reference
	 even private vars in its linear etc. clauses.
	 Similarly for OMP_CLAUSE_PRIVATE with outer ref, that can refer
	 to private vars in all worksharing constructs.  */
      x = NULL_TREE;
      if (outer && is_taskreg_ctx (outer))
	x = lookup_decl (var, outer);
      else if (outer)
	x = maybe_lookup_decl_in_outer_ctx (var, ctx);
      if (x == NULL_TREE)
	x = var;
    }
  else if (code == OMP_CLAUSE_LASTPRIVATE && is_taskloop_ctx (ctx))
    {
      gcc_assert (outer);
      splay_tree_node n
	= splay_tree_lookup (outer->field_map,
			     (splay_tree_key) &DECL_UID (var));
      if (n == NULL)
	{
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, outer)))
	    x = var;
	  else
	    x = lookup_decl (var, outer);
	}
      else
	{
	  tree field = (tree) n->value;
	  /* If the receiver record type was remapped in the child function,
	     remap the field into the new record type.  */
	  x = maybe_lookup_field (field, outer);
	  if (x != NULL)
	    field = x;

	  x = build_simple_mem_ref (outer->receiver_decl);
	  x = omp_build_component_ref (x, field);
	  if (use_pointer_for_field (var, outer))
	    x = build_simple_mem_ref (x);
	}
    }
  else if (outer)
    x = lookup_decl (var, outer);
  else if (omp_is_reference (var))
    /* This can happen with orphaned constructs.  If var is reference, it is
       possible it is shared and as such valid.  */
    x = var;
  else if (omp_member_access_dummy_var (var))
    x = var;
  else
    gcc_unreachable ();

  if (x == var)
    {
      /* For member-access dummy vars, substitute the remapped "this"
	 into a fresh copy of the value expr.  */
      tree t = omp_member_access_dummy_var (var);
      if (t)
	{
	  x = DECL_VALUE_EXPR (var);
	  tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
	  if (o != t)
	    x = unshare_and_remap (x, t, o);
	  else
	    x = unshare_expr (x);
	}
    }

  if (omp_is_reference (var))
    x = build_simple_mem_ref (x);

  return x;
}
759 /* Build tree nodes to access the field for VAR on the sender side. */
761 static tree
762 build_sender_ref (splay_tree_key key, omp_context *ctx)
764 tree field = lookup_sfield (key, ctx);
765 return omp_build_component_ref (ctx->sender_decl, field);
768 static tree
769 build_sender_ref (tree var, omp_context *ctx)
771 return build_sender_ref ((splay_tree_key) var, ctx);
/* Add a new field for VAR inside the structure CTX->SENDER_DECL.  MASK is
   a bitmask controlling the installation:
     bit 0 (1)  - install in field_map / record_type;
     bit 1 (2)  - install in sfield_map / srecord_type;
     bit 2 (4)  - VAR is a VLA: store a pointer-to-pointer field;
     bit 3 (8)  - key the maps on &DECL_UID (VAR) instead of VAR;
     bit 4 (16) - key on &DECL_NAME (VAR) and use the language's
		  omp_array_data type;
     bit 5 (32) - suppress the reference-type unwrapping done for
		  (mask & 3) == 1.  */

static void
install_var_field (tree var, bool by_ref, int mask, omp_context *ctx)
{
  tree field, type, sfield = NULL_TREE;
  splay_tree_key key = (splay_tree_key) var;

  if ((mask & 16) != 0)
    {
      key = (splay_tree_key) &DECL_NAME (var);
      gcc_checking_assert (key != (splay_tree_key) var);
    }
  if ((mask & 8) != 0)
    {
      key = (splay_tree_key) &DECL_UID (var);
      gcc_checking_assert (key != (splay_tree_key) var);
    }
  /* Each key may be installed at most once per map.  */
  gcc_assert ((mask & 1) == 0
	      || !splay_tree_lookup (ctx->field_map, key));
  gcc_assert ((mask & 2) == 0 || !ctx->sfield_map
	      || !splay_tree_lookup (ctx->sfield_map, key));
  gcc_assert ((mask & 3) == 3
	      || !is_gimple_omp_oacc (ctx->stmt));

  type = TREE_TYPE (var);
  if ((mask & 16) != 0)
    type = lang_hooks.decls.omp_array_data (var, true);

  /* Prevent redeclaring the var in the split-off function with a restrict
     pointer type.  Note that we only clear type itself, restrict qualifiers in
     the pointed-to type will be ignored by points-to analysis.  */
  if (POINTER_TYPE_P (type)
      && TYPE_RESTRICT (type))
    type = build_qualified_type (type, TYPE_QUALS (type) & ~TYPE_QUAL_RESTRICT);

  if (mask & 4)
    {
      gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
      type = build_pointer_type (build_pointer_type (type));
    }
  else if (by_ref)
    type = build_pointer_type (type);
  else if ((mask & (32 | 3)) == 1 && omp_is_reference (var))
    type = TREE_TYPE (type);

  field = build_decl (DECL_SOURCE_LOCATION (var),
		      FIELD_DECL, DECL_NAME (var), type);

  /* Remember what variable this field was created for.  This does have a
     side effect of making dwarf2out ignore this member, so for helpful
     debugging we clear it later in delete_omp_context.  */
  DECL_ABSTRACT_ORIGIN (field) = var;
  if ((mask & 16) == 0 && type == TREE_TYPE (var))
    {
      SET_DECL_ALIGN (field, DECL_ALIGN (var));
      DECL_USER_ALIGN (field) = DECL_USER_ALIGN (var);
      TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (var);
    }
  else
    SET_DECL_ALIGN (field, TYPE_ALIGN (type));

  if ((mask & 3) == 3)
    {
      insert_field_into_struct (ctx->record_type, field);
      if (ctx->srecord_type)
	{
	  sfield = build_decl (DECL_SOURCE_LOCATION (var),
			       FIELD_DECL, DECL_NAME (var), type);
	  DECL_ABSTRACT_ORIGIN (sfield) = var;
	  SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
	  DECL_USER_ALIGN (sfield) = DECL_USER_ALIGN (field);
	  TREE_THIS_VOLATILE (sfield) = TREE_THIS_VOLATILE (field);
	  insert_field_into_struct (ctx->srecord_type, sfield);
	}
    }
  else
    {
      if (ctx->srecord_type == NULL_TREE)
	{
	  tree t;

	  /* Lazily create srecord_type, mirroring every field already
	     installed in record_type.  */
	  ctx->srecord_type = lang_hooks.types.make_type (RECORD_TYPE);
	  ctx->sfield_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
	  for (t = TYPE_FIELDS (ctx->record_type); t ; t = TREE_CHAIN (t))
	    {
	      sfield = build_decl (DECL_SOURCE_LOCATION (t),
				   FIELD_DECL, DECL_NAME (t), TREE_TYPE (t));
	      DECL_ABSTRACT_ORIGIN (sfield) = DECL_ABSTRACT_ORIGIN (t);
	      insert_field_into_struct (ctx->srecord_type, sfield);
	      splay_tree_insert (ctx->sfield_map,
				 (splay_tree_key) DECL_ABSTRACT_ORIGIN (t),
				 (splay_tree_value) sfield);
	    }
	}
      sfield = field;
      insert_field_into_struct ((mask & 1) ? ctx->record_type
				: ctx->srecord_type, field);
    }

  if (mask & 1)
    splay_tree_insert (ctx->field_map, key, (splay_tree_value) field);
  if ((mask & 2) && ctx->sfield_map)
    splay_tree_insert (ctx->sfield_map, key, (splay_tree_value) sfield);
}
881 static tree
882 install_var_local (tree var, omp_context *ctx)
884 tree new_var = omp_copy_decl_1 (var, ctx);
885 insert_decl_map (&ctx->cb, var, new_var);
886 return new_var;
889 /* Adjust the replacement for DECL in CTX for the new context. This means
890 copying the DECL_VALUE_EXPR, and fixing up the type. */
892 static void
893 fixup_remapped_decl (tree decl, omp_context *ctx, bool private_debug)
895 tree new_decl, size;
897 new_decl = lookup_decl (decl, ctx);
899 TREE_TYPE (new_decl) = remap_type (TREE_TYPE (decl), &ctx->cb);
901 if ((!TREE_CONSTANT (DECL_SIZE (new_decl)) || private_debug)
902 && DECL_HAS_VALUE_EXPR_P (decl))
904 tree ve = DECL_VALUE_EXPR (decl);
905 walk_tree (&ve, copy_tree_body_r, &ctx->cb, NULL);
906 SET_DECL_VALUE_EXPR (new_decl, ve);
907 DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
910 if (!TREE_CONSTANT (DECL_SIZE (new_decl)))
912 size = remap_decl (DECL_SIZE (decl), &ctx->cb);
913 if (size == error_mark_node)
914 size = TYPE_SIZE (TREE_TYPE (new_decl));
915 DECL_SIZE (new_decl) = size;
917 size = remap_decl (DECL_SIZE_UNIT (decl), &ctx->cb);
918 if (size == error_mark_node)
919 size = TYPE_SIZE_UNIT (TREE_TYPE (new_decl));
920 DECL_SIZE_UNIT (new_decl) = size;
/* The callback for remap_decl.  Search all containing contexts for a
   mapping of the variable; this avoids having to duplicate the splay
   tree ahead of time.  We know a mapping doesn't already exist in the
   given context.  Create new mappings to implement default semantics.  */

static tree
omp_copy_decl (tree var, copy_body_data *cb)
{
  /* CB is really the omp_context (copy_body_data is its first member).  */
  omp_context *ctx = (omp_context *) cb;
  tree new_var;

  if (TREE_CODE (var) == LABEL_DECL)
    {
      /* Forced/non-local labels must not be duplicated.  */
      if (FORCED_LABEL (var) || DECL_NONLOCAL (var))
	return var;
      new_var = create_artificial_label (DECL_SOURCE_LOCATION (var));
      DECL_CONTEXT (new_var) = current_function_decl;
      insert_decl_map (&ctx->cb, var, new_var);
      return new_var;
    }

  /* Walk outward through enclosing task regions looking for an
     existing mapping of VAR.  */
  while (!is_taskreg_ctx (ctx))
    {
      ctx = ctx->outer;
      if (ctx == NULL)
	return var;
      new_var = maybe_lookup_decl (var, ctx);
      if (new_var)
	return new_var;
    }

  if (is_global_var (var) || decl_function_context (var) != ctx->cb.src_fn)
    return var;

  return error_mark_node;
}
/* Create a new context, with OUTER_CTX being the surrounding context.
   The context is registered in all_contexts keyed on STMT; nested
   contexts inherit the copy_body_data of their parent.  */

static omp_context *
new_omp_context (gimple *stmt, omp_context *outer_ctx)
{
  omp_context *ctx = XCNEW (omp_context);

  splay_tree_insert (all_contexts, (splay_tree_key) stmt,
		     (splay_tree_value) ctx);
  ctx->stmt = stmt;

  if (outer_ctx)
    {
      ctx->outer = outer_ctx;
      ctx->cb = outer_ctx->cb;
      ctx->cb.block = NULL;
      ctx->depth = outer_ctx->depth + 1;
    }
  else
    {
      ctx->cb.src_fn = current_function_decl;
      ctx->cb.dst_fn = current_function_decl;
      ctx->cb.src_node = cgraph_node::get (current_function_decl);
      gcc_checking_assert (ctx->cb.src_node);
      ctx->cb.dst_node = ctx->cb.src_node;
      ctx->cb.src_cfun = cfun;
      ctx->cb.copy_decl = omp_copy_decl;
      ctx->cb.eh_lp_nr = 0;
      ctx->cb.transform_call_graph_edges = CB_CGE_MOVE;
      ctx->cb.adjust_array_error_bounds = true;
      ctx->cb.dont_remap_vla_if_no_change = true;
      ctx->depth = 1;
    }

  ctx->cb.decl_map = new hash_map<tree, tree>;

  return ctx;
}
1000 static gimple_seq maybe_catch_exception (gimple_seq);
/* Finalize task copyfn: gimplify the task firstprivate copy function of
   TASK_STMT (if any), wrap it in exception handling when needed, and
   register it with the callgraph.  */

static void
finalize_task_copyfn (gomp_task *task_stmt)
{
  struct function *child_cfun;
  tree child_fn;
  gimple_seq seq = NULL, new_seq;
  gbind *bind;

  child_fn = gimple_omp_task_copy_fn (task_stmt);
  if (child_fn == NULL_TREE)
    return;

  child_cfun = DECL_STRUCT_FUNCTION (child_fn);
  DECL_STRUCT_FUNCTION (child_fn)->curr_properties = cfun->curr_properties;

  push_cfun (child_cfun);
  bind = gimplify_body (child_fn, false);
  gimple_seq_add_stmt (&seq, bind);
  new_seq = maybe_catch_exception (seq);
  if (new_seq != seq)
    {
      /* Exception handling was added; re-wrap in a fresh bind.  */
      bind = gimple_build_bind (NULL, new_seq, NULL);
      seq = NULL;
      gimple_seq_add_stmt (&seq, bind);
    }
  gimple_set_body (child_fn, seq);
  pop_cfun ();

  /* Inform the callgraph about the new function.  */
  cgraph_node *node = cgraph_node::get_create (child_fn);
  node->parallelized_function = 1;
  cgraph_node::add_new_function (child_fn, false);
}
/* Destroy an omp_context data structure.  Called through the splay tree
   value delete callback.  */

static void
delete_omp_context (splay_tree_value value)
{
  omp_context *ctx = (omp_context *) value;

  delete ctx->cb.decl_map;

  if (ctx->field_map)
    splay_tree_delete (ctx->field_map);
  if (ctx->sfield_map)
    splay_tree_delete (ctx->sfield_map);

  /* We hijacked DECL_ABSTRACT_ORIGIN earlier.  We need to clear it before
     it produces corrupt debug information.  */
  if (ctx->record_type)
    {
      tree t;
      for (t = TYPE_FIELDS (ctx->record_type); t ; t = DECL_CHAIN (t))
	DECL_ABSTRACT_ORIGIN (t) = NULL;
    }
  if (ctx->srecord_type)
    {
      tree t;
      for (t = TYPE_FIELDS (ctx->srecord_type); t ; t = DECL_CHAIN (t))
	DECL_ABSTRACT_ORIGIN (t) = NULL;
    }

  /* Task contexts own a copy function that still needs finalizing.  */
  if (is_task_ctx (ctx))
    finalize_task_copyfn (as_a <gomp_task *> (ctx->stmt));

  if (ctx->task_reduction_map)
    {
      ctx->task_reductions.release ();
      delete ctx->task_reduction_map;
    }

  delete ctx->lastprivate_conditional_map;
  delete ctx->allocate_map;

  XDELETE (ctx);
}
1083 /* Fix up RECEIVER_DECL with a type that has been remapped to the child
1084 context. */
1086 static void
1087 fixup_child_record_type (omp_context *ctx)
1089 tree f, type = ctx->record_type;
1091 if (!ctx->receiver_decl)
1092 return;
1093 /* ??? It isn't sufficient to just call remap_type here, because
1094 variably_modified_type_p doesn't work the way we expect for
1095 record types. Testing each field for whether it needs remapping
1096 and creating a new record by hand works, however. */
1097 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
1098 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
1099 break;
1100 if (f)
1102 tree name, new_fields = NULL;
1104 type = lang_hooks.types.make_type (RECORD_TYPE);
1105 name = DECL_NAME (TYPE_NAME (ctx->record_type));
1106 name = build_decl (DECL_SOURCE_LOCATION (ctx->receiver_decl),
1107 TYPE_DECL, name, type);
1108 TYPE_NAME (type) = name;
1110 for (f = TYPE_FIELDS (ctx->record_type); f ; f = DECL_CHAIN (f))
1112 tree new_f = copy_node (f);
1113 DECL_CONTEXT (new_f) = type;
1114 TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &ctx->cb);
1115 DECL_CHAIN (new_f) = new_fields;
1116 walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &ctx->cb, NULL);
1117 walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r,
1118 &ctx->cb, NULL);
1119 walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
1120 &ctx->cb, NULL);
1121 new_fields = new_f;
1123 /* Arrange to be able to look up the receiver field
1124 given the sender field. */
1125 splay_tree_insert (ctx->field_map, (splay_tree_key) f,
1126 (splay_tree_value) new_f);
1128 TYPE_FIELDS (type) = nreverse (new_fields);
1129 layout_type (type);
1132 /* In a target region we never modify any of the pointers in *.omp_data_i,
1133 so attempt to help the optimizers. */
1134 if (is_gimple_omp_offloaded (ctx->stmt))
1135 type = build_qualified_type (type, TYPE_QUAL_CONST);
1137 TREE_TYPE (ctx->receiver_decl)
1138 = build_qualified_type (build_reference_type (type), TYPE_QUAL_RESTRICT);
1141 /* Instantiate decls as necessary in CTX to satisfy the data sharing
1142 specified by CLAUSES. */
1144 static void
1145 scan_sharing_clauses (tree clauses, omp_context *ctx)
1147 tree c, decl;
1148 bool scan_array_reductions = false;
/* First pass: record ALLOCATE clauses that use a non-default allocator
   in ctx->allocate_map.  Entries may be removed again below when the
   decl turns out to be shared or otherwise not privatized here.  */
1150 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1151 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_ALLOCATE
1152 && (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c) == NULL_TREE
1153 /* omp_default_mem_alloc is 1 */
1154 || !integer_onep (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c))))
1156 if (ctx->allocate_map == NULL)
1157 ctx->allocate_map = new hash_map<tree, tree>;
1158 ctx->allocate_map->put (OMP_CLAUSE_DECL (c),
1159 OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)
1160 ? OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)
1161 : integer_zero_node);
/* Second pass: for each clause install struct fields (for the sender
   record) and/or local copies of the affected decls in CTX.  */
1164 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1166 bool by_ref;
1168 switch (OMP_CLAUSE_CODE (c))
1170 case OMP_CLAUSE_PRIVATE:
1171 decl = OMP_CLAUSE_DECL (c);
1172 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
1173 goto do_private;
1174 else if (!is_variable_sized (decl))
1175 install_var_local (decl, ctx);
1176 break;
1178 case OMP_CLAUSE_SHARED:
1179 decl = OMP_CLAUSE_DECL (c);
/* A decl that is shared is not privately allocated here.  */
1180 if (ctx->allocate_map && ctx->allocate_map->get (decl))
1181 ctx->allocate_map->remove (decl);
1182 /* Ignore shared directives in teams construct inside of
1183 target construct. */
1184 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
1185 && !is_host_teams_ctx (ctx))
1187 /* Global variables don't need to be copied,
1188 the receiver side will use them directly. */
1189 tree odecl = maybe_lookup_decl_in_outer_ctx (decl, ctx);
1190 if (is_global_var (odecl))
1191 break;
1192 insert_decl_map (&ctx->cb, decl, odecl);
1193 break;
1195 gcc_assert (is_taskreg_ctx (ctx));
1196 gcc_assert (!COMPLETE_TYPE_P (TREE_TYPE (decl))
1197 || !is_variable_sized (decl));
1198 /* Global variables don't need to be copied,
1199 the receiver side will use them directly. */
1200 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1201 break;
1202 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
1204 use_pointer_for_field (decl, ctx);
1205 break;
1207 by_ref = use_pointer_for_field (decl, NULL);
1208 if ((! TREE_READONLY (decl) && !OMP_CLAUSE_SHARED_READONLY (c))
1209 || TREE_ADDRESSABLE (decl)
1210 || by_ref
1211 || omp_is_reference (decl))
1213 by_ref = use_pointer_for_field (decl, ctx);
1214 install_var_field (decl, by_ref, 3, ctx);
1215 install_var_local (decl, ctx);
1216 break;
1218 /* We don't need to copy const scalar vars back. */
1219 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_FIRSTPRIVATE);
1220 goto do_private;
1222 case OMP_CLAUSE_REDUCTION:
1223 /* Collect 'reduction' clauses on OpenACC compute construct. */
1224 if (is_gimple_omp_oacc (ctx->stmt)
1225 && is_gimple_omp_offloaded (ctx->stmt))
1227 /* No 'reduction' clauses on OpenACC 'kernels'. */
1228 gcc_checking_assert (!is_oacc_kernels (ctx));
1229 /* Likewise, on OpenACC 'kernels' decomposed parts. */
1230 gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx));
1232 ctx->local_reduction_clauses
1233 = tree_cons (NULL, c, ctx->local_reduction_clauses)
1235 /* FALLTHRU */
1237 case OMP_CLAUSE_IN_REDUCTION:
1238 decl = OMP_CLAUSE_DECL (c);
1239 if (ctx->allocate_map
1240 && ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
1241 && (OMP_CLAUSE_REDUCTION_INSCAN (c)
1242 || OMP_CLAUSE_REDUCTION_TASK (c)))
1243 || is_task_ctx (ctx)))
1245 /* For now. */
1246 if (ctx->allocate_map->get (decl))
1247 ctx->allocate_map->remove (decl);
/* Array-section reductions are represented as a MEM_REF off the base
   pointer; install the underlying base decl instead.  */
1249 if (TREE_CODE (decl) == MEM_REF)
1251 tree t = TREE_OPERAND (decl, 0);
1252 if (TREE_CODE (t) == POINTER_PLUS_EXPR)
1253 t = TREE_OPERAND (t, 0);
1254 if (TREE_CODE (t) == INDIRECT_REF
1255 || TREE_CODE (t) == ADDR_EXPR)
1256 t = TREE_OPERAND (t, 0);
1257 install_var_local (t, ctx);
1258 if (is_taskreg_ctx (ctx)
1259 && (!is_global_var (maybe_lookup_decl_in_outer_ctx (t, ctx))
1260 || (is_task_ctx (ctx)
1261 && (TREE_CODE (TREE_TYPE (t)) == POINTER_TYPE
1262 || (TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE
1263 && (TREE_CODE (TREE_TYPE (TREE_TYPE (t)))
1264 == POINTER_TYPE)))))
1265 && !is_variable_sized (t)
1266 && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
1267 || (!OMP_CLAUSE_REDUCTION_TASK (c)
1268 && !is_task_ctx (ctx))))
1270 by_ref = use_pointer_for_field (t, NULL);
1271 if (is_task_ctx (ctx)
1272 && TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE
1273 && TREE_CODE (TREE_TYPE (TREE_TYPE (t))) == POINTER_TYPE)
1275 install_var_field (t, false, 1, ctx);
1276 install_var_field (t, by_ref, 2, ctx);
1278 else
1279 install_var_field (t, by_ref, 3, ctx);
1281 break;
1283 if (is_task_ctx (ctx)
1284 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
1285 && OMP_CLAUSE_REDUCTION_TASK (c)
1286 && is_parallel_ctx (ctx)))
1288 /* Global variables don't need to be copied,
1289 the receiver side will use them directly. */
1290 if (!is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1292 by_ref = use_pointer_for_field (decl, ctx);
1293 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION)
1294 install_var_field (decl, by_ref, 3, ctx);
1296 install_var_local (decl, ctx);
1297 break;
1299 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
1300 && OMP_CLAUSE_REDUCTION_TASK (c))
1302 install_var_local (decl, ctx);
1303 break;
1305 goto do_private;
1307 case OMP_CLAUSE_LASTPRIVATE:
1308 /* Let the corresponding firstprivate clause create
1309 the variable. */
1310 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
1311 break;
1312 /* FALLTHRU */
1314 case OMP_CLAUSE_FIRSTPRIVATE:
1315 case OMP_CLAUSE_LINEAR:
1316 decl = OMP_CLAUSE_DECL (c);
1317 do_private:
1318 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
1319 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
1320 && is_gimple_omp_offloaded (ctx->stmt))
1322 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
1323 install_var_field (decl, !omp_is_reference (decl), 3, ctx);
1324 else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1325 install_var_field (decl, true, 3, ctx);
1326 else
1327 install_var_field (decl, false, 3, ctx);
1329 if (is_variable_sized (decl))
1331 if (is_task_ctx (ctx))
1333 if (ctx->allocate_map
1334 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
1336 /* For now. */
1337 if (ctx->allocate_map->get (decl))
1338 ctx->allocate_map->remove (decl);
1340 install_var_field (decl, false, 1, ctx);
1342 break;
1344 else if (is_taskreg_ctx (ctx))
1346 bool global
1347 = is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx));
1348 by_ref = use_pointer_for_field (decl, NULL);
1350 if (is_task_ctx (ctx)
1351 && (global || by_ref || omp_is_reference (decl)))
1353 if (ctx->allocate_map
1354 && ctx->allocate_map->get (decl))
1355 install_var_field (decl, by_ref, 32 | 1, ctx);
1356 else
1357 install_var_field (decl, false, 1, ctx);
1358 if (!global)
1359 install_var_field (decl, by_ref, 2, ctx);
1361 else if (!global)
1362 install_var_field (decl, by_ref, 3, ctx);
1364 install_var_local (decl, ctx);
1365 break;
1367 case OMP_CLAUSE_USE_DEVICE_PTR:
1368 case OMP_CLAUSE_USE_DEVICE_ADDR:
1369 decl = OMP_CLAUSE_DECL (c);
1371 /* Fortran array descriptors. */
1372 if (lang_hooks.decls.omp_array_data (decl, true))
1373 install_var_field (decl, false, 19, ctx);
1374 else if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
1375 && !omp_is_reference (decl)
1376 && !omp_is_allocatable_or_ptr (decl))
1377 || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1378 install_var_field (decl, true, 11, ctx);
1379 else
1380 install_var_field (decl, false, 11, ctx);
1381 if (DECL_SIZE (decl)
1382 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1384 tree decl2 = DECL_VALUE_EXPR (decl);
1385 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1386 decl2 = TREE_OPERAND (decl2, 0);
1387 gcc_assert (DECL_P (decl2));
1388 install_var_local (decl2, ctx);
1390 install_var_local (decl, ctx);
1391 break;
1393 case OMP_CLAUSE_IS_DEVICE_PTR:
1394 decl = OMP_CLAUSE_DECL (c);
1395 goto do_private;
1397 case OMP_CLAUSE__LOOPTEMP_:
1398 case OMP_CLAUSE__REDUCTEMP_:
1399 gcc_assert (is_taskreg_ctx (ctx));
1400 decl = OMP_CLAUSE_DECL (c);
1401 install_var_field (decl, false, 3, ctx);
1402 install_var_local (decl, ctx);
1403 break;
1405 case OMP_CLAUSE_COPYPRIVATE:
1406 case OMP_CLAUSE_COPYIN:
1407 decl = OMP_CLAUSE_DECL (c);
1408 by_ref = use_pointer_for_field (decl, NULL);
1409 install_var_field (decl, by_ref, 3, ctx);
1410 break;
1412 case OMP_CLAUSE_FINAL:
1413 case OMP_CLAUSE_IF:
1414 case OMP_CLAUSE_NUM_THREADS:
1415 case OMP_CLAUSE_NUM_TEAMS:
1416 case OMP_CLAUSE_THREAD_LIMIT:
1417 case OMP_CLAUSE_DEVICE:
1418 case OMP_CLAUSE_SCHEDULE:
1419 case OMP_CLAUSE_DIST_SCHEDULE:
1420 case OMP_CLAUSE_DEPEND:
1421 case OMP_CLAUSE_PRIORITY:
1422 case OMP_CLAUSE_GRAINSIZE:
1423 case OMP_CLAUSE_NUM_TASKS:
1424 case OMP_CLAUSE_NUM_GANGS:
1425 case OMP_CLAUSE_NUM_WORKERS:
1426 case OMP_CLAUSE_VECTOR_LENGTH:
1427 case OMP_CLAUSE_DETACH:
/* Clause operands are expressions evaluated in the enclosing context.  */
1428 if (ctx->outer)
1429 scan_omp_op (&OMP_CLAUSE_OPERAND (c, 0), ctx->outer);
1430 break;
1432 case OMP_CLAUSE_TO:
1433 case OMP_CLAUSE_FROM:
1434 case OMP_CLAUSE_MAP:
1435 if (ctx->outer)
1436 scan_omp_op (&OMP_CLAUSE_SIZE (c), ctx->outer);
1437 decl = OMP_CLAUSE_DECL (c);
1438 /* Global variables with "omp declare target" attribute
1439 don't need to be copied, the receiver side will use them
1440 directly. However, global variables with "omp declare target link"
1441 attribute need to be copied. Or when ALWAYS modifier is used. */
1442 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1443 && DECL_P (decl)
1444 && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
1445 && (OMP_CLAUSE_MAP_KIND (c)
1446 != GOMP_MAP_FIRSTPRIVATE_REFERENCE)
1447 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ATTACH
1448 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_DETACH)
1449 || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1450 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TO
1451 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_FROM
1452 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TOFROM
1453 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_TO_PSET
1454 && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
1455 && varpool_node::get_create (decl)->offloadable
1456 && !lookup_attribute ("omp declare target link",
1457 DECL_ATTRIBUTES (decl)))
1458 break;
1459 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1460 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER)
1462 /* Ignore GOMP_MAP_POINTER kind for arrays in regions that are
1463 not offloaded; there is nothing to map for those. */
1464 if (!is_gimple_omp_offloaded (ctx->stmt)
1465 && !POINTER_TYPE_P (TREE_TYPE (decl))
1466 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
1467 break;
1469 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1470 && DECL_P (decl)
1471 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
1472 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)
1473 && is_omp_target (ctx->stmt))
1475 /* If this is an offloaded region, an attach operation should
1476 only exist when the pointer variable is mapped in a prior
1477 clause. */
1478 if (is_gimple_omp_offloaded (ctx->stmt))
1479 gcc_assert
1480 (maybe_lookup_decl (decl, ctx)
1481 || (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
1482 && lookup_attribute ("omp declare target",
1483 DECL_ATTRIBUTES (decl))));
1485 /* By itself, attach/detach is generated as part of pointer
1486 variable mapping and should not create new variables in the
1487 offloaded region, however sender refs for it must be created
1488 for its address to be passed to the runtime. */
1489 tree field
1490 = build_decl (OMP_CLAUSE_LOCATION (c),
1491 FIELD_DECL, NULL_TREE, ptr_type_node);
1492 SET_DECL_ALIGN (field, TYPE_ALIGN (ptr_type_node));
1493 insert_field_into_struct (ctx->record_type, field);
1494 /* To not clash with a map of the pointer variable itself,
1495 attach/detach maps have their field looked up by the *clause*
1496 tree expression, not the decl. */
1497 gcc_assert (!splay_tree_lookup (ctx->field_map,
1498 (splay_tree_key) c));
1499 splay_tree_insert (ctx->field_map, (splay_tree_key) c,
1500 (splay_tree_value) field);
1501 break;
1503 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1504 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
1505 || (OMP_CLAUSE_MAP_KIND (c)
1506 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
1508 if (TREE_CODE (decl) == COMPONENT_REF
1509 || (TREE_CODE (decl) == INDIRECT_REF
1510 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
1511 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
1512 == REFERENCE_TYPE)))
1513 break;
1514 if (DECL_SIZE (decl)
1515 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1517 tree decl2 = DECL_VALUE_EXPR (decl);
1518 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1519 decl2 = TREE_OPERAND (decl2, 0);
1520 gcc_assert (DECL_P (decl2));
1521 install_var_local (decl2, ctx);
1523 install_var_local (decl, ctx);
1524 break;
1526 if (DECL_P (decl))
1528 if (DECL_SIZE (decl)
1529 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1531 tree decl2 = DECL_VALUE_EXPR (decl);
1532 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1533 decl2 = TREE_OPERAND (decl2, 0);
1534 gcc_assert (DECL_P (decl2));
1535 install_var_field (decl2, true, 3, ctx);
1536 install_var_local (decl2, ctx);
1537 install_var_local (decl, ctx);
1539 else
1541 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1542 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
1543 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
1544 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1545 install_var_field (decl, true, 7, ctx);
1546 else
1547 install_var_field (decl, true, 3, ctx);
1548 if (is_gimple_omp_offloaded (ctx->stmt)
1549 && !OMP_CLAUSE_MAP_IN_REDUCTION (c))
1550 install_var_local (decl, ctx);
1553 else
1555 tree base = get_base_address (decl);
1556 tree nc = OMP_CLAUSE_CHAIN (c);
1557 if (DECL_P (base)
1558 && nc != NULL_TREE
1559 && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
1560 && OMP_CLAUSE_DECL (nc) == base
1561 && OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_POINTER
1562 && integer_zerop (OMP_CLAUSE_SIZE (nc)))
1564 OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c) = 1;
1565 OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (nc) = 1;
1567 else
1569 if (ctx->outer)
1571 scan_omp_op (&OMP_CLAUSE_DECL (c), ctx->outer);
1572 decl = OMP_CLAUSE_DECL (c);
1574 gcc_assert (!splay_tree_lookup (ctx->field_map,
1575 (splay_tree_key) decl));
1576 tree field
1577 = build_decl (OMP_CLAUSE_LOCATION (c),
1578 FIELD_DECL, NULL_TREE, ptr_type_node);
1579 SET_DECL_ALIGN (field, TYPE_ALIGN (ptr_type_node));
1580 insert_field_into_struct (ctx->record_type, field);
1581 splay_tree_insert (ctx->field_map, (splay_tree_key) decl,
1582 (splay_tree_value) field);
1585 break;
1587 case OMP_CLAUSE_ORDER:
1588 ctx->order_concurrent = true;
1589 break;
1591 case OMP_CLAUSE_BIND:
1592 ctx->loop_p = true;
1593 break;
1595 case OMP_CLAUSE_NOWAIT:
1596 case OMP_CLAUSE_ORDERED:
1597 case OMP_CLAUSE_COLLAPSE:
1598 case OMP_CLAUSE_UNTIED:
1599 case OMP_CLAUSE_MERGEABLE:
1600 case OMP_CLAUSE_PROC_BIND:
1601 case OMP_CLAUSE_SAFELEN:
1602 case OMP_CLAUSE_SIMDLEN:
1603 case OMP_CLAUSE_THREADS:
1604 case OMP_CLAUSE_SIMD:
1605 case OMP_CLAUSE_NOGROUP:
1606 case OMP_CLAUSE_DEFAULTMAP:
1607 case OMP_CLAUSE_ASYNC:
1608 case OMP_CLAUSE_WAIT:
1609 case OMP_CLAUSE_GANG:
1610 case OMP_CLAUSE_WORKER:
1611 case OMP_CLAUSE_VECTOR:
1612 case OMP_CLAUSE_INDEPENDENT:
1613 case OMP_CLAUSE_AUTO:
1614 case OMP_CLAUSE_SEQ:
1615 case OMP_CLAUSE_TILE:
1616 case OMP_CLAUSE__SIMT_:
1617 case OMP_CLAUSE_DEFAULT:
1618 case OMP_CLAUSE_NONTEMPORAL:
1619 case OMP_CLAUSE_IF_PRESENT:
1620 case OMP_CLAUSE_FINALIZE:
1621 case OMP_CLAUSE_TASK_REDUCTION:
1622 case OMP_CLAUSE_ALLOCATE:
1623 break;
1625 case OMP_CLAUSE_ALIGNED:
1626 decl = OMP_CLAUSE_DECL (c);
1627 if (is_global_var (decl)
1628 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1629 install_var_local (decl, ctx);
1630 break;
1632 case OMP_CLAUSE__CONDTEMP_:
1633 decl = OMP_CLAUSE_DECL (c);
1634 if (is_parallel_ctx (ctx))
1636 install_var_field (decl, false, 3, ctx);
1637 install_var_local (decl, ctx);
1639 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
1640 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD
1641 && !OMP_CLAUSE__CONDTEMP__ITER (c))
1642 install_var_local (decl, ctx);
1643 break;
1645 case OMP_CLAUSE__CACHE_:
1646 default:
1647 gcc_unreachable ();
/* Third pass: with all fields installed, fix up remapped decls and note
   clauses whose stashed GIMPLE sequences still need scanning.  */
1651 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1653 switch (OMP_CLAUSE_CODE (c))
1655 case OMP_CLAUSE_LASTPRIVATE:
1656 /* Let the corresponding firstprivate clause create
1657 the variable. */
1658 if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
1659 scan_array_reductions = true;
1660 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
1661 break;
1662 /* FALLTHRU */
1664 case OMP_CLAUSE_FIRSTPRIVATE:
1665 case OMP_CLAUSE_PRIVATE:
1666 case OMP_CLAUSE_LINEAR:
1667 case OMP_CLAUSE_IS_DEVICE_PTR:
1668 decl = OMP_CLAUSE_DECL (c);
1669 if (is_variable_sized (decl))
1671 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
1672 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
1673 && is_gimple_omp_offloaded (ctx->stmt))
1675 tree decl2 = DECL_VALUE_EXPR (decl);
1676 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1677 decl2 = TREE_OPERAND (decl2, 0);
1678 gcc_assert (DECL_P (decl2));
1679 install_var_local (decl2, ctx);
1680 fixup_remapped_decl (decl2, ctx, false);
1682 install_var_local (decl, ctx);
1684 fixup_remapped_decl (decl, ctx,
1685 OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
1686 && OMP_CLAUSE_PRIVATE_DEBUG (c));
1687 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
1688 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
1689 scan_array_reductions = true;
1690 break;
1692 case OMP_CLAUSE_REDUCTION:
1693 case OMP_CLAUSE_IN_REDUCTION:
1694 decl = OMP_CLAUSE_DECL (c);
1695 if (TREE_CODE (decl) != MEM_REF)
1697 if (is_variable_sized (decl))
1698 install_var_local (decl, ctx);
1699 fixup_remapped_decl (decl, ctx, false);
1701 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
1702 scan_array_reductions = true;
1703 break;
1705 case OMP_CLAUSE_TASK_REDUCTION:
1706 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
1707 scan_array_reductions = true;
1708 break;
1710 case OMP_CLAUSE_SHARED:
1711 /* Ignore shared directives in teams construct inside of
1712 target construct. */
1713 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
1714 && !is_host_teams_ctx (ctx))
1715 break;
1716 decl = OMP_CLAUSE_DECL (c);
1717 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1718 break;
1719 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
1721 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl,
1722 ctx->outer)))
1723 break;
1724 bool by_ref = use_pointer_for_field (decl, ctx);
1725 install_var_field (decl, by_ref, 11, ctx);
1726 break;
1728 fixup_remapped_decl (decl, ctx, false);
1729 break;
1731 case OMP_CLAUSE_MAP:
1732 if (!is_gimple_omp_offloaded (ctx->stmt))
1733 break;
1734 decl = OMP_CLAUSE_DECL (c);
1735 if (DECL_P (decl)
1736 && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
1737 && (OMP_CLAUSE_MAP_KIND (c)
1738 != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
1739 || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1740 && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
1741 && varpool_node::get_create (decl)->offloadable)
1742 break;
1743 if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
1744 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)
1745 && is_omp_target (ctx->stmt)
1746 && !is_gimple_omp_offloaded (ctx->stmt))
1747 break;
1748 if (DECL_P (decl))
1750 if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
1751 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
1752 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
1753 && !COMPLETE_TYPE_P (TREE_TYPE (decl)))
1755 tree new_decl = lookup_decl (decl, ctx);
1756 TREE_TYPE (new_decl)
1757 = remap_type (TREE_TYPE (decl), &ctx->cb);
1759 else if (DECL_SIZE (decl)
1760 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1762 tree decl2 = DECL_VALUE_EXPR (decl);
1763 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1764 decl2 = TREE_OPERAND (decl2, 0);
1765 gcc_assert (DECL_P (decl2));
1766 fixup_remapped_decl (decl2, ctx, false);
1767 fixup_remapped_decl (decl, ctx, true);
1769 else
1770 fixup_remapped_decl (decl, ctx, false);
1772 break;
1774 case OMP_CLAUSE_COPYPRIVATE:
1775 case OMP_CLAUSE_COPYIN:
1776 case OMP_CLAUSE_DEFAULT:
1777 case OMP_CLAUSE_IF:
1778 case OMP_CLAUSE_NUM_THREADS:
1779 case OMP_CLAUSE_NUM_TEAMS:
1780 case OMP_CLAUSE_THREAD_LIMIT:
1781 case OMP_CLAUSE_DEVICE:
1782 case OMP_CLAUSE_SCHEDULE:
1783 case OMP_CLAUSE_DIST_SCHEDULE:
1784 case OMP_CLAUSE_NOWAIT:
1785 case OMP_CLAUSE_ORDERED:
1786 case OMP_CLAUSE_COLLAPSE:
1787 case OMP_CLAUSE_UNTIED:
1788 case OMP_CLAUSE_FINAL:
1789 case OMP_CLAUSE_MERGEABLE:
1790 case OMP_CLAUSE_PROC_BIND:
1791 case OMP_CLAUSE_SAFELEN:
1792 case OMP_CLAUSE_SIMDLEN:
1793 case OMP_CLAUSE_ALIGNED:
1794 case OMP_CLAUSE_DEPEND:
1795 case OMP_CLAUSE_DETACH:
1796 case OMP_CLAUSE_ALLOCATE:
1797 case OMP_CLAUSE__LOOPTEMP_:
1798 case OMP_CLAUSE__REDUCTEMP_:
1799 case OMP_CLAUSE_TO:
1800 case OMP_CLAUSE_FROM:
1801 case OMP_CLAUSE_PRIORITY:
1802 case OMP_CLAUSE_GRAINSIZE:
1803 case OMP_CLAUSE_NUM_TASKS:
1804 case OMP_CLAUSE_THREADS:
1805 case OMP_CLAUSE_SIMD:
1806 case OMP_CLAUSE_NOGROUP:
1807 case OMP_CLAUSE_DEFAULTMAP:
1808 case OMP_CLAUSE_ORDER:
1809 case OMP_CLAUSE_BIND:
1810 case OMP_CLAUSE_USE_DEVICE_PTR:
1811 case OMP_CLAUSE_USE_DEVICE_ADDR:
1812 case OMP_CLAUSE_NONTEMPORAL:
1813 case OMP_CLAUSE_ASYNC:
1814 case OMP_CLAUSE_WAIT:
1815 case OMP_CLAUSE_NUM_GANGS:
1816 case OMP_CLAUSE_NUM_WORKERS:
1817 case OMP_CLAUSE_VECTOR_LENGTH:
1818 case OMP_CLAUSE_GANG:
1819 case OMP_CLAUSE_WORKER:
1820 case OMP_CLAUSE_VECTOR:
1821 case OMP_CLAUSE_INDEPENDENT:
1822 case OMP_CLAUSE_AUTO:
1823 case OMP_CLAUSE_SEQ:
1824 case OMP_CLAUSE_TILE:
1825 case OMP_CLAUSE__SIMT_:
1826 case OMP_CLAUSE_IF_PRESENT:
1827 case OMP_CLAUSE_FINALIZE:
1828 case OMP_CLAUSE__CONDTEMP_:
1829 break;
1831 case OMP_CLAUSE__CACHE_:
1832 default:
1833 gcc_unreachable ();
/* Finally scan the GIMPLE sequences stashed inside reduction,
   lastprivate and linear clauses, now in the context of CTX.  */
1837 gcc_checking_assert (!scan_array_reductions
1838 || !is_gimple_omp_oacc (ctx->stmt));
1839 if (scan_array_reductions)
1841 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1842 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
1843 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
1844 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
1845 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
1847 scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c), ctx);
1848 scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
1850 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
1851 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
1852 scan_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
1853 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
1854 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
1855 scan_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
1859 /* Create a new name for omp child function. Returns an identifier. */
1861 static tree
1862 create_omp_child_function_name (bool task_copy)
1864 return clone_function_name_numbered (current_function_decl,
1865 task_copy ? "_omp_cpyfn" : "_omp_fn");
1868 /* Return true if CTX may belong to offloaded code: either if current function
1869 is offloaded, or any enclosing context corresponds to a target region. */
1871 static bool
1872 omp_maybe_offloaded_ctx (omp_context *ctx)
1874 if (cgraph_node::get (current_function_decl)->offloadable)
1875 return true;
1876 for (; ctx; ctx = ctx->outer)
1877 if (is_gimple_omp_offloaded (ctx->stmt))
1878 return true;
1879 return false;
1882 /* Build a decl for the omp child function. It'll not contain a body
1883 yet, just the bare decl. */
1885 static void
1886 create_omp_child_function (omp_context *ctx, bool task_copy)
1888 tree decl, type, name, t;
1890 name = create_omp_child_function_name (task_copy)
/* Task copy functions take two pointer arguments; the outlined region
   body takes a single data pointer.  */
1891 if (task_copy)
1892 type = build_function_type_list (void_type_node, ptr_type_node,
1893 ptr_type_node, NULL_TREE);
1894 else
1895 type = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
1897 decl = build_decl (gimple_location (ctx->stmt), FUNCTION_DECL, name, type);
1899 gcc_checking_assert (!is_gimple_omp_oacc (ctx->stmt)
1900 || !task_copy);
1901 if (!task_copy)
1902 ctx->cb.dst_fn = decl;
1903 else
1904 gimple_omp_task_set_copy_fn (ctx->stmt, decl);
/* The child function is a compiler-generated, file-local helper:
   static, artificial, not inlinable, with a body to come.  */
1906 TREE_STATIC (decl) = 1;
1907 TREE_USED (decl) = 1;
1908 DECL_ARTIFICIAL (decl) = 1;
1909 DECL_IGNORED_P (decl) = 0;
1910 TREE_PUBLIC (decl) = 0;
1911 DECL_UNINLINABLE (decl) = 1;
1912 DECL_EXTERNAL (decl) = 0;
1913 DECL_CONTEXT (decl) = NULL_TREE;
1914 DECL_INITIAL (decl) = make_node (BLOCK);
1915 BLOCK_SUPERCONTEXT (DECL_INITIAL (decl)) = decl;
1916 DECL_ATTRIBUTES (decl) = DECL_ATTRIBUTES (current_function_decl);
1917 /* Remove omp declare simd attribute from the new attributes. */
1918 if (tree a = lookup_attribute ("omp declare simd", DECL_ATTRIBUTES (decl)))
1920 while (tree a2 = lookup_attribute ("omp declare simd", TREE_CHAIN (a)))
1921 a = a2;
1922 a = TREE_CHAIN (a);
1923 for (tree *p = &DECL_ATTRIBUTES (decl); *p != a;)
1924 if (is_attribute_p ("omp declare simd", get_attribute_name (*p)))
1925 *p = TREE_CHAIN (*p);
1926 else
1928 tree chain = TREE_CHAIN (*p);
1929 *p = copy_node (*p);
1930 p = &TREE_CHAIN (*p);
1931 *p = chain;
/* Inherit the optimization/target settings of the enclosing function.  */
1934 DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
1935 = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (current_function_decl);
1936 DECL_FUNCTION_SPECIFIC_TARGET (decl)
1937 = DECL_FUNCTION_SPECIFIC_TARGET (current_function_decl);
1938 DECL_FUNCTION_VERSIONED (decl)
1939 = DECL_FUNCTION_VERSIONED (current_function_decl);
/* Mark the new function offloadable when it may end up in offloaded
   code, and tag it with the appropriate target attribute.  */
1941 if (omp_maybe_offloaded_ctx (ctx))
1943 cgraph_node::get_create (decl)->offloadable = 1;
1944 if (ENABLE_OFFLOADING)
1945 g->have_offload = true;
1948 if (cgraph_node::get_create (decl)->offloadable)
1950 const char *target_attr = (is_gimple_omp_offloaded (ctx->stmt)
1951 ? "omp target entrypoint"
1952 : "omp declare target");
1953 if (lookup_attribute ("omp declare target",
1954 DECL_ATTRIBUTES (current_function_decl)))
1956 if (is_gimple_omp_offloaded (ctx->stmt))
1957 DECL_ATTRIBUTES (decl)
1958 = remove_attribute ("omp declare target",
1959 copy_list (DECL_ATTRIBUTES (decl)));
1960 else
1961 target_attr = NULL;
1963 if (target_attr)
1964 DECL_ATTRIBUTES (decl)
1965 = tree_cons (get_identifier (target_attr),
1966 NULL_TREE, DECL_ATTRIBUTES (decl));
/* A void result and the ".omp_data_i" pointer parameter carrying the
   marshalled data.  */
1969 t = build_decl (DECL_SOURCE_LOCATION (decl),
1970 RESULT_DECL, NULL_TREE, void_type_node);
1971 DECL_ARTIFICIAL (t) = 1;
1972 DECL_IGNORED_P (t) = 1;
1973 DECL_CONTEXT (t) = decl;
1974 DECL_RESULT (decl) = t;
1976 tree data_name = get_identifier (".omp_data_i");
1977 t = build_decl (DECL_SOURCE_LOCATION (decl), PARM_DECL, data_name,
1978 ptr_type_node);
1979 DECL_ARTIFICIAL (t) = 1;
1980 DECL_NAMELESS (t) = 1;
1981 DECL_ARG_TYPE (t) = ptr_type_node;
1982 DECL_CONTEXT (t) = current_function_decl;
1983 TREE_USED (t) = 1;
1984 TREE_READONLY (t) = 1;
1985 DECL_ARGUMENTS (decl) = t;
1986 if (!task_copy)
1987 ctx->receiver_decl = t;
1988 else
/* Task copy functions additionally get the ".omp_data_o" pointer
   parameter, chained in front of ".omp_data_i".  */
1990 t = build_decl (DECL_SOURCE_LOCATION (decl),
1991 PARM_DECL, get_identifier (".omp_data_o"),
1992 ptr_type_node);
1993 DECL_ARTIFICIAL (t) = 1;
1994 DECL_NAMELESS (t) = 1;
1995 DECL_ARG_TYPE (t) = ptr_type_node;
1996 DECL_CONTEXT (t) = current_function_decl;
1997 TREE_USED (t) = 1;
1998 TREE_ADDRESSABLE (t) = 1;
1999 DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
2000 DECL_ARGUMENTS (decl) = t;
2003 /* Allocate memory for the function structure. The call to
2004 allocate_struct_function clobbers CFUN, so we need to restore
2005 it afterward. */
2006 push_struct_function (decl);
2007 cfun->function_end_locus = gimple_location (ctx->stmt);
2008 init_tree_ssa (cfun);
2009 pop_cfun ();
2012 /* Callback for walk_gimple_seq. Check if combined parallel
2013 contains gimple_omp_for_combined_into_p OMP_FOR. */
2015 tree
2016 omp_find_combined_for (gimple_stmt_iterator *gsi_p,
2017 bool *handled_ops_p,
2018 struct walk_stmt_info *wi)
2020 gimple *stmt = gsi_stmt (*gsi_p);
2022 *handled_ops_p = true;
2023 switch (gimple_code (stmt))
2025 WALK_SUBSTMTS;
2027 case GIMPLE_OMP_FOR:
2028 if (gimple_omp_for_combined_into_p (stmt)
2029 && gimple_omp_for_kind (stmt)
2030 == *(const enum gf_mask *) (wi->info))
2032 wi->info = stmt;
2033 return integer_zero_node;
2035 break;
2036 default:
2037 break;
2039 return NULL;
2042 /* Add _LOOPTEMP_/_REDUCTEMP_ clauses on OpenMP parallel or task. */
/* Create artificial _LOOPTEMP_ clauses (and, for taskloop with task
   reductions, a leading _REDUCTEMP_ clause) on the combined parallel or
   task STMT.  MSK identifies the kind of inner GIMPLE_OMP_FOR that STMT
   was combined with: GF_OMP_FOR_KIND_FOR for parallel,
   GF_OMP_FOR_KIND_TASKLOOP for task.  Each temporary is also mapped to
   itself in OUTER_CTX's copy-body table so outer remapping leaves it
   untouched.  */
2044 static void
2045 add_taskreg_looptemp_clauses (enum gf_mask msk, gimple *stmt,
2046 omp_context *outer_ctx)
2048 struct walk_stmt_info wi;
/* Search the body for the inner GIMPLE_OMP_FOR of kind MSK; on success
   omp_find_combined_for stores that statement back into wi.info.  */
2050 memset (&wi, 0, sizeof (wi));
2051 wi.val_only = true;
2052 wi.info = (void *) &msk;
2053 walk_gimple_seq (gimple_omp_body (stmt), omp_find_combined_for, NULL, &wi);
2054 if (wi.info != (void *) &msk)
2056 gomp_for *for_stmt = as_a <gomp_for *> ((gimple *) wi.info);
2057 struct omp_for_data fd;
2058 omp_extract_for_data (for_stmt, &fd, NULL);
2059 /* We need two temporaries with fd.loop.v type (istart/iend)
2060 and then (fd.collapse - 1) temporaries with the same
2061 type for count2 ... countN-1 vars if not constant. */
2062 size_t count = 2, i;
2063 tree type = fd.iter_type;
2064 if (fd.collapse > 1
2065 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
2067 count += fd.collapse - 1;
2068 /* If there are lastprivate clauses on the inner
2069 GIMPLE_OMP_FOR, add one more temporaries for the total number
2070 of iterations (product of count1 ... countN-1). */
2071 if (omp_find_clause (gimple_omp_for_clauses (for_stmt),
2072 OMP_CLAUSE_LASTPRIVATE)
2073 || (msk == GF_OMP_FOR_KIND_FOR
2074 && omp_find_clause (gimple_omp_parallel_clauses (stmt),
2075 OMP_CLAUSE_LASTPRIVATE)))
2077 tree temp = create_tmp_var (type);
2078 tree c = build_omp_clause (UNKNOWN_LOCATION,
2079 OMP_CLAUSE__LOOPTEMP_);
/* Identity-map the temporary so outer remapping keeps it as-is.  */
2080 insert_decl_map (&outer_ctx->cb, temp, temp);
2081 OMP_CLAUSE_DECL (c) = temp;
2082 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
2083 gimple_omp_taskreg_set_clauses (stmt, c);
/* For a non-rectangular nest where exactly two adjacent loops are
   involved and the inner IV is signed, reserve one more temporary of
   TYPE (via count++) plus three temporaries of the outer IV's type.  */
2085 if (fd.non_rect
2086 && fd.last_nonrect == fd.first_nonrect + 1)
2087 if (tree v = gimple_omp_for_index (for_stmt, fd.last_nonrect))
2088 if (!TYPE_UNSIGNED (TREE_TYPE (v)))
2090 v = gimple_omp_for_index (for_stmt, fd.first_nonrect);
2091 tree type2 = TREE_TYPE (v);
2092 count++;
2093 for (i = 0; i < 3; i++)
2095 tree temp = create_tmp_var (type2);
2096 tree c = build_omp_clause (UNKNOWN_LOCATION,
2097 OMP_CLAUSE__LOOPTEMP_);
2098 insert_decl_map (&outer_ctx->cb, temp, temp);
2099 OMP_CLAUSE_DECL (c) = temp;
2100 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
2101 gimple_omp_taskreg_set_clauses (stmt, c);
/* Emit the COUNT _LOOPTEMP_ clauses of TYPE computed above.  */
2105 for (i = 0; i < count; i++)
2107 tree temp = create_tmp_var (type);
2108 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
2109 insert_decl_map (&outer_ctx->cb, temp, temp);
2110 OMP_CLAUSE_DECL (c) = temp;
2111 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
2112 gimple_omp_taskreg_set_clauses (stmt, c);
/* Taskloop with any task reduction additionally gets a _REDUCTEMP_
   pointer temporary prepended to the clause list.  */
2115 if (msk == GF_OMP_FOR_KIND_TASKLOOP
2116 && omp_find_clause (gimple_omp_task_clauses (stmt),
2117 OMP_CLAUSE_REDUCTION))
2119 tree type = build_pointer_type (pointer_sized_int_node);
2120 tree temp = create_tmp_var (type);
2121 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
2122 insert_decl_map (&outer_ctx->cb, temp, temp);
2123 OMP_CLAUSE_DECL (c) = temp;
2124 OMP_CLAUSE_CHAIN (c) = gimple_omp_task_clauses (stmt);
2125 gimple_omp_task_set_clauses (stmt, c);
2129 /* Scan an OpenMP parallel directive. */
/* Scan a GIMPLE_OMP_PARALLEL at *GSI: build its omp_context under
   OUTER_CTX, create the .omp_data_s record type that carries shared
   data and the child function the body will be outlined into, then
   scan clauses and body.  May replace the statement with a nop.  */
2131 static void
2132 scan_omp_parallel (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
2134 omp_context *ctx;
2135 tree name;
2136 gomp_parallel *stmt = as_a <gomp_parallel *> (gsi_stmt (*gsi));
2138 /* Ignore parallel directives with empty bodies, unless there
2139 are copyin clauses. */
2140 if (optimize > 0
2141 && empty_body_p (gimple_omp_body (stmt))
2142 && omp_find_clause (gimple_omp_parallel_clauses (stmt),
2143 OMP_CLAUSE_COPYIN) == NULL)
2145 gsi_replace (gsi, gimple_build_nop (), false);
2146 return;
/* A combined "parallel for" needs _LOOPTEMP_ clauses for the inner
   loop's bounds; see add_taskreg_looptemp_clauses.  */
2149 if (gimple_omp_parallel_combined_p (stmt))
2150 add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_FOR, stmt, outer_ctx);
/* If any reduction clause is a task reduction, prepend one
   _REDUCTEMP_ clause; only a single one is ever needed.  */
2151 for (tree c = omp_find_clause (gimple_omp_parallel_clauses (stmt),
2152 OMP_CLAUSE_REDUCTION);
2153 c; c = omp_find_clause (OMP_CLAUSE_CHAIN (c), OMP_CLAUSE_REDUCTION))
2154 if (OMP_CLAUSE_REDUCTION_TASK (c))
2156 tree type = build_pointer_type (pointer_sized_int_node);
2157 tree temp = create_tmp_var (type);
2158 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
2159 if (outer_ctx)
2160 insert_decl_map (&outer_ctx->cb, temp, temp);
2161 OMP_CLAUSE_DECL (c) = temp;
2162 OMP_CLAUSE_CHAIN (c) = gimple_omp_parallel_clauses (stmt);
2163 gimple_omp_parallel_set_clauses (stmt, c);
2164 break;
2166 else if (OMP_CLAUSE_CHAIN (c) == NULL_TREE)
2167 break;
/* Build the context, the artificial .omp_data_s record type and the
   outlined child function; finish_taskreg_scan lays the record out.  */
2169 ctx = new_omp_context (stmt, outer_ctx);
2170 taskreg_contexts.safe_push (ctx);
2171 if (taskreg_nesting_level > 1)
2172 ctx->is_nested = true;
2173 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2174 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2175 name = create_tmp_var_name (".omp_data_s");
2176 name = build_decl (gimple_location (stmt),
2177 TYPE_DECL, name, ctx->record_type);
2178 DECL_ARTIFICIAL (name) = 1;
2179 DECL_NAMELESS (name) = 1;
2180 TYPE_NAME (ctx->record_type) = name;
2181 TYPE_ARTIFICIAL (ctx->record_type) = 1;
2182 create_omp_child_function (ctx, false);
2183 gimple_omp_parallel_set_child_fn (stmt, ctx->cb.dst_fn);
2185 scan_sharing_clauses (gimple_omp_parallel_clauses (stmt), ctx);
2186 scan_omp (gimple_omp_body_ptr (stmt), ctx);
/* No fields were needed: drop the record and receiver entirely.  */
2188 if (TYPE_FIELDS (ctx->record_type) == NULL)
2189 ctx->record_type = ctx->receiver_decl = NULL;
2192 /* Scan an OpenMP task directive. */
/* Scan a GIMPLE_OMP_TASK at *GSI: build its omp_context under
   OUTER_CTX, the .omp_data_s record (and .omp_data_a sender record when
   needed) and the outlined child function, then scan clauses and body.
   May replace the statement with a nop.  */
2194 static void
2195 scan_omp_task (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
2197 omp_context *ctx;
2198 tree name, t;
2199 gomp_task *stmt = as_a <gomp_task *> (gsi_stmt (*gsi));
2201 /* Ignore task directives with empty bodies, unless they have depend
2202 clause. */
2203 if (optimize > 0
2204 && gimple_omp_body (stmt)
2205 && empty_body_p (gimple_omp_body (stmt))
2206 && !omp_find_clause (gimple_omp_task_clauses (stmt), OMP_CLAUSE_DEPEND))
2208 gsi_replace (gsi, gimple_build_nop (), false);
2209 return;
/* A taskloop task gets its _LOOPTEMP_ / _REDUCTEMP_ clauses up front.  */
2212 if (gimple_omp_task_taskloop_p (stmt))
2213 add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_TASKLOOP, stmt, outer_ctx);
2215 ctx = new_omp_context (stmt, outer_ctx);
/* A taskwait-with-depend task has no body to outline: just scan the
   clauses and stop.  */
2217 if (gimple_omp_task_taskwait_p (stmt))
2219 scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);
2220 return;
2223 taskreg_contexts.safe_push (ctx);
2224 if (taskreg_nesting_level > 1)
2225 ctx->is_nested = true;
2226 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2227 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2228 name = create_tmp_var_name (".omp_data_s");
2229 name = build_decl (gimple_location (stmt),
2230 TYPE_DECL, name, ctx->record_type);
2231 DECL_ARTIFICIAL (name) = 1;
2232 DECL_NAMELESS (name) = 1;
2233 TYPE_NAME (ctx->record_type) = name;
2234 TYPE_ARTIFICIAL (ctx->record_type) = 1;
2235 create_omp_child_function (ctx, false);
2236 gimple_omp_task_set_child_fn (stmt, ctx->cb.dst_fn);
2238 scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);
/* Scanning the clauses may have created a separate sender record
   (srecord_type); give it its own type name and copy function.  */
2240 if (ctx->srecord_type)
2242 name = create_tmp_var_name (".omp_data_a");
2243 name = build_decl (gimple_location (stmt),
2244 TYPE_DECL, name, ctx->srecord_type);
2245 DECL_ARTIFICIAL (name) = 1;
2246 DECL_NAMELESS (name) = 1;
2247 TYPE_NAME (ctx->srecord_type) = name;
2248 TYPE_ARTIFICIAL (ctx->srecord_type) = 1;
2249 create_omp_child_function (ctx, true);
2252 scan_omp (gimple_omp_body_ptr (stmt), ctx);
/* Empty record: no data environment at all; tell the runtime a
   zero-size, alignment-1 argument block.  */
2254 if (TYPE_FIELDS (ctx->record_type) == NULL)
2256 ctx->record_type = ctx->receiver_decl = NULL;
2257 t = build_int_cst (long_integer_type_node, 0);
2258 gimple_omp_task_set_arg_size (stmt, t);
2259 t = build_int_cst (long_integer_type_node, 1);
2260 gimple_omp_task_set_arg_align (stmt, t);
2264 /* Helper function for finish_taskreg_scan, called through walk_tree.
2265 If maybe_lookup_decl_in_outer_context returns non-NULL for some
2266 tree, replace it in the expression. */
2268 static tree
2269 finish_taskreg_remap (tree *tp, int *walk_subtrees, void *data)
2271 if (VAR_P (*tp))
2273 omp_context *ctx = (omp_context *) data;
2274 tree t = maybe_lookup_decl_in_outer_ctx (*tp, ctx);
2275 if (t != *tp)
2277 if (DECL_HAS_VALUE_EXPR_P (t))
2278 t = unshare_expr (DECL_VALUE_EXPR (t));
2279 *tp = t;
2281 *walk_subtrees = 0;
2283 else if (IS_TYPE_OR_DECL_P (*tp))
2284 *walk_subtrees = 0;
2285 return NULL_TREE;
2288 /* If any decls have been made addressable during scan_omp,
2289 adjust their fields if needed, and layout record types
2290 of parallel/task constructs. */
/* Post-scan fixups for a parallel/task/teams context CTX: widen fields
   of decls that became addressable during scan_omp, reorder record
   fields into the positions the libgomp runtime expects, lay out the
   record types, and for tasks compute the argument-block size and
   alignment.  No-op when CTX has no data record.  */
2292 static void
2293 finish_taskreg_scan (omp_context *ctx)
2295 if (ctx->record_type == NULL_TREE)
2296 return;
2298 /* If any task_shared_vars were needed, verify all
2299 OMP_CLAUSE_SHARED clauses on GIMPLE_OMP_{PARALLEL,TASK,TEAMS}
2300 statements if use_pointer_for_field hasn't changed
2301 because of that. If it did, update field types now. */
2302 if (task_shared_vars)
2304 tree c;
2306 for (c = gimple_omp_taskreg_clauses (ctx->stmt);
2307 c; c = OMP_CLAUSE_CHAIN (c))
2308 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
2309 && !OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
2311 tree decl = OMP_CLAUSE_DECL (c);
2313 /* Global variables don't need to be copied,
2314 the receiver side will use them directly. */
2315 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
2316 continue;
2317 if (!bitmap_bit_p (task_shared_vars, DECL_UID (decl))
2318 || !use_pointer_for_field (decl, ctx))
2319 continue;
/* The field must now hold a pointer to the variable; also keep the
   matching sender-record field (if any) in sync.  */
2320 tree field = lookup_field (decl, ctx);
2321 if (TREE_CODE (TREE_TYPE (field)) == POINTER_TYPE
2322 && TREE_TYPE (TREE_TYPE (field)) == TREE_TYPE (decl))
2323 continue;
2324 TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
2325 TREE_THIS_VOLATILE (field) = 0;
2326 DECL_USER_ALIGN (field) = 0;
2327 SET_DECL_ALIGN (field, TYPE_ALIGN (TREE_TYPE (field)));
2328 if (TYPE_ALIGN (ctx->record_type) < DECL_ALIGN (field))
2329 SET_TYPE_ALIGN (ctx->record_type, DECL_ALIGN (field));
2330 if (ctx->srecord_type)
2332 tree sfield = lookup_sfield (decl, ctx);
2333 TREE_TYPE (sfield) = TREE_TYPE (field);
2334 TREE_THIS_VOLATILE (sfield) = 0;
2335 DECL_USER_ALIGN (sfield) = 0;
2336 SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
2337 if (TYPE_ALIGN (ctx->srecord_type) < DECL_ALIGN (sfield))
2338 SET_TYPE_ALIGN (ctx->srecord_type, DECL_ALIGN (sfield));
2343 if (gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL)
2345 tree clauses = gimple_omp_parallel_clauses (ctx->stmt);
2346 tree c = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
2347 if (c)
2349 /* Move the _reductemp_ clause first. GOMP_parallel_reductions
2350 expects to find it at the start of data. */
2351 tree f = lookup_field (OMP_CLAUSE_DECL (c), ctx);
2352 tree *p = &TYPE_FIELDS (ctx->record_type);
2353 while (*p)
2354 if (*p == f)
2356 *p = DECL_CHAIN (*p);
2357 break;
2359 else
2360 p = &DECL_CHAIN (*p);
2361 DECL_CHAIN (f) = TYPE_FIELDS (ctx->record_type);
2362 TYPE_FIELDS (ctx->record_type) = f;
2364 layout_type (ctx->record_type);
2365 fixup_child_record_type (ctx);
2367 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
2369 layout_type (ctx->record_type);
2370 fixup_child_record_type (ctx);
/* Remaining case: a GIMPLE_OMP_TASK.  */
2372 else
2374 location_t loc = gimple_location (ctx->stmt);
2375 tree *p, vla_fields = NULL_TREE, *q = &vla_fields;
2376 tree detach_clause
2377 = omp_find_clause (gimple_omp_task_clauses (ctx->stmt),
2378 OMP_CLAUSE_DETACH);
2379 /* Move VLA fields to the end. */
2380 p = &TYPE_FIELDS (ctx->record_type);
2381 while (*p)
2382 if (!TYPE_SIZE_UNIT (TREE_TYPE (*p))
2383 || ! TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (*p))))
2385 *q = *p;
2386 *p = TREE_CHAIN (*p);
2387 TREE_CHAIN (*q) = NULL_TREE;
2388 q = &TREE_CHAIN (*q);
2390 else
2391 p = &DECL_CHAIN (*p);
2392 *p = vla_fields;
2393 if (gimple_omp_task_taskloop_p (ctx->stmt))
2395 /* Move fields corresponding to first and second _looptemp_
2396 clause first. There are filled by GOMP_taskloop
2397 and thus need to be in specific positions. */
2398 tree clauses = gimple_omp_task_clauses (ctx->stmt);
2399 tree c1 = omp_find_clause (clauses, OMP_CLAUSE__LOOPTEMP_);
2400 tree c2 = omp_find_clause (OMP_CLAUSE_CHAIN (c1),
2401 OMP_CLAUSE__LOOPTEMP_);
2402 tree c3 = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
2403 tree f1 = lookup_field (OMP_CLAUSE_DECL (c1), ctx);
2404 tree f2 = lookup_field (OMP_CLAUSE_DECL (c2), ctx);
2405 tree f3 = c3 ? lookup_field (OMP_CLAUSE_DECL (c3), ctx) : NULL_TREE;
/* Unlink f1/f2/f3 from the field chain, then relink them at the
   front in the order f1, f2[, f3].  */
2406 p = &TYPE_FIELDS (ctx->record_type);
2407 while (*p)
2408 if (*p == f1 || *p == f2 || *p == f3)
2409 *p = DECL_CHAIN (*p);
2410 else
2411 p = &DECL_CHAIN (*p);
2412 DECL_CHAIN (f1) = f2;
2413 if (c3)
2415 DECL_CHAIN (f2) = f3;
2416 DECL_CHAIN (f3) = TYPE_FIELDS (ctx->record_type);
2418 else
2419 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->record_type);
2420 TYPE_FIELDS (ctx->record_type) = f1;
/* Mirror the same reordering in the sender record, if any.  */
2421 if (ctx->srecord_type)
2423 f1 = lookup_sfield (OMP_CLAUSE_DECL (c1), ctx);
2424 f2 = lookup_sfield (OMP_CLAUSE_DECL (c2), ctx);
2425 if (c3)
2426 f3 = lookup_sfield (OMP_CLAUSE_DECL (c3), ctx);
2427 p = &TYPE_FIELDS (ctx->srecord_type);
2428 while (*p)
2429 if (*p == f1 || *p == f2 || *p == f3)
2430 *p = DECL_CHAIN (*p);
2431 else
2432 p = &DECL_CHAIN (*p);
2433 DECL_CHAIN (f1) = f2;
2434 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
2435 if (c3)
2437 DECL_CHAIN (f2) = f3;
2438 DECL_CHAIN (f3) = TYPE_FIELDS (ctx->srecord_type);
2440 else
2441 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
2442 TYPE_FIELDS (ctx->srecord_type) = f1;
2445 if (detach_clause)
2447 tree c, field;
2449 /* Look for a firstprivate clause with the detach event handle. */
2450 for (c = gimple_omp_taskreg_clauses (ctx->stmt);
2451 c; c = OMP_CLAUSE_CHAIN (c))
2453 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
2454 continue;
2455 if (maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_DECL (c), ctx)
2456 == OMP_CLAUSE_DECL (detach_clause))
2457 break;
2460 gcc_assert (c);
2461 field = lookup_field (OMP_CLAUSE_DECL (c), ctx);
2463 /* Move field corresponding to the detach clause first.
2464 This is filled by GOMP_task and needs to be in a
2465 specific position. */
2466 p = &TYPE_FIELDS (ctx->record_type);
2467 while (*p)
2468 if (*p == field)
2469 *p = DECL_CHAIN (*p);
2470 else
2471 p = &DECL_CHAIN (*p);
2472 DECL_CHAIN (field) = TYPE_FIELDS (ctx->record_type);
2473 TYPE_FIELDS (ctx->record_type) = field;
2474 if (ctx->srecord_type)
2476 field = lookup_sfield (OMP_CLAUSE_DECL (c), ctx);
2477 p = &TYPE_FIELDS (ctx->srecord_type);
2478 while (*p)
2479 if (*p == field)
2480 *p = DECL_CHAIN (*p);
2481 else
2482 p = &DECL_CHAIN (*p);
2483 DECL_CHAIN (field) = TYPE_FIELDS (ctx->srecord_type);
2484 TYPE_FIELDS (ctx->srecord_type) = field;
2487 layout_type (ctx->record_type);
2488 fixup_child_record_type (ctx);
2489 if (ctx->srecord_type)
2490 layout_type (ctx->srecord_type);
/* Record the argument-block size; a non-constant size (VLAs) must be
   remapped into the outer context before GOMP_task can evaluate it.  */
2491 tree t = fold_convert_loc (loc, long_integer_type_node,
2492 TYPE_SIZE_UNIT (ctx->record_type));
2493 if (TREE_CODE (t) != INTEGER_CST)
2495 t = unshare_expr (t);
2496 walk_tree (&t, finish_taskreg_remap, ctx, NULL);
2498 gimple_omp_task_set_arg_size (ctx->stmt, t);
2499 t = build_int_cst (long_integer_type_node,
2500 TYPE_ALIGN_UNIT (ctx->record_type));
2501 gimple_omp_task_set_arg_align (ctx->stmt, t);
2505 /* Find the enclosing offload context. */
2507 static omp_context *
2508 enclosing_target_ctx (omp_context *ctx)
2510 for (; ctx; ctx = ctx->outer)
2511 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET)
2512 break;
2514 return ctx;
2517 /* Return whether CTX's parent compute construct is an OpenACC 'kernels'
2518 construct.
2519 (This doesn't include OpenACC 'kernels' decomposed parts.) */
2521 static bool
2522 ctx_in_oacc_kernels_region (omp_context *ctx)
2524 for (;ctx != NULL; ctx = ctx->outer)
2526 gimple *stmt = ctx->stmt;
2527 if (gimple_code (stmt) == GIMPLE_OMP_TARGET
2528 && gimple_omp_target_kind (stmt) == GF_OMP_TARGET_KIND_OACC_KERNELS)
2529 return true;
2532 return false;
2535 /* Check the parallelism clauses inside a OpenACC 'kernels' region.
2536 (This doesn't include OpenACC 'kernels' decomposed parts.)
2537 Until kernels handling moves to use the same loop indirection
2538 scheme as parallel, we need to do this checking early. */
/* Accumulate the gang/worker/vector parallelism mask used by the
   OpenACC loop STMT and, recursively, by the loops of every enclosing
   context in CTX.  Diagnostics (seq/auto conflicts, reuse of the
   containing loop's parallelism) are only issued at the outermost
   call, i.e. when STMT is passed in non-NULL; recursive calls pass
   NULL and merely collect masks.  Returns the union of GOMP_DIM_MASK
   bits seen so far.  */
2540 static unsigned
2541 check_oacc_kernel_gwv (gomp_for *stmt, omp_context *ctx)
2543 bool checking = true;
2544 unsigned outer_mask = 0;
2545 unsigned this_mask = 0;
2546 bool has_seq = false, has_auto = false;
/* First gather the mask of all enclosing loops.  */
2548 if (ctx->outer)
2549 outer_mask = check_oacc_kernel_gwv (NULL, ctx->outer);
/* Recursive (non-checking) invocation: skip non-loop contexts.  */
2550 if (!stmt)
2552 checking = false;
2553 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR)
2554 return outer_mask;
2555 stmt = as_a <gomp_for *> (ctx->stmt);
/* Collect this loop's gang/worker/vector/seq/auto clauses.  */
2558 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2560 switch (OMP_CLAUSE_CODE (c))
2562 case OMP_CLAUSE_GANG:
2563 this_mask |= GOMP_DIM_MASK (GOMP_DIM_GANG);
2564 break;
2565 case OMP_CLAUSE_WORKER:
2566 this_mask |= GOMP_DIM_MASK (GOMP_DIM_WORKER);
2567 break;
2568 case OMP_CLAUSE_VECTOR:
2569 this_mask |= GOMP_DIM_MASK (GOMP_DIM_VECTOR);
2570 break;
2571 case OMP_CLAUSE_SEQ:
2572 has_seq = true;
2573 break;
2574 case OMP_CLAUSE_AUTO:
2575 has_auto = true;
2576 break;
2577 default:
2578 break;
2582 if (checking)
2584 if (has_seq && (this_mask || has_auto))
2585 error_at (gimple_location (stmt), "%<seq%> overrides other"
2586 " OpenACC loop specifiers");
2587 else if (has_auto && this_mask)
2588 error_at (gimple_location (stmt), "%<auto%> conflicts with other"
2589 " OpenACC loop specifiers");
2591 if (this_mask & outer_mask)
2592 error_at (gimple_location (stmt), "inner loop uses same"
2593 " OpenACC parallelism as containing loop");
2596 return outer_mask | this_mask;
2599 /* Scan a GIMPLE_OMP_FOR. */
/* Scan a GIMPLE_OMP_FOR STMT under OUTER_CTX: create its omp_context,
   perform OpenACC-specific diagnostics and clause adjustments, then
   scan clauses, pre-body, loop controls and body.  Returns the new
   context.  */
2601 static omp_context *
2602 scan_omp_for (gomp_for *stmt, omp_context *outer_ctx)
2604 omp_context *ctx;
2605 size_t i;
2606 tree clauses = gimple_omp_for_clauses (stmt);
2608 ctx = new_omp_context (stmt, outer_ctx);
2610 if (is_gimple_omp_oacc (stmt))
2612 omp_context *tgt = enclosing_target_ctx (outer_ctx);
/* Outside of 'kernels' regions, gang/worker/vector clauses may not
   carry an argument expression; diagnose any that do.  */
2614 if (!(tgt && is_oacc_kernels (tgt)))
2615 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
2617 tree c_op0;
2618 switch (OMP_CLAUSE_CODE (c))
2620 case OMP_CLAUSE_GANG:
2621 c_op0 = OMP_CLAUSE_GANG_EXPR (c);
2622 break;
2624 case OMP_CLAUSE_WORKER:
2625 c_op0 = OMP_CLAUSE_WORKER_EXPR (c);
2626 break;
2628 case OMP_CLAUSE_VECTOR:
2629 c_op0 = OMP_CLAUSE_VECTOR_EXPR (c);
2630 break;
2632 default:
2633 continue;
2636 if (c_op0)
2638 /* By construction, this is impossible for OpenACC 'kernels'
2639 decomposed parts. */
2640 gcc_assert (!(tgt && is_oacc_kernels_decomposed_part (tgt)));
2642 error_at (OMP_CLAUSE_LOCATION (c),
2643 "argument not permitted on %qs clause",
2644 omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
2645 if (tgt)
2646 inform (gimple_location (tgt->stmt),
2647 "enclosing parent compute construct");
2648 else if (oacc_get_fn_attrib (current_function_decl))
2649 inform (DECL_SOURCE_LOCATION (current_function_decl),
2650 "enclosing routine");
2651 else
2652 gcc_unreachable ();
2656 if (tgt && is_oacc_kernels (tgt))
2657 check_oacc_kernel_gwv (stmt, ctx);
2659 /* Collect all variables named in reductions on this loop. Ensure
2660 that, if this loop has a reduction on some variable v, and there is
2661 a reduction on v somewhere in an outer context, then there is a
2662 reduction on v on all intervening loops as well. */
2663 tree local_reduction_clauses = NULL;
2664 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2666 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
2667 local_reduction_clauses
2668 = tree_cons (NULL, c, local_reduction_clauses)
/* Lazily inherit the outer context's accumulated reduction list.  */
2670 if (ctx->outer_reduction_clauses == NULL && ctx->outer != NULL)
2671 ctx->outer_reduction_clauses
2672 = chainon (unshare_expr (ctx->outer->local_reduction_clauses),
2673 ctx->outer->outer_reduction_clauses);
2674 tree outer_reduction_clauses = ctx->outer_reduction_clauses;
2675 tree local_iter = local_reduction_clauses;
2676 for (; local_iter; local_iter = TREE_CHAIN (local_iter))
2678 tree local_clause = TREE_VALUE (local_iter);
2679 tree local_var = OMP_CLAUSE_DECL (local_clause);
2680 tree_code local_op = OMP_CLAUSE_REDUCTION_CODE (local_clause);
2681 bool have_outer_reduction = false;
2682 tree ctx_iter = outer_reduction_clauses;
2683 for (; ctx_iter; ctx_iter = TREE_CHAIN (ctx_iter))
2685 tree outer_clause = TREE_VALUE (ctx_iter);
2686 tree outer_var = OMP_CLAUSE_DECL (outer_clause);
2687 tree_code outer_op = OMP_CLAUSE_REDUCTION_CODE (outer_clause);
/* Same variable, different operation: warn but keep looking.  */
2688 if (outer_var == local_var && outer_op != local_op)
2690 warning_at (OMP_CLAUSE_LOCATION (local_clause), 0,
2691 "conflicting reduction operations for %qE",
2692 local_var);
2693 inform (OMP_CLAUSE_LOCATION (outer_clause),
2694 "location of the previous reduction for %qE",
2695 outer_var);
2697 if (outer_var == local_var)
2699 have_outer_reduction = true;
2700 break;
2703 if (have_outer_reduction)
2705 /* There is a reduction on outer_var both on this loop and on
2706 some enclosing loop. Walk up the context tree until such a
2707 loop with a reduction on outer_var is found, and complain
2708 about all intervening loops that do not have such a
2709 reduction. */
2710 struct omp_context *curr_loop = ctx->outer;
2711 bool found = false;
2712 while (curr_loop != NULL)
2714 tree curr_iter = curr_loop->local_reduction_clauses;
2715 for (; curr_iter; curr_iter = TREE_CHAIN (curr_iter))
2717 tree curr_clause = TREE_VALUE (curr_iter);
2718 tree curr_var = OMP_CLAUSE_DECL (curr_clause);
2719 if (curr_var == local_var)
2721 found = true;
2722 break;
2725 if (!found)
2726 warning_at (gimple_location (curr_loop->stmt), 0,
2727 "nested loop in reduction needs "
2728 "reduction clause for %qE",
2729 local_var);
2730 else
2731 break;
2732 curr_loop = curr_loop->outer;
2736 ctx->local_reduction_clauses = local_reduction_clauses;
2737 ctx->outer_reduction_clauses
2738 = chainon (unshare_expr (ctx->local_reduction_clauses),
2739 ctx->outer_reduction_clauses);
2741 if (tgt && is_oacc_kernels (tgt))
2743 /* Strip out reductions, as they are not handled yet. */
2744 tree *prev_ptr = &clauses;
2746 while (tree probe = *prev_ptr)
2748 tree *next_ptr = &OMP_CLAUSE_CHAIN (probe);
2750 if (OMP_CLAUSE_CODE (probe) == OMP_CLAUSE_REDUCTION)
2751 *prev_ptr = *next_ptr;
2752 else
2753 prev_ptr = next_ptr;
2756 gimple_omp_for_set_clauses (stmt, clauses);
2760 scan_sharing_clauses (clauses, ctx);
/* Scan the pre-body and the per-dimension loop controls before the
   body itself.  */
2762 scan_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
2763 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
2765 scan_omp_op (gimple_omp_for_index_ptr (stmt, i), ctx);
2766 scan_omp_op (gimple_omp_for_initial_ptr (stmt, i), ctx);
2767 scan_omp_op (gimple_omp_for_final_ptr (stmt, i), ctx);
2768 scan_omp_op (gimple_omp_for_incr_ptr (stmt, i), ctx);
2770 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2771 return ctx;
2774 /* Duplicate #pragma omp simd, one for SIMT, another one for SIMD. */
/* Duplicate #pragma omp simd, one for SIMT, another one for SIMD.
   Replaces STMT at *GSI with a bind containing:
     cond = GOMP_USE_SIMT ();
     if (cond) { copy of STMT with a _simt_ clause } else { STMT }
   Both copies are then scanned; the SIMD context records the SIMT
   copy via simt_stmt.  */
2776 static void
2777 scan_omp_simd (gimple_stmt_iterator *gsi, gomp_for *stmt,
2778 omp_context *outer_ctx)
2780 gbind *bind = gimple_build_bind (NULL, NULL, NULL);
2781 gsi_replace (gsi, bind, false);
2782 gimple_seq seq = NULL;
/* cond = GOMP_USE_SIMT () decides at run/offload time which copy runs.  */
2783 gimple *g = gimple_build_call_internal (IFN_GOMP_USE_SIMT, 0);
2784 tree cond = create_tmp_var_raw (integer_type_node);
2785 DECL_CONTEXT (cond) = current_function_decl;
2786 DECL_SEEN_IN_BIND_EXPR_P (cond) = 1;
2787 gimple_bind_set_vars (bind, cond);
2788 gimple_call_set_lhs (g, cond);
2789 gimple_seq_add_stmt (&seq, g);
2790 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
2791 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
2792 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
2793 g = gimple_build_cond (NE_EXPR, cond, integer_zero_node, lab1, lab2);
2794 gimple_seq_add_stmt (&seq, g);
2795 g = gimple_build_label (lab1);
2796 gimple_seq_add_stmt (&seq, g);
/* The SIMT copy is a deep copy of STMT with an extra _simt_ clause.  */
2797 gimple_seq new_seq = copy_gimple_seq_and_replace_locals (stmt);
2798 gomp_for *new_stmt = as_a <gomp_for *> (new_seq);
2799 tree clause = build_omp_clause (gimple_location (stmt), OMP_CLAUSE__SIMT_);
2800 OMP_CLAUSE_CHAIN (clause) = gimple_omp_for_clauses (new_stmt);
2801 gimple_omp_for_set_clauses (new_stmt, clause);
2802 gimple_seq_add_stmt (&seq, new_stmt);
2803 g = gimple_build_goto (lab3);
2804 gimple_seq_add_stmt (&seq, g);
2805 g = gimple_build_label (lab2);
2806 gimple_seq_add_stmt (&seq, g);
2807 gimple_seq_add_stmt (&seq, stmt);
2808 g = gimple_build_label (lab3);
2809 gimple_seq_add_stmt (&seq, g);
2810 gimple_bind_set_body (bind, seq);
2811 update_stmt (bind);
/* Scan both copies; remember the SIMT twin on the SIMD context.  */
2812 scan_omp_for (new_stmt, outer_ctx);
2813 scan_omp_for (stmt, outer_ctx)->simt_stmt = new_stmt;
2816 static tree omp_find_scan (gimple_stmt_iterator *, bool *,
2817 struct walk_stmt_info *);
2818 static omp_context *maybe_lookup_ctx (gimple *);
2820 /* Duplicate #pragma omp simd, one for the scan input phase loop and one
2821 for scan phase loop. */
/* Duplicate #pragma omp simd, one for the scan input phase loop and one
   for scan phase loop.  STMT at *GSI is wrapped in a GIMPLE_OMP_SCAN
   input construct and a copy of it in a GIMPLE_OMP_SCAN inclusive
   construct; the inner separator GIMPLE_OMP_SCAN of each copy is
   located with omp_find_scan and its body adjusted so each loop keeps
   only its own phase.  */
2823 static void
2824 scan_omp_simd_scan (gimple_stmt_iterator *gsi, gomp_for *stmt,
2825 omp_context *outer_ctx)
2827 /* The only change between inclusive and exclusive scan will be
2828 within the first simd loop, so just use inclusive in the
2829 worksharing loop. */
2830 outer_ctx->scan_inclusive = true;
2831 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_INCLUSIVE);
2832 OMP_CLAUSE_DECL (c) = integer_zero_node;
2834 gomp_scan *input_stmt = gimple_build_omp_scan (NULL, NULL_TREE);
2835 gomp_scan *scan_stmt = gimple_build_omp_scan (NULL, c);
2836 gsi_replace (gsi, input_stmt, false);
2837 gimple_seq input_body = NULL;
2838 gimple_seq_add_stmt (&input_body, stmt);
2839 gsi_insert_after (gsi, scan_stmt, GSI_NEW_STMT);
/* Find the inner GIMPLE_OMP_SCAN separator inside STMT's body; the
   statement before it is the input phase, the one after the scan
   phase (swapped for exclusive scans).  */
2841 gimple_stmt_iterator input1_gsi = gsi_none ();
2842 struct walk_stmt_info wi;
2843 memset (&wi, 0, sizeof (wi));
2844 wi.val_only = true;
2845 wi.info = (void *) &input1_gsi;
2846 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), omp_find_scan, NULL, &wi);
2847 gcc_assert (!gsi_end_p (input1_gsi));
2849 gimple *input_stmt1 = gsi_stmt (input1_gsi);
2850 gsi_next (&input1_gsi);
2851 gimple *scan_stmt1 = gsi_stmt (input1_gsi);
2852 gcc_assert (scan_stmt1 && gimple_code (scan_stmt1) == GIMPLE_OMP_SCAN);
2853 c = gimple_omp_scan_clauses (as_a <gomp_scan *> (scan_stmt1));
2854 if (c && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_EXCLUSIVE)
2855 std::swap (input_stmt1, scan_stmt1);
/* Copy STMT with the input phase's body temporarily detached so the
   copy keeps only the scan phase.  */
2857 gimple_seq input_body1 = gimple_omp_body (input_stmt1);
2858 gimple_omp_set_body (input_stmt1, NULL);
2860 gimple_seq scan_body = copy_gimple_seq_and_replace_locals (stmt);
2861 gomp_for *new_stmt = as_a <gomp_for *> (scan_body);
2863 gimple_omp_set_body (input_stmt1, input_body1);
2864 gimple_omp_set_body (scan_stmt1, NULL);
/* Locate the separator again in the copy and strip its input phase.  */
2866 gimple_stmt_iterator input2_gsi = gsi_none ();
2867 memset (&wi, 0, sizeof (wi));
2868 wi.val_only = true;
2869 wi.info = (void *) &input2_gsi;
2870 walk_gimple_seq_mod (gimple_omp_body_ptr (new_stmt), omp_find_scan,
2871 NULL, &wi);
2872 gcc_assert (!gsi_end_p (input2_gsi));
2874 gimple *input_stmt2 = gsi_stmt (input2_gsi);
2875 gsi_next (&input2_gsi);
2876 gimple *scan_stmt2 = gsi_stmt (input2_gsi);
2877 gcc_assert (scan_stmt2 && gimple_code (scan_stmt2) == GIMPLE_OMP_SCAN);
2878 if (c && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_EXCLUSIVE)
2879 std::swap (input_stmt2, scan_stmt2);
2881 gimple_omp_set_body (input_stmt2, NULL);
2883 gimple_omp_set_body (input_stmt, input_body);
2884 gimple_omp_set_body (scan_stmt, scan_body);
/* Scan both GIMPLE_OMP_SCAN wrappers and flag the copy's context as
   the for-simd scan phase.  */
2886 omp_context *ctx = new_omp_context (input_stmt, outer_ctx);
2887 scan_omp (gimple_omp_body_ptr (input_stmt), ctx);
2889 ctx = new_omp_context (scan_stmt, outer_ctx);
2890 scan_omp (gimple_omp_body_ptr (scan_stmt), ctx);
2892 maybe_lookup_ctx (new_stmt)->for_simd_scan_phase = true;
2895 /* Scan an OpenMP sections directive. */
2897 static void
2898 scan_omp_sections (gomp_sections *stmt, omp_context *outer_ctx)
2900 omp_context *ctx;
2902 ctx = new_omp_context (stmt, outer_ctx);
2903 scan_sharing_clauses (gimple_omp_sections_clauses (stmt), ctx);
2904 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2907 /* Scan an OpenMP single directive. */
2909 static void
2910 scan_omp_single (gomp_single *stmt, omp_context *outer_ctx)
2912 omp_context *ctx;
2913 tree name;
2915 ctx = new_omp_context (stmt, outer_ctx);
2916 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2917 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2918 name = create_tmp_var_name (".omp_copy_s");
2919 name = build_decl (gimple_location (stmt),
2920 TYPE_DECL, name, ctx->record_type);
2921 TYPE_NAME (ctx->record_type) = name;
2923 scan_sharing_clauses (gimple_omp_single_clauses (stmt), ctx);
2924 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2926 if (TYPE_FIELDS (ctx->record_type) == NULL)
2927 ctx->record_type = NULL;
2928 else
2929 layout_type (ctx->record_type);
2932 /* Scan a GIMPLE_OMP_TARGET. */
/* Scan a GIMPLE_OMP_TARGET STMT under OUTER_CTX: build its context and
   the .omp_data_t record describing mapped data, create the offloaded
   child function when STMT is an offloading construct, scan clauses
   and body, lay out the record, and diagnose a 'target' that mixes a
   nested 'teams' with other directives.  */
2934 static void
2935 scan_omp_target (gomp_target *stmt, omp_context *outer_ctx)
2937 omp_context *ctx;
2938 tree name;
2939 bool offloaded = is_gimple_omp_offloaded (stmt);
2940 tree clauses = gimple_omp_target_clauses (stmt);
2942 ctx = new_omp_context (stmt, outer_ctx);
2943 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2944 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2945 name = create_tmp_var_name (".omp_data_t");
2946 name = build_decl (gimple_location (stmt),
2947 TYPE_DECL, name, ctx->record_type);
2948 DECL_ARTIFICIAL (name) = 1;
2949 DECL_NAMELESS (name) = 1;
2950 TYPE_NAME (ctx->record_type) = name;
2951 TYPE_ARTIFICIAL (ctx->record_type) = 1;
/* Only offloaded constructs (not e.g. target data) get a child fn.  */
2953 if (offloaded)
2955 create_omp_child_function (ctx, false);
2956 gimple_omp_target_set_child_fn (stmt, ctx->cb.dst_fn);
2959 scan_sharing_clauses (clauses, ctx);
2960 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2962 if (TYPE_FIELDS (ctx->record_type) == NULL)
2963 ctx->record_type = ctx->receiver_decl = NULL;
2964 else
/* Fields were prepended during scanning; restore declaration order
   before layout.  With checking, all fields must share one alignment.  */
2966 TYPE_FIELDS (ctx->record_type)
2967 = nreverse (TYPE_FIELDS (ctx->record_type));
2968 if (flag_checking)
2970 unsigned int align = DECL_ALIGN (TYPE_FIELDS (ctx->record_type));
2971 for (tree field = TYPE_FIELDS (ctx->record_type);
2972 field;
2973 field = DECL_CHAIN (field))
2974 gcc_assert (DECL_ALIGN (field) == align);
2976 layout_type (ctx->record_type);
2977 if (offloaded)
2978 fixup_child_record_type (ctx);
/* A target region must not contain directives beside a nested teams;
   on error the body is replaced by an empty bind.  */
2981 if (ctx->teams_nested_p && ctx->nonteams_nested_p)
2983 error_at (gimple_location (stmt),
2984 "%<target%> construct with nested %<teams%> construct "
2985 "contains directives outside of the %<teams%> construct");
2986 gimple_omp_set_body (stmt, gimple_build_bind (NULL, NULL, NULL));
2990 /* Scan an OpenMP teams directive. */
/* Scan a GIMPLE_OMP_TEAMS STMT under OUTER_CTX.  Non-host teams only
   need their clauses and body scanned; host teams additionally get a
   .omp_data_s record and an outlined child function, like parallel.  */
2992 static void
2993 scan_omp_teams (gomp_teams *stmt, omp_context *outer_ctx)
2995 omp_context *ctx = new_omp_context (stmt, outer_ctx);
2997 if (!gimple_omp_teams_host (stmt))
2999 scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
3000 scan_omp (gimple_omp_body_ptr (stmt), ctx);
3001 return;
/* Host teams: handled like a taskreg construct; must not be nested.  */
3003 taskreg_contexts.safe_push (ctx);
3004 gcc_assert (taskreg_nesting_level == 1);
3005 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
3006 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
3007 tree name = create_tmp_var_name (".omp_data_s");
3008 name = build_decl (gimple_location (stmt),
3009 TYPE_DECL, name, ctx->record_type);
3010 DECL_ARTIFICIAL (name) = 1;
3011 DECL_NAMELESS (name) = 1;
3012 TYPE_NAME (ctx->record_type) = name;
3013 TYPE_ARTIFICIAL (ctx->record_type) = 1;
3014 create_omp_child_function (ctx, false);
3015 gimple_omp_teams_set_child_fn (stmt, ctx->cb.dst_fn);
3017 scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
3018 scan_omp (gimple_omp_body_ptr (stmt), ctx);
3020 if (TYPE_FIELDS (ctx->record_type) == NULL)
3021 ctx->record_type = ctx->receiver_decl = NULL;
3024 /* Check nesting restrictions. */
3025 static bool
3026 check_omp_nesting_restrictions (gimple *stmt, omp_context *ctx)
3028 tree c;
3030 /* No nesting of non-OpenACC STMT (that is, an OpenMP one, or a GOMP builtin)
3031 inside an OpenACC CTX. */
3032 if (gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
3033 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE)
3034 /* ..., except for the atomic codes that OpenACC shares with OpenMP. */
3036 else if (!(is_gimple_omp (stmt)
3037 && is_gimple_omp_oacc (stmt)))
3039 if (oacc_get_fn_attrib (cfun->decl) != NULL)
3041 error_at (gimple_location (stmt),
3042 "non-OpenACC construct inside of OpenACC routine");
3043 return false;
3045 else
3046 for (omp_context *octx = ctx; octx != NULL; octx = octx->outer)
3047 if (is_gimple_omp (octx->stmt)
3048 && is_gimple_omp_oacc (octx->stmt))
3050 error_at (gimple_location (stmt),
3051 "non-OpenACC construct inside of OpenACC region");
3052 return false;
3056 if (ctx != NULL)
3058 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET
3059 && gimple_omp_target_kind (ctx->stmt) == GF_OMP_TARGET_KIND_REGION)
3061 if (gimple_code (stmt) == GIMPLE_OMP_TEAMS && !ctx->teams_nested_p)
3062 ctx->teams_nested_p = true;
3063 else
3064 ctx->nonteams_nested_p = true;
3066 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SCAN
3067 && ctx->outer
3068 && gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
3069 ctx = ctx->outer;
3070 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
3071 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD
3072 && !ctx->loop_p)
3074 c = NULL_TREE;
3075 if (ctx->order_concurrent
3076 && (gimple_code (stmt) == GIMPLE_OMP_ORDERED
3077 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
3078 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE))
3080 error_at (gimple_location (stmt),
3081 "OpenMP constructs other than %<parallel%>, %<loop%>"
3082 " or %<simd%> may not be nested inside a region with"
3083 " the %<order(concurrent)%> clause");
3084 return false;
3086 if (gimple_code (stmt) == GIMPLE_OMP_ORDERED)
3088 c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
3089 if (omp_find_clause (c, OMP_CLAUSE_SIMD))
3091 if (omp_find_clause (c, OMP_CLAUSE_THREADS)
3092 && (ctx->outer == NULL
3093 || !gimple_omp_for_combined_into_p (ctx->stmt)
3094 || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR
3095 || (gimple_omp_for_kind (ctx->outer->stmt)
3096 != GF_OMP_FOR_KIND_FOR)
3097 || !gimple_omp_for_combined_p (ctx->outer->stmt)))
3099 error_at (gimple_location (stmt),
3100 "%<ordered simd threads%> must be closely "
3101 "nested inside of %<%s simd%> region",
3102 lang_GNU_Fortran () ? "do" : "for");
3103 return false;
3105 return true;
3108 else if (gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
3109 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE
3110 || gimple_code (stmt) == GIMPLE_OMP_SCAN)
3111 return true;
3112 else if (gimple_code (stmt) == GIMPLE_OMP_FOR
3113 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
3114 return true;
3115 error_at (gimple_location (stmt),
3116 "OpenMP constructs other than "
3117 "%<ordered simd%>, %<simd%>, %<loop%> or %<atomic%> may "
3118 "not be nested inside %<simd%> region");
3119 return false;
3121 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
3123 if ((gimple_code (stmt) != GIMPLE_OMP_FOR
3124 || (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_DISTRIBUTE
3125 && omp_find_clause (gimple_omp_for_clauses (stmt),
3126 OMP_CLAUSE_BIND) == NULL_TREE))
3127 && gimple_code (stmt) != GIMPLE_OMP_PARALLEL)
3129 error_at (gimple_location (stmt),
3130 "only %<distribute%>, %<parallel%> or %<loop%> "
3131 "regions are allowed to be strictly nested inside "
3132 "%<teams%> region");
3133 return false;
3136 else if (ctx->order_concurrent
3137 && gimple_code (stmt) != GIMPLE_OMP_PARALLEL
3138 && (gimple_code (stmt) != GIMPLE_OMP_FOR
3139 || gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_SIMD)
3140 && gimple_code (stmt) != GIMPLE_OMP_SCAN)
3142 if (ctx->loop_p)
3143 error_at (gimple_location (stmt),
3144 "OpenMP constructs other than %<parallel%>, %<loop%> or "
3145 "%<simd%> may not be nested inside a %<loop%> region");
3146 else
3147 error_at (gimple_location (stmt),
3148 "OpenMP constructs other than %<parallel%>, %<loop%> or "
3149 "%<simd%> may not be nested inside a region with "
3150 "the %<order(concurrent)%> clause");
3151 return false;
3154 switch (gimple_code (stmt))
3156 case GIMPLE_OMP_FOR:
3157 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_SIMD)
3158 return true;
3159 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_DISTRIBUTE)
3161 if (ctx != NULL && gimple_code (ctx->stmt) != GIMPLE_OMP_TEAMS)
3163 error_at (gimple_location (stmt),
3164 "%<distribute%> region must be strictly nested "
3165 "inside %<teams%> construct");
3166 return false;
3168 return true;
3170 /* We split taskloop into task and nested taskloop in it. */
3171 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP)
3172 return true;
3173 /* For now, hope this will change and loop bind(parallel) will not
3174 be allowed in lots of contexts. */
3175 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR
3176 && omp_find_clause (gimple_omp_for_clauses (stmt), OMP_CLAUSE_BIND))
3177 return true;
3178 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_OACC_LOOP)
3180 bool ok = false;
3182 if (ctx)
3183 switch (gimple_code (ctx->stmt))
3185 case GIMPLE_OMP_FOR:
3186 ok = (gimple_omp_for_kind (ctx->stmt)
3187 == GF_OMP_FOR_KIND_OACC_LOOP);
3188 break;
3190 case GIMPLE_OMP_TARGET:
3191 switch (gimple_omp_target_kind (ctx->stmt))
3193 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
3194 case GF_OMP_TARGET_KIND_OACC_KERNELS:
3195 case GF_OMP_TARGET_KIND_OACC_SERIAL:
3196 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED:
3197 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE:
3198 ok = true;
3199 break;
3201 default:
3202 break;
3205 default:
3206 break;
3208 else if (oacc_get_fn_attrib (current_function_decl))
3209 ok = true;
3210 if (!ok)
3212 error_at (gimple_location (stmt),
3213 "OpenACC loop directive must be associated with"
3214 " an OpenACC compute region");
3215 return false;
3218 /* FALLTHRU */
3219 case GIMPLE_CALL:
3220 if (is_gimple_call (stmt)
3221 && (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3222 == BUILT_IN_GOMP_CANCEL
3223 || DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3224 == BUILT_IN_GOMP_CANCELLATION_POINT))
3226 const char *bad = NULL;
3227 const char *kind = NULL;
3228 const char *construct
3229 = (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3230 == BUILT_IN_GOMP_CANCEL)
3231 ? "cancel"
3232 : "cancellation point";
3233 if (ctx == NULL)
3235 error_at (gimple_location (stmt), "orphaned %qs construct",
3236 construct);
3237 return false;
3239 switch (tree_fits_shwi_p (gimple_call_arg (stmt, 0))
3240 ? tree_to_shwi (gimple_call_arg (stmt, 0))
3241 : 0)
3243 case 1:
3244 if (gimple_code (ctx->stmt) != GIMPLE_OMP_PARALLEL)
3245 bad = "parallel";
3246 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3247 == BUILT_IN_GOMP_CANCEL
3248 && !integer_zerop (gimple_call_arg (stmt, 1)))
3249 ctx->cancellable = true;
3250 kind = "parallel";
3251 break;
3252 case 2:
3253 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
3254 || gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR)
3255 bad = "for";
3256 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3257 == BUILT_IN_GOMP_CANCEL
3258 && !integer_zerop (gimple_call_arg (stmt, 1)))
3260 ctx->cancellable = true;
3261 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3262 OMP_CLAUSE_NOWAIT))
3263 warning_at (gimple_location (stmt), 0,
3264 "%<cancel for%> inside "
3265 "%<nowait%> for construct");
3266 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3267 OMP_CLAUSE_ORDERED))
3268 warning_at (gimple_location (stmt), 0,
3269 "%<cancel for%> inside "
3270 "%<ordered%> for construct");
3272 kind = "for";
3273 break;
3274 case 4:
3275 if (gimple_code (ctx->stmt) != GIMPLE_OMP_SECTIONS
3276 && gimple_code (ctx->stmt) != GIMPLE_OMP_SECTION)
3277 bad = "sections";
3278 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3279 == BUILT_IN_GOMP_CANCEL
3280 && !integer_zerop (gimple_call_arg (stmt, 1)))
3282 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
3284 ctx->cancellable = true;
3285 if (omp_find_clause (gimple_omp_sections_clauses
3286 (ctx->stmt),
3287 OMP_CLAUSE_NOWAIT))
3288 warning_at (gimple_location (stmt), 0,
3289 "%<cancel sections%> inside "
3290 "%<nowait%> sections construct");
3292 else
3294 gcc_assert (ctx->outer
3295 && gimple_code (ctx->outer->stmt)
3296 == GIMPLE_OMP_SECTIONS);
3297 ctx->outer->cancellable = true;
3298 if (omp_find_clause (gimple_omp_sections_clauses
3299 (ctx->outer->stmt),
3300 OMP_CLAUSE_NOWAIT))
3301 warning_at (gimple_location (stmt), 0,
3302 "%<cancel sections%> inside "
3303 "%<nowait%> sections construct");
3306 kind = "sections";
3307 break;
3308 case 8:
3309 if (!is_task_ctx (ctx)
3310 && (!is_taskloop_ctx (ctx)
3311 || ctx->outer == NULL
3312 || !is_task_ctx (ctx->outer)))
3313 bad = "task";
3314 else
3316 for (omp_context *octx = ctx->outer;
3317 octx; octx = octx->outer)
3319 switch (gimple_code (octx->stmt))
3321 case GIMPLE_OMP_TASKGROUP:
3322 break;
3323 case GIMPLE_OMP_TARGET:
3324 if (gimple_omp_target_kind (octx->stmt)
3325 != GF_OMP_TARGET_KIND_REGION)
3326 continue;
3327 /* FALLTHRU */
3328 case GIMPLE_OMP_PARALLEL:
3329 case GIMPLE_OMP_TEAMS:
3330 error_at (gimple_location (stmt),
3331 "%<%s taskgroup%> construct not closely "
3332 "nested inside of %<taskgroup%> region",
3333 construct);
3334 return false;
3335 case GIMPLE_OMP_TASK:
3336 if (gimple_omp_task_taskloop_p (octx->stmt)
3337 && octx->outer
3338 && is_taskloop_ctx (octx->outer))
3340 tree clauses
3341 = gimple_omp_for_clauses (octx->outer->stmt);
3342 if (!omp_find_clause (clauses, OMP_CLAUSE_NOGROUP))
3343 break;
3345 continue;
3346 default:
3347 continue;
3349 break;
3351 ctx->cancellable = true;
3353 kind = "taskgroup";
3354 break;
3355 default:
3356 error_at (gimple_location (stmt), "invalid arguments");
3357 return false;
3359 if (bad)
3361 error_at (gimple_location (stmt),
3362 "%<%s %s%> construct not closely nested inside of %qs",
3363 construct, kind, bad);
3364 return false;
3367 /* FALLTHRU */
3368 case GIMPLE_OMP_SECTIONS:
3369 case GIMPLE_OMP_SINGLE:
3370 for (; ctx != NULL; ctx = ctx->outer)
3371 switch (gimple_code (ctx->stmt))
3373 case GIMPLE_OMP_FOR:
3374 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
3375 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
3376 break;
3377 /* FALLTHRU */
3378 case GIMPLE_OMP_SECTIONS:
3379 case GIMPLE_OMP_SINGLE:
3380 case GIMPLE_OMP_ORDERED:
3381 case GIMPLE_OMP_MASTER:
3382 case GIMPLE_OMP_TASK:
3383 case GIMPLE_OMP_CRITICAL:
3384 if (is_gimple_call (stmt))
3386 if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3387 != BUILT_IN_GOMP_BARRIER)
3388 return true;
3389 error_at (gimple_location (stmt),
3390 "barrier region may not be closely nested inside "
3391 "of work-sharing, %<loop%>, %<critical%>, "
3392 "%<ordered%>, %<master%>, explicit %<task%> or "
3393 "%<taskloop%> region");
3394 return false;
3396 error_at (gimple_location (stmt),
3397 "work-sharing region may not be closely nested inside "
3398 "of work-sharing, %<loop%>, %<critical%>, %<ordered%>, "
3399 "%<master%>, explicit %<task%> or %<taskloop%> region");
3400 return false;
3401 case GIMPLE_OMP_PARALLEL:
3402 case GIMPLE_OMP_TEAMS:
3403 return true;
3404 case GIMPLE_OMP_TARGET:
3405 if (gimple_omp_target_kind (ctx->stmt)
3406 == GF_OMP_TARGET_KIND_REGION)
3407 return true;
3408 break;
3409 default:
3410 break;
3412 break;
3413 case GIMPLE_OMP_MASTER:
3414 for (; ctx != NULL; ctx = ctx->outer)
3415 switch (gimple_code (ctx->stmt))
3417 case GIMPLE_OMP_FOR:
3418 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
3419 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
3420 break;
3421 /* FALLTHRU */
3422 case GIMPLE_OMP_SECTIONS:
3423 case GIMPLE_OMP_SINGLE:
3424 case GIMPLE_OMP_TASK:
3425 error_at (gimple_location (stmt),
3426 "%<master%> region may not be closely nested inside "
3427 "of work-sharing, %<loop%>, explicit %<task%> or "
3428 "%<taskloop%> region");
3429 return false;
3430 case GIMPLE_OMP_PARALLEL:
3431 case GIMPLE_OMP_TEAMS:
3432 return true;
3433 case GIMPLE_OMP_TARGET:
3434 if (gimple_omp_target_kind (ctx->stmt)
3435 == GF_OMP_TARGET_KIND_REGION)
3436 return true;
3437 break;
3438 default:
3439 break;
3441 break;
3442 case GIMPLE_OMP_TASK:
3443 for (c = gimple_omp_task_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
3444 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
3445 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
3446 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
3448 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
3449 error_at (OMP_CLAUSE_LOCATION (c),
3450 "%<depend(%s)%> is only allowed in %<omp ordered%>",
3451 kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
3452 return false;
3454 break;
3455 case GIMPLE_OMP_ORDERED:
3456 for (c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
3457 c; c = OMP_CLAUSE_CHAIN (c))
3459 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
3461 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREADS
3462 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SIMD);
3463 continue;
3465 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
3466 if (kind == OMP_CLAUSE_DEPEND_SOURCE
3467 || kind == OMP_CLAUSE_DEPEND_SINK)
3469 tree oclause;
3470 /* Look for containing ordered(N) loop. */
3471 if (ctx == NULL
3472 || gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
3473 || (oclause
3474 = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3475 OMP_CLAUSE_ORDERED)) == NULL_TREE)
3477 error_at (OMP_CLAUSE_LOCATION (c),
3478 "%<ordered%> construct with %<depend%> clause "
3479 "must be closely nested inside an %<ordered%> "
3480 "loop");
3481 return false;
3483 else if (OMP_CLAUSE_ORDERED_EXPR (oclause) == NULL_TREE)
3485 error_at (OMP_CLAUSE_LOCATION (c),
3486 "%<ordered%> construct with %<depend%> clause "
3487 "must be closely nested inside a loop with "
3488 "%<ordered%> clause with a parameter");
3489 return false;
3492 else
3494 error_at (OMP_CLAUSE_LOCATION (c),
3495 "invalid depend kind in omp %<ordered%> %<depend%>");
3496 return false;
3499 c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
3500 if (omp_find_clause (c, OMP_CLAUSE_SIMD))
3502 /* ordered simd must be closely nested inside of simd region,
3503 and simd region must not encounter constructs other than
3504 ordered simd, therefore ordered simd may be either orphaned,
3505 or ctx->stmt must be simd. The latter case is handled already
3506 earlier. */
3507 if (ctx != NULL)
3509 error_at (gimple_location (stmt),
3510 "%<ordered%> %<simd%> must be closely nested inside "
3511 "%<simd%> region");
3512 return false;
3515 for (; ctx != NULL; ctx = ctx->outer)
3516 switch (gimple_code (ctx->stmt))
3518 case GIMPLE_OMP_CRITICAL:
3519 case GIMPLE_OMP_TASK:
3520 case GIMPLE_OMP_ORDERED:
3521 ordered_in_taskloop:
3522 error_at (gimple_location (stmt),
3523 "%<ordered%> region may not be closely nested inside "
3524 "of %<critical%>, %<ordered%>, explicit %<task%> or "
3525 "%<taskloop%> region");
3526 return false;
3527 case GIMPLE_OMP_FOR:
3528 if (gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP)
3529 goto ordered_in_taskloop;
3530 tree o;
3531 o = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3532 OMP_CLAUSE_ORDERED);
3533 if (o == NULL)
3535 error_at (gimple_location (stmt),
3536 "%<ordered%> region must be closely nested inside "
3537 "a loop region with an %<ordered%> clause");
3538 return false;
3540 if (OMP_CLAUSE_ORDERED_EXPR (o) != NULL_TREE
3541 && omp_find_clause (c, OMP_CLAUSE_DEPEND) == NULL_TREE)
3543 error_at (gimple_location (stmt),
3544 "%<ordered%> region without %<depend%> clause may "
3545 "not be closely nested inside a loop region with "
3546 "an %<ordered%> clause with a parameter");
3547 return false;
3549 return true;
3550 case GIMPLE_OMP_TARGET:
3551 if (gimple_omp_target_kind (ctx->stmt)
3552 != GF_OMP_TARGET_KIND_REGION)
3553 break;
3554 /* FALLTHRU */
3555 case GIMPLE_OMP_PARALLEL:
3556 case GIMPLE_OMP_TEAMS:
3557 error_at (gimple_location (stmt),
3558 "%<ordered%> region must be closely nested inside "
3559 "a loop region with an %<ordered%> clause");
3560 return false;
3561 default:
3562 break;
3564 break;
3565 case GIMPLE_OMP_CRITICAL:
3567 tree this_stmt_name
3568 = gimple_omp_critical_name (as_a <gomp_critical *> (stmt));
3569 for (; ctx != NULL; ctx = ctx->outer)
3570 if (gomp_critical *other_crit
3571 = dyn_cast <gomp_critical *> (ctx->stmt))
3572 if (this_stmt_name == gimple_omp_critical_name (other_crit))
3574 error_at (gimple_location (stmt),
3575 "%<critical%> region may not be nested inside "
3576 "a %<critical%> region with the same name");
3577 return false;
3580 break;
3581 case GIMPLE_OMP_TEAMS:
3582 if (ctx == NULL)
3583 break;
3584 else if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET
3585 || (gimple_omp_target_kind (ctx->stmt)
3586 != GF_OMP_TARGET_KIND_REGION))
3588 /* Teams construct can appear either strictly nested inside of
3589 target construct with no intervening stmts, or can be encountered
3590 only by initial task (so must not appear inside any OpenMP
3591 construct. */
3592 error_at (gimple_location (stmt),
3593 "%<teams%> construct must be closely nested inside of "
3594 "%<target%> construct or not nested in any OpenMP "
3595 "construct");
3596 return false;
3598 break;
3599 case GIMPLE_OMP_TARGET:
3600 for (c = gimple_omp_target_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
3601 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
3602 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
3603 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
3605 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
3606 error_at (OMP_CLAUSE_LOCATION (c),
3607 "%<depend(%s)%> is only allowed in %<omp ordered%>",
3608 kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
3609 return false;
3611 if (is_gimple_omp_offloaded (stmt)
3612 && oacc_get_fn_attrib (cfun->decl) != NULL)
3614 error_at (gimple_location (stmt),
3615 "OpenACC region inside of OpenACC routine, nested "
3616 "parallelism not supported yet");
3617 return false;
3619 for (; ctx != NULL; ctx = ctx->outer)
3621 if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET)
3623 if (is_gimple_omp (stmt)
3624 && is_gimple_omp_oacc (stmt)
3625 && is_gimple_omp (ctx->stmt))
3627 error_at (gimple_location (stmt),
3628 "OpenACC construct inside of non-OpenACC region");
3629 return false;
3631 continue;
3634 const char *stmt_name, *ctx_stmt_name;
3635 switch (gimple_omp_target_kind (stmt))
3637 case GF_OMP_TARGET_KIND_REGION: stmt_name = "target"; break;
3638 case GF_OMP_TARGET_KIND_DATA: stmt_name = "target data"; break;
3639 case GF_OMP_TARGET_KIND_UPDATE: stmt_name = "target update"; break;
3640 case GF_OMP_TARGET_KIND_ENTER_DATA:
3641 stmt_name = "target enter data"; break;
3642 case GF_OMP_TARGET_KIND_EXIT_DATA:
3643 stmt_name = "target exit data"; break;
3644 case GF_OMP_TARGET_KIND_OACC_PARALLEL: stmt_name = "parallel"; break;
3645 case GF_OMP_TARGET_KIND_OACC_KERNELS: stmt_name = "kernels"; break;
3646 case GF_OMP_TARGET_KIND_OACC_SERIAL: stmt_name = "serial"; break;
3647 case GF_OMP_TARGET_KIND_OACC_DATA: stmt_name = "data"; break;
3648 case GF_OMP_TARGET_KIND_OACC_UPDATE: stmt_name = "update"; break;
3649 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
3650 stmt_name = "enter/exit data"; break;
3651 case GF_OMP_TARGET_KIND_OACC_DECLARE: stmt_name = "declare"; break;
3652 case GF_OMP_TARGET_KIND_OACC_HOST_DATA: stmt_name = "host_data";
3653 break;
3654 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED:
3655 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE:
3656 case GF_OMP_TARGET_KIND_OACC_DATA_KERNELS:
3657 /* OpenACC 'kernels' decomposed parts. */
3658 stmt_name = "kernels"; break;
3659 default: gcc_unreachable ();
3661 switch (gimple_omp_target_kind (ctx->stmt))
3663 case GF_OMP_TARGET_KIND_REGION: ctx_stmt_name = "target"; break;
3664 case GF_OMP_TARGET_KIND_DATA: ctx_stmt_name = "target data"; break;
3665 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
3666 ctx_stmt_name = "parallel"; break;
3667 case GF_OMP_TARGET_KIND_OACC_KERNELS:
3668 ctx_stmt_name = "kernels"; break;
3669 case GF_OMP_TARGET_KIND_OACC_SERIAL:
3670 ctx_stmt_name = "serial"; break;
3671 case GF_OMP_TARGET_KIND_OACC_DATA: ctx_stmt_name = "data"; break;
3672 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
3673 ctx_stmt_name = "host_data"; break;
3674 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED:
3675 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE:
3676 case GF_OMP_TARGET_KIND_OACC_DATA_KERNELS:
3677 /* OpenACC 'kernels' decomposed parts. */
3678 ctx_stmt_name = "kernels"; break;
3679 default: gcc_unreachable ();
3682 /* OpenACC/OpenMP mismatch? */
3683 if (is_gimple_omp_oacc (stmt)
3684 != is_gimple_omp_oacc (ctx->stmt))
3686 error_at (gimple_location (stmt),
3687 "%s %qs construct inside of %s %qs region",
3688 (is_gimple_omp_oacc (stmt)
3689 ? "OpenACC" : "OpenMP"), stmt_name,
3690 (is_gimple_omp_oacc (ctx->stmt)
3691 ? "OpenACC" : "OpenMP"), ctx_stmt_name);
3692 return false;
3694 if (is_gimple_omp_offloaded (ctx->stmt))
3696 /* No GIMPLE_OMP_TARGET inside offloaded OpenACC CTX. */
3697 if (is_gimple_omp_oacc (ctx->stmt))
3699 error_at (gimple_location (stmt),
3700 "%qs construct inside of %qs region",
3701 stmt_name, ctx_stmt_name);
3702 return false;
3704 else
3706 warning_at (gimple_location (stmt), 0,
3707 "%qs construct inside of %qs region",
3708 stmt_name, ctx_stmt_name);
3712 break;
3713 default:
3714 break;
3716 return true;
3720 /* Helper function scan_omp.
3722 Callback for walk_tree or operators in walk_gimple_stmt used to
3723 scan for OMP directives in TP. */
3725 static tree
3726 scan_omp_1_op (tree *tp, int *walk_subtrees, void *data)
3728 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
3729 omp_context *ctx = (omp_context *) wi->info;
3730 tree t = *tp;
3732 switch (TREE_CODE (t))
3734 case VAR_DECL:
3735 case PARM_DECL:
3736 case LABEL_DECL:
3737 case RESULT_DECL:
3738 if (ctx)
3740 tree repl = remap_decl (t, &ctx->cb);
3741 gcc_checking_assert (TREE_CODE (repl) != ERROR_MARK);
3742 *tp = repl;
3744 break;
3746 default:
3747 if (ctx && TYPE_P (t))
3748 *tp = remap_type (t, &ctx->cb);
3749 else if (!DECL_P (t))
3751 *walk_subtrees = 1;
3752 if (ctx)
3754 tree tem = remap_type (TREE_TYPE (t), &ctx->cb);
3755 if (tem != TREE_TYPE (t))
3757 if (TREE_CODE (t) == INTEGER_CST)
3758 *tp = wide_int_to_tree (tem, wi::to_wide (t));
3759 else
3760 TREE_TYPE (t) = tem;
3764 break;
3767 return NULL_TREE;
3770 /* Return true if FNDECL is a setjmp or a longjmp. */
3772 static bool
3773 setjmp_or_longjmp_p (const_tree fndecl)
3775 if (fndecl_built_in_p (fndecl, BUILT_IN_SETJMP)
3776 || fndecl_built_in_p (fndecl, BUILT_IN_LONGJMP))
3777 return true;
3779 tree declname = DECL_NAME (fndecl);
3780 if (!declname
3781 || (DECL_CONTEXT (fndecl) != NULL_TREE
3782 && TREE_CODE (DECL_CONTEXT (fndecl)) != TRANSLATION_UNIT_DECL)
3783 || !TREE_PUBLIC (fndecl))
3784 return false;
3786 const char *name = IDENTIFIER_POINTER (declname);
3787 return !strcmp (name, "setjmp") || !strcmp (name, "longjmp");
3790 /* Return true if FNDECL is an omp_* runtime API call. */
3792 static bool
3793 omp_runtime_api_call (const_tree fndecl)
3795 tree declname = DECL_NAME (fndecl);
3796 if (!declname
3797 || (DECL_CONTEXT (fndecl) != NULL_TREE
3798 && TREE_CODE (DECL_CONTEXT (fndecl)) != TRANSLATION_UNIT_DECL)
3799 || !TREE_PUBLIC (fndecl))
3800 return false;
3802 const char *name = IDENTIFIER_POINTER (declname);
3803 if (!startswith (name, "omp_"))
3804 return false;
3806 static const char *omp_runtime_apis[] =
3808 /* This array has 3 sections. First omp_* calls that don't
3809 have any suffixes. */
3810 "target_alloc",
3811 "target_associate_ptr",
3812 "target_disassociate_ptr",
3813 "target_free",
3814 "target_is_present",
3815 "target_memcpy",
3816 "target_memcpy_rect",
3817 NULL,
3818 /* Now omp_* calls that are available as omp_* and omp_*_. */
3819 "capture_affinity",
3820 "destroy_lock",
3821 "destroy_nest_lock",
3822 "display_affinity",
3823 "get_active_level",
3824 "get_affinity_format",
3825 "get_cancellation",
3826 "get_default_device",
3827 "get_dynamic",
3828 "get_initial_device",
3829 "get_level",
3830 "get_max_active_levels",
3831 "get_max_task_priority",
3832 "get_max_threads",
3833 "get_nested",
3834 "get_num_devices",
3835 "get_num_places",
3836 "get_num_procs",
3837 "get_num_teams",
3838 "get_num_threads",
3839 "get_partition_num_places",
3840 "get_place_num",
3841 "get_proc_bind",
3842 "get_team_num",
3843 "get_thread_limit",
3844 "get_thread_num",
3845 "get_wtick",
3846 "get_wtime",
3847 "in_final",
3848 "in_parallel",
3849 "init_lock",
3850 "init_nest_lock",
3851 "is_initial_device",
3852 "pause_resource",
3853 "pause_resource_all",
3854 "set_affinity_format",
3855 "set_lock",
3856 "set_nest_lock",
3857 "test_lock",
3858 "test_nest_lock",
3859 "unset_lock",
3860 "unset_nest_lock",
3861 NULL,
3862 /* And finally calls available as omp_*, omp_*_ and omp_*_8_. */
3863 "get_ancestor_thread_num",
3864 "get_partition_place_nums",
3865 "get_place_num_procs",
3866 "get_place_proc_ids",
3867 "get_schedule",
3868 "get_team_size",
3869 "set_default_device",
3870 "set_dynamic",
3871 "set_max_active_levels",
3872 "set_nested",
3873 "set_num_threads",
3874 "set_schedule"
3877 int mode = 0;
3878 for (unsigned i = 0; i < ARRAY_SIZE (omp_runtime_apis); i++)
3880 if (omp_runtime_apis[i] == NULL)
3882 mode++;
3883 continue;
3885 size_t len = strlen (omp_runtime_apis[i]);
3886 if (strncmp (name + 4, omp_runtime_apis[i], len) == 0
3887 && (name[4 + len] == '\0'
3888 || (mode > 0
3889 && name[4 + len] == '_'
3890 && (name[4 + len + 1] == '\0'
3891 || (mode > 1
3892 && strcmp (name + 4 + len + 1, "8_") == 0)))))
3893 return true;
3895 return false;
3898 /* Helper function for scan_omp.
3900 Callback for walk_gimple_stmt used to scan for OMP directives in
3901 the current statement in GSI. */
3903 static tree
3904 scan_omp_1_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
3905 struct walk_stmt_info *wi)
3907 gimple *stmt = gsi_stmt (*gsi);
3908 omp_context *ctx = (omp_context *) wi->info;
/* Point diagnostics issued during scanning at this statement. */
3910 if (gimple_has_location (stmt))
3911 input_location = gimple_location (stmt);
3913 /* Check the nesting restrictions. */
3914 bool remove = false;
3915 if (is_gimple_omp (stmt))
3916 remove = !check_omp_nesting_restrictions (stmt, ctx);
3917 else if (is_gimple_call (stmt))
3919 tree fndecl = gimple_call_fndecl (stmt);
3920 if (fndecl)
/* setjmp/longjmp is not allowed inside a simd construct (unless it is
   really a loop construct, ctx->loop_p). */
3922 if (ctx
3923 && gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
3924 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD
3925 && setjmp_or_longjmp_p (fndecl)
3926 && !ctx->loop_p)
3928 remove = true;
3929 error_at (gimple_location (stmt),
3930 "setjmp/longjmp inside %<simd%> construct");
/* GOMP_* builtins stand for OpenMP constructs and must obey the same
   nesting rules. */
3932 else if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3933 switch (DECL_FUNCTION_CODE (fndecl))
3935 case BUILT_IN_GOMP_BARRIER:
3936 case BUILT_IN_GOMP_CANCEL:
3937 case BUILT_IN_GOMP_CANCELLATION_POINT:
3938 case BUILT_IN_GOMP_TASKYIELD:
3939 case BUILT_IN_GOMP_TASKWAIT:
3940 case BUILT_IN_GOMP_TASKGROUP_START:
3941 case BUILT_IN_GOMP_TASKGROUP_END:
3942 remove = !check_omp_nesting_restrictions (stmt, ctx);
3943 break;
3944 default:
3945 break;
/* OpenMP runtime API calls are not allowed in order(concurrent)
   regions; for a scan construct look at the enclosing context. */
3947 else if (ctx)
3949 omp_context *octx = ctx;
3950 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SCAN && ctx->outer)
3951 octx = ctx->outer;
3952 if (octx->order_concurrent && omp_runtime_api_call (fndecl))
3954 remove = true;
3955 error_at (gimple_location (stmt),
3956 "OpenMP runtime API call %qD in a region with "
3957 "%<order(concurrent)%> clause", fndecl);
/* An invalidly-nested statement has already been diagnosed; drop it by
   replacing it with a nop so later passes never see it. */
3962 if (remove)
3964 stmt = gimple_build_nop ();
3965 gsi_replace (gsi, stmt, false);
3968 *handled_ops_p = true;
/* Dispatch on the construct kind and build/scan its omp_context. */
3970 switch (gimple_code (stmt))
3972 case GIMPLE_OMP_PARALLEL:
3973 taskreg_nesting_level++;
3974 scan_omp_parallel (gsi, ctx);
3975 taskreg_nesting_level--;
3976 break;
3978 case GIMPLE_OMP_TASK:
3979 taskreg_nesting_level++;
3980 scan_omp_task (gsi, ctx);
3981 taskreg_nesting_level--;
3982 break;
3984 case GIMPLE_OMP_FOR:
/* A simd with an inscan reduction combined into an outer construct is
   handled by the dedicated simd-scan path. */
3985 if ((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
3986 == GF_OMP_FOR_KIND_SIMD)
3987 && gimple_omp_for_combined_into_p (stmt)
3988 && gimple_code (ctx->stmt) != GIMPLE_OMP_SCAN)
3990 tree clauses = gimple_omp_for_clauses (as_a <gomp_for *> (stmt));
3991 tree c = omp_find_clause (clauses, OMP_CLAUSE_REDUCTION);
3992 if (c && OMP_CLAUSE_REDUCTION_INSCAN (c) && !seen_error ())
3994 scan_omp_simd_scan (gsi, as_a <gomp_for *> (stmt), ctx);
3995 break;
/* Non-collapsed simd in a (possibly) offloaded context may be
   versioned for SIMT execution. */
3998 if ((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
3999 == GF_OMP_FOR_KIND_SIMD)
4000 && omp_maybe_offloaded_ctx (ctx)
4001 && omp_max_simt_vf ()
4002 && gimple_omp_for_collapse (stmt) == 1)
4003 scan_omp_simd (gsi, as_a <gomp_for *> (stmt), ctx);
4004 else
4005 scan_omp_for (as_a <gomp_for *> (stmt), ctx);
4006 break;
4008 case GIMPLE_OMP_SECTIONS:
4009 scan_omp_sections (as_a <gomp_sections *> (stmt), ctx);
4010 break;
4012 case GIMPLE_OMP_SINGLE:
4013 scan_omp_single (as_a <gomp_single *> (stmt), ctx);
4014 break;
4016 case GIMPLE_OMP_SCAN:
/* Record the scan direction on the enclosing context before scanning
   the body like the other simple constructs below. */
4017 if (tree clauses = gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt)))
4019 if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_INCLUSIVE)
4020 ctx->scan_inclusive = true;
4021 else if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_EXCLUSIVE)
4022 ctx->scan_exclusive = true;
4024 /* FALLTHRU */
4025 case GIMPLE_OMP_SECTION:
4026 case GIMPLE_OMP_MASTER:
4027 case GIMPLE_OMP_ORDERED:
4028 case GIMPLE_OMP_CRITICAL:
4029 ctx = new_omp_context (stmt, ctx);
4030 scan_omp (gimple_omp_body_ptr (stmt), ctx);
4031 break;
4033 case GIMPLE_OMP_TASKGROUP:
4034 ctx = new_omp_context (stmt, ctx);
4035 scan_sharing_clauses (gimple_omp_taskgroup_clauses (stmt), ctx);
4036 scan_omp (gimple_omp_body_ptr (stmt), ctx);
4037 break;
4039 case GIMPLE_OMP_TARGET:
/* Offloaded target regions count as taskreg nesting, like parallel
   and task. */
4040 if (is_gimple_omp_offloaded (stmt))
4042 taskreg_nesting_level++;
4043 scan_omp_target (as_a <gomp_target *> (stmt), ctx);
4044 taskreg_nesting_level--;
4046 else
4047 scan_omp_target (as_a <gomp_target *> (stmt), ctx);
4048 break;
4050 case GIMPLE_OMP_TEAMS:
/* Host teams are outlined like parallel/task regions. */
4051 if (gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
4053 taskreg_nesting_level++;
4054 scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
4055 taskreg_nesting_level--;
4057 else
4058 scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
4059 break;
4061 case GIMPLE_BIND:
4063 tree var;
/* Let the walker descend into the bind body; pre-seed the decl map
   with identity entries so bind-local vars are not remapped. */
4065 *handled_ops_p = false;
4066 if (ctx)
4067 for (var = gimple_bind_vars (as_a <gbind *> (stmt));
4068 var ;
4069 var = DECL_CHAIN (var))
4070 insert_decl_map (&ctx->cb, var, var);
4072 break;
4073 default:
4074 *handled_ops_p = false;
4075 break;
4078 return NULL_TREE;
4082 /* Scan all the statements starting at the current statement. CTX
4083 contains context information about the OMP directives and
4084 clauses found during the scan. */
4086 static void
4087 scan_omp (gimple_seq *body_p, omp_context *ctx)
4089 location_t saved_location;
4090 struct walk_stmt_info wi;
4092 memset (&wi, 0, sizeof (wi));
4093 wi.info = ctx;
4094 wi.want_locations = true;
4096 saved_location = input_location;
4097 walk_gimple_seq_mod (body_p, scan_omp_1_stmt, scan_omp_1_op, &wi);
4098 input_location = saved_location;
4101 /* Re-gimplification and code generation routines. */
4103 /* Remove omp_member_access_dummy_var variables from gimple_bind_vars
4104 of BIND if in a method. */
4106 static void
4107 maybe_remove_omp_member_access_dummy_vars (gbind *bind)
4109 if (DECL_ARGUMENTS (current_function_decl)
4110 && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
4111 && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
4112 == POINTER_TYPE))
4114 tree vars = gimple_bind_vars (bind);
4115 for (tree *pvar = &vars; *pvar; )
4116 if (omp_member_access_dummy_var (*pvar))
4117 *pvar = DECL_CHAIN (*pvar);
4118 else
4119 pvar = &DECL_CHAIN (*pvar);
4120 gimple_bind_set_vars (bind, vars);
4124 /* Remove omp_member_access_dummy_var variables from BLOCK_VARS of
4125 block and its subblocks. */
4127 static void
4128 remove_member_access_dummy_vars (tree block)
4130 for (tree *pvar = &BLOCK_VARS (block); *pvar; )
4131 if (omp_member_access_dummy_var (*pvar))
4132 *pvar = DECL_CHAIN (*pvar);
4133 else
4134 pvar = &DECL_CHAIN (*pvar);
4136 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
4137 remove_member_access_dummy_vars (block);
4140 /* If a context was created for STMT when it was scanned, return it. */
4142 static omp_context *
4143 maybe_lookup_ctx (gimple *stmt)
4145 splay_tree_node n;
4146 n = splay_tree_lookup (all_contexts, (splay_tree_key) stmt);
4147 return n ? (omp_context *) n->value : NULL;
4151 /* Find the mapping for DECL in CTX or the immediately enclosing
4152 context that has a mapping for DECL.
4154 If CTX is a nested parallel directive, we may have to use the decl
4155 mappings created in CTX's parent context. Suppose that we have the
4156 following parallel nesting (variable UIDs showed for clarity):
4158 iD.1562 = 0;
4159 #omp parallel shared(iD.1562) -> outer parallel
4160 iD.1562 = iD.1562 + 1;
4162 #omp parallel shared (iD.1562) -> inner parallel
4163 iD.1562 = iD.1562 - 1;
4165 Each parallel structure will create a distinct .omp_data_s structure
4166 for copying iD.1562 in/out of the directive:
4168 outer parallel .omp_data_s.1.i -> iD.1562
4169 inner parallel .omp_data_s.2.i -> iD.1562
4171 A shared variable mapping will produce a copy-out operation before
4172 the parallel directive and a copy-in operation after it. So, in
4173 this case we would have:
4175 iD.1562 = 0;
4176 .omp_data_o.1.i = iD.1562;
4177 #omp parallel shared(iD.1562) -> outer parallel
4178 .omp_data_i.1 = &.omp_data_o.1
4179 .omp_data_i.1->i = .omp_data_i.1->i + 1;
4181 .omp_data_o.2.i = iD.1562; -> **
4182 #omp parallel shared(iD.1562) -> inner parallel
4183 .omp_data_i.2 = &.omp_data_o.2
4184 .omp_data_i.2->i = .omp_data_i.2->i - 1;
4187 ** This is a problem. The symbol iD.1562 cannot be referenced
4188 inside the body of the outer parallel region. But since we are
4189 emitting this copy operation while expanding the inner parallel
4190 directive, we need to access the CTX structure of the outer
4191 parallel directive to get the correct mapping:
4193 .omp_data_o.2.i = .omp_data_i.1->i
4195 Since there may be other workshare or parallel directives enclosing
4196 the parallel directive, it may be necessary to walk up the context
4197 parent chain. This is not a problem in general because nested
4198 parallelism happens only rarely. */
4200 static tree
4201 lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
4203 tree t;
4204 omp_context *up;
4206 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
4207 t = maybe_lookup_decl (decl, up);
4209 gcc_assert (!ctx->is_nested || t || is_global_var (decl));
4211 return t ? t : decl;
4215 /* Similar to lookup_decl_in_outer_ctx, but return DECL if not found
4216 in outer contexts. */
4218 static tree
4219 maybe_lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
4221 tree t = NULL;
4222 omp_context *up;
4224 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
4225 t = maybe_lookup_decl (decl, up);
4227 return t ? t : decl;
4231 /* Construct the initialization value for reduction operation OP. */
4233 tree
4234 omp_reduction_init_op (location_t loc, enum tree_code op, tree type)
4236 switch (op)
4238 case PLUS_EXPR:
4239 case MINUS_EXPR:
4240 case BIT_IOR_EXPR:
4241 case BIT_XOR_EXPR:
4242 case TRUTH_OR_EXPR:
4243 case TRUTH_ORIF_EXPR:
4244 case TRUTH_XOR_EXPR:
4245 case NE_EXPR:
4246 return build_zero_cst (type);
4248 case MULT_EXPR:
4249 case TRUTH_AND_EXPR:
4250 case TRUTH_ANDIF_EXPR:
4251 case EQ_EXPR:
4252 return fold_convert_loc (loc, type, integer_one_node);
4254 case BIT_AND_EXPR:
4255 return fold_convert_loc (loc, type, integer_minus_one_node);
4257 case MAX_EXPR:
4258 if (SCALAR_FLOAT_TYPE_P (type))
4260 REAL_VALUE_TYPE max, min;
4261 if (HONOR_INFINITIES (type))
4263 real_inf (&max);
4264 real_arithmetic (&min, NEGATE_EXPR, &max, NULL);
4266 else
4267 real_maxval (&min, 1, TYPE_MODE (type));
4268 return build_real (type, min);
4270 else if (POINTER_TYPE_P (type))
4272 wide_int min
4273 = wi::min_value (TYPE_PRECISION (type), TYPE_SIGN (type));
4274 return wide_int_to_tree (type, min);
4276 else
4278 gcc_assert (INTEGRAL_TYPE_P (type));
4279 return TYPE_MIN_VALUE (type);
4282 case MIN_EXPR:
4283 if (SCALAR_FLOAT_TYPE_P (type))
4285 REAL_VALUE_TYPE max;
4286 if (HONOR_INFINITIES (type))
4287 real_inf (&max);
4288 else
4289 real_maxval (&max, 0, TYPE_MODE (type));
4290 return build_real (type, max);
4292 else if (POINTER_TYPE_P (type))
4294 wide_int max
4295 = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
4296 return wide_int_to_tree (type, max);
4298 else
4300 gcc_assert (INTEGRAL_TYPE_P (type));
4301 return TYPE_MAX_VALUE (type);
4304 default:
4305 gcc_unreachable ();
4309 /* Construct the initialization value for reduction CLAUSE. */
4311 tree
4312 omp_reduction_init (tree clause, tree type)
4314 return omp_reduction_init_op (OMP_CLAUSE_LOCATION (clause),
4315 OMP_CLAUSE_REDUCTION_CODE (clause), type);
4318 /* Return alignment to be assumed for var in CLAUSE, which should be
4319 OMP_CLAUSE_ALIGNED. */
4321 static tree
4322 omp_clause_aligned_alignment (tree clause)
4324 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
4325 return OMP_CLAUSE_ALIGNED_ALIGNMENT (clause);
4327 /* Otherwise return implementation defined alignment. */
4328 unsigned int al = 1;
4329 opt_scalar_mode mode_iter;
4330 auto_vector_modes modes;
4331 targetm.vectorize.autovectorize_vector_modes (&modes, true);
4332 static enum mode_class classes[]
4333 = { MODE_INT, MODE_VECTOR_INT, MODE_FLOAT, MODE_VECTOR_FLOAT };
4334 for (int i = 0; i < 4; i += 2)
4335 /* The for loop above dictates that we only walk through scalar classes. */
4336 FOR_EACH_MODE_IN_CLASS (mode_iter, classes[i])
4338 scalar_mode mode = mode_iter.require ();
4339 machine_mode vmode = targetm.vectorize.preferred_simd_mode (mode);
4340 if (GET_MODE_CLASS (vmode) != classes[i + 1])
4341 continue;
4342 machine_mode alt_vmode;
4343 for (unsigned int j = 0; j < modes.length (); ++j)
4344 if (related_vector_mode (modes[j], mode).exists (&alt_vmode)
4345 && known_ge (GET_MODE_SIZE (alt_vmode), GET_MODE_SIZE (vmode)))
4346 vmode = alt_vmode;
4348 tree type = lang_hooks.types.type_for_mode (mode, 1);
4349 if (type == NULL_TREE || TYPE_MODE (type) != mode)
4350 continue;
4351 type = build_vector_type_for_mode (type, vmode);
4352 if (TYPE_MODE (type) != vmode)
4353 continue;
4354 if (TYPE_ALIGN_UNIT (type) > al)
4355 al = TYPE_ALIGN_UNIT (type);
4357 return build_int_cst (integer_type_node, al);
4361 /* This structure is part of the interface between lower_rec_simd_input_clauses
4362 and lower_rec_input_clauses. */
4364 class omplow_simd_context {
4365 public:
     /* Zero-initialize every member, including the vec and poly_uint64.  */
4366 omplow_simd_context () { memset (this, 0, sizeof (*this)); }
     /* Lane index used to subscript "omp simd array" temporaries on reads.  */
4367 tree idx;
     /* Lane index used to subscript the per-lane arrays on writes.  */
4368 tree lane;
     /* Lane from which inscan-reduction results are read back; created
	lazily in lower_rec_simd_input_clauses.  */
4369 tree lastlane;
     /* ADDR_EXPR arguments collected for SIMT privatized variables
	(plus a NULL_TREE placeholder pushed for the simduid).  */
4370 vec<tree, va_heap> simt_eargs;
     /* Clobber statements releasing SIMT privatized storage.  */
4371 gimple_seq simt_dlist;
     /* Maximum vectorization factor; 0 = not yet computed, 1 disables
	per-lane simd privatization.  */
4372 poly_uint64_pod max_vf;
     /* True when lowering for SIMT rather than classic SIMD.  */
4373 bool is_simt;
4376 /* Helper function of lower_rec_input_clauses, used for #pragma omp simd
4377 privatization. */
     /* On success (return true) IVAR and LVAR are set to the per-lane read
	and write accesses for NEW_VAR; for inscan reductions *RVAR (and,
	for exclusive scan, *RVAR2) receive additional per-lane accesses.
	Returns false when max_vf ends up 1, i.e. no simd privatization.  */
4379 static bool
4380 lower_rec_simd_input_clauses (tree new_var, omp_context *ctx,
4381 omplow_simd_context *sctx, tree &ivar,
4382 tree &lvar, tree *rvar = NULL,
4383 tree *rvar2 = NULL)
     /* Compute sctx->max_vf lazily on the first call.  */
4385 if (known_eq (sctx->max_vf, 0U))
4387 sctx->max_vf = sctx->is_simt ? omp_max_simt_vf () : omp_max_vf ();
4388 if (maybe_gt (sctx->max_vf, 1U))
     /* Clamp max_vf by an explicit safelen clause, if present.  */
4390 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
4391 OMP_CLAUSE_SAFELEN);
4392 if (c)
4394 poly_uint64 safe_len;
4395 if (!poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
4396 || maybe_lt (safe_len, 1U))
4397 sctx->max_vf = 1;
4398 else
4399 sctx->max_vf = lower_bound (sctx->max_vf, safe_len);
     /* Certain reductions force max_vf back to 1 under SIMT.  */
4402 if (sctx->is_simt && !known_eq (sctx->max_vf, 1U))
4404 for (tree c = gimple_omp_for_clauses (ctx->stmt); c;
4405 c = OMP_CLAUSE_CHAIN (c))
4407 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
4408 continue;
4410 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4412 /* UDR reductions are not supported yet for SIMT, disable
4413 SIMT. */
4414 sctx->max_vf = 1;
4415 break;
4418 if (truth_value_p (OMP_CLAUSE_REDUCTION_CODE (c))
4419 && !INTEGRAL_TYPE_P (TREE_TYPE (new_var)))
4421 /* Doing boolean operations on non-integral types is
4422 for conformance only, it's not worth supporting this
4423 for SIMT. */
4424 sctx->max_vf = 1;
4425 break;
     /* Lane index variables shared by all privatized vars of the loop.  */
4429 if (maybe_gt (sctx->max_vf, 1U))
4431 sctx->idx = create_tmp_var (unsigned_type_node);
4432 sctx->lane = create_tmp_var (unsigned_type_node);
     /* max_vf == 1: no per-lane privatization is done.  */
4435 if (known_eq (sctx->max_vf, 1U))
4436 return false;
4438 if (sctx->is_simt)
     /* SIMT: registers are privatized as-is; addressable variables get a
	private copy whose storage is clobbered via simt_dlist and whose
	address is passed through simt_eargs.  */
4440 if (is_gimple_reg (new_var))
4442 ivar = lvar = new_var;
4443 return true;
4445 tree type = TREE_TYPE (new_var), ptype = build_pointer_type (type);
4446 ivar = lvar = create_tmp_var (type);
4447 TREE_ADDRESSABLE (ivar) = 1;
4448 DECL_ATTRIBUTES (ivar) = tree_cons (get_identifier ("omp simt private"),
4449 NULL, DECL_ATTRIBUTES (ivar));
4450 sctx->simt_eargs.safe_push (build1 (ADDR_EXPR, ptype, ivar));
4451 tree clobber = build_clobber (type);
4452 gimple *g = gimple_build_assign (ivar, clobber);
4453 gimple_seq_add_stmt (&sctx->simt_dlist, g);
4455 else
     /* Classic SIMD: create an "omp simd array" with max_vf elements;
	reads use lane sctx->idx, writes use lane sctx->lane.  */
4457 tree atype = build_array_type_nelts (TREE_TYPE (new_var), sctx->max_vf);
4458 tree avar = create_tmp_var_raw (atype);
4459 if (TREE_ADDRESSABLE (new_var))
4460 TREE_ADDRESSABLE (avar) = 1;
4461 DECL_ATTRIBUTES (avar)
4462 = tree_cons (get_identifier ("omp simd array"), NULL,
4463 DECL_ATTRIBUTES (avar));
4464 gimple_add_tmp_var (avar);
4465 tree iavar = avar;
4466 if (rvar && !ctx->for_simd_scan_phase)
4468 /* For inscan reductions, create another array temporary,
4469 which will hold the reduced value. */
4470 iavar = create_tmp_var_raw (atype);
4471 if (TREE_ADDRESSABLE (new_var))
4472 TREE_ADDRESSABLE (iavar) = 1;
4473 DECL_ATTRIBUTES (iavar)
4474 = tree_cons (get_identifier ("omp simd array"), NULL,
4475 tree_cons (get_identifier ("omp simd inscan"), NULL,
4476 DECL_ATTRIBUTES (iavar)));
4477 gimple_add_tmp_var (iavar);
4478 ctx->cb.decl_map->put (avar, iavar);
4479 if (sctx->lastlane == NULL_TREE)
4480 sctx->lastlane = create_tmp_var (unsigned_type_node);
4481 *rvar = build4 (ARRAY_REF, TREE_TYPE (new_var), iavar,
4482 sctx->lastlane, NULL_TREE, NULL_TREE);
4483 TREE_THIS_NOTRAP (*rvar) = 1;
4485 if (ctx->scan_exclusive)
4487 /* And for exclusive scan yet another one, which will
4488 hold the value during the scan phase. */
4489 tree savar = create_tmp_var_raw (atype);
4490 if (TREE_ADDRESSABLE (new_var))
4491 TREE_ADDRESSABLE (savar) = 1;
4492 DECL_ATTRIBUTES (savar)
4493 = tree_cons (get_identifier ("omp simd array"), NULL,
4494 tree_cons (get_identifier ("omp simd inscan "
4495 "exclusive"), NULL,
4496 DECL_ATTRIBUTES (savar)));
4497 gimple_add_tmp_var (savar);
4498 ctx->cb.decl_map->put (iavar, savar);
4499 *rvar2 = build4 (ARRAY_REF, TREE_TYPE (new_var), savar,
4500 sctx->idx, NULL_TREE, NULL_TREE);
4501 TREE_THIS_NOTRAP (*rvar2) = 1;
4504 ivar = build4 (ARRAY_REF, TREE_TYPE (new_var), iavar, sctx->idx,
4505 NULL_TREE, NULL_TREE);
4506 lvar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, sctx->lane,
4507 NULL_TREE, NULL_TREE);
4508 TREE_THIS_NOTRAP (ivar) = 1;
4509 TREE_THIS_NOTRAP (lvar) = 1;
     /* Redirect uses of NEW_VAR to the per-lane element LVAR.  */
4511 if (DECL_P (new_var))
4513 SET_DECL_VALUE_EXPR (new_var, lvar);
4514 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4516 return true;
4519 /* Helper function of lower_rec_input_clauses. For a reference
4520 in simd reduction, add an underlying variable it will reference. */
4522 static void
4523 handle_simd_reference (location_t loc, tree new_vard, gimple_seq *ilist)
4525 tree z = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_vard)));
4526 if (TREE_CONSTANT (z))
4528 z = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_vard)),
4529 get_name (new_vard));
4530 gimple_add_tmp_var (z);
4531 TREE_ADDRESSABLE (z) = 1;
4532 z = build_fold_addr_expr_loc (loc, z);
4533 gimplify_assign (new_vard, z, ilist);
4537 /* Helper function for lower_rec_input_clauses. Emit into ilist sequence
4538 code to emit (type) (tskred_temp[idx]). */
4540 static tree
4541 task_reduction_read (gimple_seq *ilist, tree tskred_temp, tree type,
4542 unsigned idx)
4544 unsigned HOST_WIDE_INT sz
4545 = tree_to_uhwi (TYPE_SIZE_UNIT (pointer_sized_int_node));
4546 tree r = build2 (MEM_REF, pointer_sized_int_node,
4547 tskred_temp, build_int_cst (TREE_TYPE (tskred_temp),
4548 idx * sz));
4549 tree v = create_tmp_var (pointer_sized_int_node);
4550 gimple *g = gimple_build_assign (v, r);
4551 gimple_seq_add_stmt (ilist, g);
4552 if (!useless_type_conversion_p (type, pointer_sized_int_node))
4554 v = create_tmp_var (type);
4555 g = gimple_build_assign (v, NOP_EXPR, gimple_assign_lhs (g));
4556 gimple_seq_add_stmt (ilist, g);
4558 return v;
4561 /* Lower early initialization of privatized variable NEW_VAR
4562 if it needs an allocator (has allocate clause). */
     /* Returns true when storage was obtained via GOMP_alloc; in that
	case ALLOCATOR and ALLOCATE_PTR are set for the caller, which is
	then responsible for releasing the allocation.  NEW_VAR may be a
	decl or (with SIZE supplied) a type.  */
4564 static bool
4565 lower_private_allocate (tree var, tree new_var, tree &allocator,
4566 tree &allocate_ptr, gimple_seq *ilist,
4567 omp_context *ctx, bool is_ref, tree size)
     /* Nothing to do if the caller already chose an allocator.  */
4569 if (allocator)
4570 return false;
4571 gcc_assert (allocate_ptr == NULL_TREE);
     /* Look up the allocator recorded for VAR by an allocate clause.  */
4572 if (ctx->allocate_map
4573 && (DECL_P (new_var) || (TYPE_P (new_var) && size)))
4574 if (tree *allocatorp = ctx->allocate_map->get (var))
4575 allocator = *allocatorp;
4576 if (allocator == NULL_TREE)
4577 return false;
     /* References are allocated when handling the reference itself
	(is_ref), not the pointed-to value; bail out otherwise.  */
4578 if (!is_ref && omp_is_reference (var))
4580 allocator = NULL_TREE;
4581 return false;
     /* Materialize a non-constant allocator expression into a
	pointer-sized temporary.  */
4584 if (TREE_CODE (allocator) != INTEGER_CST)
4585 allocator = build_outer_var_ref (allocator, ctx);
4586 allocator = fold_convert (pointer_sized_int_node, allocator);
4587 if (TREE_CODE (allocator) != INTEGER_CST)
4589 tree var = create_tmp_var (TREE_TYPE (allocator));
4590 gimplify_assign (var, allocator, ilist);
4591 allocator = var;
     /* Determine the pointer type, alignment and size of the
	allocation from NEW_VAR's kind (type / reference / decl).  */
4594 tree ptr_type, align, sz = size;
4595 if (TYPE_P (new_var))
4597 ptr_type = build_pointer_type (new_var);
4598 align = build_int_cst (size_type_node, TYPE_ALIGN_UNIT (new_var));
4600 else if (is_ref)
4602 ptr_type = build_pointer_type (TREE_TYPE (TREE_TYPE (new_var)));
4603 align = build_int_cst (size_type_node,
4604 TYPE_ALIGN_UNIT (TREE_TYPE (ptr_type)));
4606 else
4608 ptr_type = build_pointer_type (TREE_TYPE (new_var));
4609 align = build_int_cst (size_type_node, DECL_ALIGN_UNIT (new_var));
4610 if (sz == NULL_TREE)
4611 sz = fold_convert (size_type_node, DECL_SIZE_UNIT (new_var));
     /* Non-constant sizes are computed once into a temporary.  */
4613 if (TREE_CODE (sz) != INTEGER_CST)
4615 tree szvar = create_tmp_var (size_type_node);
4616 gimplify_assign (szvar, sz, ilist);
4617 sz = szvar;
     /* Emit allocate_ptr = GOMP_alloc (align, sz, allocator).  */
4619 allocate_ptr = create_tmp_var (ptr_type);
4620 tree a = builtin_decl_explicit (BUILT_IN_GOMP_ALLOC);
4621 gimple *g = gimple_build_call (a, 3, align, sz, allocator);
4622 gimple_call_set_lhs (g, allocate_ptr);
4623 gimple_seq_add_stmt (ilist, g);
     /* For non-references, make NEW_VAR an alias of *allocate_ptr.  */
4624 if (!is_ref)
4626 tree x = build_simple_mem_ref (allocate_ptr);
4627 TREE_THIS_NOTRAP (x) = 1;
4628 SET_DECL_VALUE_EXPR (new_var, x);
4629 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4631 return true;
4634 /* Generate code to implement the input clauses, FIRSTPRIVATE and COPYIN,
4635 from the receiver (aka child) side and initializers for REFERENCE_TYPE
4636 private variables. Initialization statements go in ILIST, while calls
4637 to destructors go in DLIST. */
4639 static void
4640 lower_rec_input_clauses (tree clauses, gimple_seq *ilist, gimple_seq *dlist,
4641 omp_context *ctx, struct omp_for_data *fd)
4643 tree c, copyin_seq, x, ptr;
4644 bool copyin_by_ref = false;
4645 bool lastprivate_firstprivate = false;
4646 bool reduction_omp_orig_ref = false;
4647 int pass;
4648 bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
4649 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD);
4650 omplow_simd_context sctx = omplow_simd_context ();
4651 tree simt_lane = NULL_TREE, simtrec = NULL_TREE;
4652 tree ivar = NULL_TREE, lvar = NULL_TREE, uid = NULL_TREE;
4653 gimple_seq llist[4] = { };
4654 tree nonconst_simd_if = NULL_TREE;
4656 copyin_seq = NULL;
4657 sctx.is_simt = is_simd && omp_find_clause (clauses, OMP_CLAUSE__SIMT_);
4659 /* Set max_vf=1 (which will later enforce safelen=1) in simd loops
4660 with data sharing clauses referencing variable sized vars. That
4661 is unnecessarily hard to support and very unlikely to result in
4662 vectorized code anyway. */
4663 if (is_simd)
4664 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
4665 switch (OMP_CLAUSE_CODE (c))
4667 case OMP_CLAUSE_LINEAR:
4668 if (OMP_CLAUSE_LINEAR_ARRAY (c))
4669 sctx.max_vf = 1;
4670 /* FALLTHRU */
4671 case OMP_CLAUSE_PRIVATE:
4672 case OMP_CLAUSE_FIRSTPRIVATE:
4673 case OMP_CLAUSE_LASTPRIVATE:
4674 if (is_variable_sized (OMP_CLAUSE_DECL (c)))
4675 sctx.max_vf = 1;
4676 else if (omp_is_reference (OMP_CLAUSE_DECL (c)))
4678 tree rtype = TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c)));
4679 if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype)))
4680 sctx.max_vf = 1;
4682 break;
4683 case OMP_CLAUSE_REDUCTION:
4684 case OMP_CLAUSE_IN_REDUCTION:
4685 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
4686 || is_variable_sized (OMP_CLAUSE_DECL (c)))
4687 sctx.max_vf = 1;
4688 else if (omp_is_reference (OMP_CLAUSE_DECL (c)))
4690 tree rtype = TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c)));
4691 if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype)))
4692 sctx.max_vf = 1;
4694 break;
4695 case OMP_CLAUSE_IF:
4696 if (integer_zerop (OMP_CLAUSE_IF_EXPR (c)))
4697 sctx.max_vf = 1;
4698 else if (TREE_CODE (OMP_CLAUSE_IF_EXPR (c)) != INTEGER_CST)
4699 nonconst_simd_if = OMP_CLAUSE_IF_EXPR (c);
4700 break;
4701 case OMP_CLAUSE_SIMDLEN:
4702 if (integer_onep (OMP_CLAUSE_SIMDLEN_EXPR (c)))
4703 sctx.max_vf = 1;
4704 break;
4705 case OMP_CLAUSE__CONDTEMP_:
4706 /* FIXME: lastprivate(conditional:) not handled for SIMT yet. */
4707 if (sctx.is_simt)
4708 sctx.max_vf = 1;
4709 break;
4710 default:
4711 continue;
4714 /* Add a placeholder for simduid. */
4715 if (sctx.is_simt && maybe_ne (sctx.max_vf, 1U))
4716 sctx.simt_eargs.safe_push (NULL_TREE);
4718 unsigned task_reduction_cnt = 0;
4719 unsigned task_reduction_cntorig = 0;
4720 unsigned task_reduction_cnt_full = 0;
4721 unsigned task_reduction_cntorig_full = 0;
4722 unsigned task_reduction_other_cnt = 0;
4723 tree tskred_atype = NULL_TREE, tskred_avar = NULL_TREE;
4724 tree tskred_base = NULL_TREE, tskred_temp = NULL_TREE;
4725 /* Do all the fixed sized types in the first pass, and the variable sized
4726 types in the second pass. This makes sure that the scalar arguments to
4727 the variable sized types are processed before we use them in the
4728 variable sized operations. For task reductions we use 4 passes, in the
4729 first two we ignore them, in the third one gather arguments for
4730 GOMP_task_reduction_remap call and in the last pass actually handle
4731 the task reductions. */
4732 for (pass = 0; pass < ((task_reduction_cnt || task_reduction_other_cnt)
4733 ? 4 : 2); ++pass)
4735 if (pass == 2 && task_reduction_cnt)
4737 tskred_atype
4738 = build_array_type_nelts (ptr_type_node, task_reduction_cnt
4739 + task_reduction_cntorig);
4740 tskred_avar = create_tmp_var_raw (tskred_atype);
4741 gimple_add_tmp_var (tskred_avar);
4742 TREE_ADDRESSABLE (tskred_avar) = 1;
4743 task_reduction_cnt_full = task_reduction_cnt;
4744 task_reduction_cntorig_full = task_reduction_cntorig;
4746 else if (pass == 3 && task_reduction_cnt)
4748 x = builtin_decl_explicit (BUILT_IN_GOMP_TASK_REDUCTION_REMAP);
4749 gimple *g
4750 = gimple_build_call (x, 3, size_int (task_reduction_cnt),
4751 size_int (task_reduction_cntorig),
4752 build_fold_addr_expr (tskred_avar));
4753 gimple_seq_add_stmt (ilist, g);
4755 if (pass == 3 && task_reduction_other_cnt)
4757 /* For reduction clauses, build
4758 tskred_base = (void *) tskred_temp[2]
4759 + omp_get_thread_num () * tskred_temp[1]
4760 or if tskred_temp[1] is known to be constant, that constant
4761 directly. This is the start of the private reduction copy block
4762 for the current thread. */
4763 tree v = create_tmp_var (integer_type_node);
4764 x = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
4765 gimple *g = gimple_build_call (x, 0);
4766 gimple_call_set_lhs (g, v);
4767 gimple_seq_add_stmt (ilist, g);
4768 c = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
4769 tskred_temp = OMP_CLAUSE_DECL (c);
4770 if (is_taskreg_ctx (ctx))
4771 tskred_temp = lookup_decl (tskred_temp, ctx);
4772 tree v2 = create_tmp_var (sizetype);
4773 g = gimple_build_assign (v2, NOP_EXPR, v);
4774 gimple_seq_add_stmt (ilist, g);
4775 if (ctx->task_reductions[0])
4776 v = fold_convert (sizetype, ctx->task_reductions[0]);
4777 else
4778 v = task_reduction_read (ilist, tskred_temp, sizetype, 1);
4779 tree v3 = create_tmp_var (sizetype);
4780 g = gimple_build_assign (v3, MULT_EXPR, v2, v);
4781 gimple_seq_add_stmt (ilist, g);
4782 v = task_reduction_read (ilist, tskred_temp, ptr_type_node, 2);
4783 tskred_base = create_tmp_var (ptr_type_node);
4784 g = gimple_build_assign (tskred_base, POINTER_PLUS_EXPR, v, v3);
4785 gimple_seq_add_stmt (ilist, g);
4787 task_reduction_cnt = 0;
4788 task_reduction_cntorig = 0;
4789 task_reduction_other_cnt = 0;
4790 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
4792 enum omp_clause_code c_kind = OMP_CLAUSE_CODE (c);
4793 tree var, new_var;
4794 bool by_ref;
4795 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
4796 bool task_reduction_p = false;
4797 bool task_reduction_needs_orig_p = false;
4798 tree cond = NULL_TREE;
4799 tree allocator, allocate_ptr;
4801 switch (c_kind)
4803 case OMP_CLAUSE_PRIVATE:
4804 if (OMP_CLAUSE_PRIVATE_DEBUG (c))
4805 continue;
4806 break;
4807 case OMP_CLAUSE_SHARED:
4808 /* Ignore shared directives in teams construct inside
4809 of target construct. */
4810 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
4811 && !is_host_teams_ctx (ctx))
4812 continue;
4813 if (maybe_lookup_decl (OMP_CLAUSE_DECL (c), ctx) == NULL)
4815 gcc_assert (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c)
4816 || is_global_var (OMP_CLAUSE_DECL (c)));
4817 continue;
4819 case OMP_CLAUSE_FIRSTPRIVATE:
4820 case OMP_CLAUSE_COPYIN:
4821 break;
4822 case OMP_CLAUSE_LINEAR:
4823 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
4824 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
4825 lastprivate_firstprivate = true;
4826 break;
4827 case OMP_CLAUSE_REDUCTION:
4828 case OMP_CLAUSE_IN_REDUCTION:
4829 if (is_task_ctx (ctx) || OMP_CLAUSE_REDUCTION_TASK (c))
4831 task_reduction_p = true;
4832 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
4834 task_reduction_other_cnt++;
4835 if (pass == 2)
4836 continue;
4838 else
4839 task_reduction_cnt++;
4840 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
4842 var = OMP_CLAUSE_DECL (c);
4843 /* If var is a global variable that isn't privatized
4844 in outer contexts, we don't need to look up the
4845 original address, it is always the address of the
4846 global variable itself. */
4847 if (!DECL_P (var)
4848 || omp_is_reference (var)
4849 || !is_global_var
4850 (maybe_lookup_decl_in_outer_ctx (var, ctx)))
4852 task_reduction_needs_orig_p = true;
4853 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
4854 task_reduction_cntorig++;
4858 else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
4859 reduction_omp_orig_ref = true;
4860 break;
4861 case OMP_CLAUSE__REDUCTEMP_:
4862 if (!is_taskreg_ctx (ctx))
4863 continue;
4864 /* FALLTHRU */
4865 case OMP_CLAUSE__LOOPTEMP_:
4866 /* Handle _looptemp_/_reductemp_ clauses only on
4867 parallel/task. */
4868 if (fd)
4869 continue;
4870 break;
4871 case OMP_CLAUSE_LASTPRIVATE:
4872 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
4874 lastprivate_firstprivate = true;
4875 if (pass != 0 || is_taskloop_ctx (ctx))
4876 continue;
4878 /* Even without corresponding firstprivate, if
4879 decl is Fortran allocatable, it needs outer var
4880 reference. */
4881 else if (pass == 0
4882 && lang_hooks.decls.omp_private_outer_ref
4883 (OMP_CLAUSE_DECL (c)))
4884 lastprivate_firstprivate = true;
4885 break;
4886 case OMP_CLAUSE_ALIGNED:
4887 if (pass != 1)
4888 continue;
4889 var = OMP_CLAUSE_DECL (c);
4890 if (TREE_CODE (TREE_TYPE (var)) == POINTER_TYPE
4891 && !is_global_var (var))
4893 new_var = maybe_lookup_decl (var, ctx);
4894 if (new_var == NULL_TREE)
4895 new_var = maybe_lookup_decl_in_outer_ctx (var, ctx);
4896 x = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
4897 tree alarg = omp_clause_aligned_alignment (c);
4898 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
4899 x = build_call_expr_loc (clause_loc, x, 2, new_var, alarg);
4900 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
4901 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
4902 gimplify_and_add (x, ilist);
4904 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
4905 && is_global_var (var))
4907 tree ptype = build_pointer_type (TREE_TYPE (var)), t, t2;
4908 new_var = lookup_decl (var, ctx);
4909 t = maybe_lookup_decl_in_outer_ctx (var, ctx);
4910 t = build_fold_addr_expr_loc (clause_loc, t);
4911 t2 = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
4912 tree alarg = omp_clause_aligned_alignment (c);
4913 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
4914 t = build_call_expr_loc (clause_loc, t2, 2, t, alarg);
4915 t = fold_convert_loc (clause_loc, ptype, t);
4916 x = create_tmp_var (ptype);
4917 t = build2 (MODIFY_EXPR, ptype, x, t);
4918 gimplify_and_add (t, ilist);
4919 t = build_simple_mem_ref_loc (clause_loc, x);
4920 SET_DECL_VALUE_EXPR (new_var, t);
4921 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4923 continue;
4924 case OMP_CLAUSE__CONDTEMP_:
4925 if (is_parallel_ctx (ctx)
4926 || (is_simd && !OMP_CLAUSE__CONDTEMP__ITER (c)))
4927 break;
4928 continue;
4929 default:
4930 continue;
4933 if (task_reduction_p != (pass >= 2))
4934 continue;
4936 allocator = NULL_TREE;
4937 allocate_ptr = NULL_TREE;
4938 new_var = var = OMP_CLAUSE_DECL (c);
4939 if ((c_kind == OMP_CLAUSE_REDUCTION
4940 || c_kind == OMP_CLAUSE_IN_REDUCTION)
4941 && TREE_CODE (var) == MEM_REF)
4943 var = TREE_OPERAND (var, 0);
4944 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
4945 var = TREE_OPERAND (var, 0);
4946 if (TREE_CODE (var) == INDIRECT_REF
4947 || TREE_CODE (var) == ADDR_EXPR)
4948 var = TREE_OPERAND (var, 0);
4949 if (is_variable_sized (var))
4951 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
4952 var = DECL_VALUE_EXPR (var);
4953 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
4954 var = TREE_OPERAND (var, 0);
4955 gcc_assert (DECL_P (var));
4957 new_var = var;
4959 if (c_kind != OMP_CLAUSE_COPYIN)
4960 new_var = lookup_decl (var, ctx);
4962 if (c_kind == OMP_CLAUSE_SHARED || c_kind == OMP_CLAUSE_COPYIN)
4964 if (pass != 0)
4965 continue;
4967 /* C/C++ array section reductions. */
4968 else if ((c_kind == OMP_CLAUSE_REDUCTION
4969 || c_kind == OMP_CLAUSE_IN_REDUCTION)
4970 && var != OMP_CLAUSE_DECL (c))
4972 if (pass == 0)
4973 continue;
4975 tree bias = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
4976 tree orig_var = TREE_OPERAND (OMP_CLAUSE_DECL (c), 0);
4978 if (TREE_CODE (orig_var) == POINTER_PLUS_EXPR)
4980 tree b = TREE_OPERAND (orig_var, 1);
4981 b = maybe_lookup_decl (b, ctx);
4982 if (b == NULL)
4984 b = TREE_OPERAND (orig_var, 1);
4985 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
4987 if (integer_zerop (bias))
4988 bias = b;
4989 else
4991 bias = fold_convert_loc (clause_loc,
4992 TREE_TYPE (b), bias);
4993 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
4994 TREE_TYPE (b), b, bias);
4996 orig_var = TREE_OPERAND (orig_var, 0);
4998 if (pass == 2)
5000 tree out = maybe_lookup_decl_in_outer_ctx (var, ctx);
5001 if (is_global_var (out)
5002 && TREE_CODE (TREE_TYPE (out)) != POINTER_TYPE
5003 && (TREE_CODE (TREE_TYPE (out)) != REFERENCE_TYPE
5004 || (TREE_CODE (TREE_TYPE (TREE_TYPE (out)))
5005 != POINTER_TYPE)))
5006 x = var;
5007 else
5009 bool by_ref = use_pointer_for_field (var, NULL);
5010 x = build_receiver_ref (var, by_ref, ctx);
5011 if (TREE_CODE (TREE_TYPE (var)) == REFERENCE_TYPE
5012 && (TREE_CODE (TREE_TYPE (TREE_TYPE (var)))
5013 == POINTER_TYPE))
5014 x = build_fold_addr_expr (x);
5016 if (TREE_CODE (orig_var) == INDIRECT_REF)
5017 x = build_simple_mem_ref (x);
5018 else if (TREE_CODE (orig_var) == ADDR_EXPR)
5020 if (var == TREE_OPERAND (orig_var, 0))
5021 x = build_fold_addr_expr (x);
5023 bias = fold_convert (sizetype, bias);
5024 x = fold_convert (ptr_type_node, x);
5025 x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
5026 TREE_TYPE (x), x, bias);
5027 unsigned cnt = task_reduction_cnt - 1;
5028 if (!task_reduction_needs_orig_p)
5029 cnt += (task_reduction_cntorig_full
5030 - task_reduction_cntorig);
5031 else
5032 cnt = task_reduction_cntorig - 1;
5033 tree r = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5034 size_int (cnt), NULL_TREE, NULL_TREE);
5035 gimplify_assign (r, x, ilist);
5036 continue;
5039 if (TREE_CODE (orig_var) == INDIRECT_REF
5040 || TREE_CODE (orig_var) == ADDR_EXPR)
5041 orig_var = TREE_OPERAND (orig_var, 0);
5042 tree d = OMP_CLAUSE_DECL (c);
5043 tree type = TREE_TYPE (d);
5044 gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
5045 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
5046 tree sz = v;
5047 const char *name = get_name (orig_var);
5048 if (pass != 3 && !TREE_CONSTANT (v))
5050 tree t = maybe_lookup_decl (v, ctx);
5051 if (t)
5052 v = t;
5053 else
5054 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
5055 gimplify_expr (&v, ilist, NULL, is_gimple_val, fb_rvalue);
5056 t = fold_build2_loc (clause_loc, PLUS_EXPR,
5057 TREE_TYPE (v), v,
5058 build_int_cst (TREE_TYPE (v), 1));
5059 sz = fold_build2_loc (clause_loc, MULT_EXPR,
5060 TREE_TYPE (v), t,
5061 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5063 if (pass == 3)
5065 tree xv = create_tmp_var (ptr_type_node);
5066 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5068 unsigned cnt = task_reduction_cnt - 1;
5069 if (!task_reduction_needs_orig_p)
5070 cnt += (task_reduction_cntorig_full
5071 - task_reduction_cntorig);
5072 else
5073 cnt = task_reduction_cntorig - 1;
5074 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5075 size_int (cnt), NULL_TREE, NULL_TREE);
5077 gimple *g = gimple_build_assign (xv, x);
5078 gimple_seq_add_stmt (ilist, g);
5080 else
5082 unsigned int idx = *ctx->task_reduction_map->get (c);
5083 tree off;
5084 if (ctx->task_reductions[1 + idx])
5085 off = fold_convert (sizetype,
5086 ctx->task_reductions[1 + idx]);
5087 else
5088 off = task_reduction_read (ilist, tskred_temp, sizetype,
5089 7 + 3 * idx + 1);
5090 gimple *g = gimple_build_assign (xv, POINTER_PLUS_EXPR,
5091 tskred_base, off);
5092 gimple_seq_add_stmt (ilist, g);
5094 x = fold_convert (build_pointer_type (boolean_type_node),
5095 xv);
5096 if (TREE_CONSTANT (v))
5097 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (x), x,
5098 TYPE_SIZE_UNIT (type));
5099 else
5101 tree t = maybe_lookup_decl (v, ctx);
5102 if (t)
5103 v = t;
5104 else
5105 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
5106 gimplify_expr (&v, ilist, NULL, is_gimple_val,
5107 fb_rvalue);
5108 t = fold_build2_loc (clause_loc, PLUS_EXPR,
5109 TREE_TYPE (v), v,
5110 build_int_cst (TREE_TYPE (v), 1));
5111 t = fold_build2_loc (clause_loc, MULT_EXPR,
5112 TREE_TYPE (v), t,
5113 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5114 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (x), x, t);
5116 cond = create_tmp_var (TREE_TYPE (x));
5117 gimplify_assign (cond, x, ilist);
5118 x = xv;
5120 else if (lower_private_allocate (var, type, allocator,
5121 allocate_ptr, ilist, ctx,
5122 true,
5123 TREE_CONSTANT (v)
5124 ? TYPE_SIZE_UNIT (type)
5125 : sz))
5126 x = allocate_ptr;
5127 else if (TREE_CONSTANT (v))
5129 x = create_tmp_var_raw (type, name);
5130 gimple_add_tmp_var (x);
5131 TREE_ADDRESSABLE (x) = 1;
5132 x = build_fold_addr_expr_loc (clause_loc, x);
5134 else
5136 tree atmp
5137 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
5138 tree al = size_int (TYPE_ALIGN (TREE_TYPE (type)));
5139 x = build_call_expr_loc (clause_loc, atmp, 2, sz, al);
5142 tree ptype = build_pointer_type (TREE_TYPE (type));
5143 x = fold_convert_loc (clause_loc, ptype, x);
5144 tree y = create_tmp_var (ptype, name);
5145 gimplify_assign (y, x, ilist);
5146 x = y;
5147 tree yb = y;
5149 if (!integer_zerop (bias))
5151 bias = fold_convert_loc (clause_loc, pointer_sized_int_node,
5152 bias);
5153 yb = fold_convert_loc (clause_loc, pointer_sized_int_node,
5155 yb = fold_build2_loc (clause_loc, MINUS_EXPR,
5156 pointer_sized_int_node, yb, bias);
5157 x = fold_convert_loc (clause_loc, TREE_TYPE (x), yb);
5158 yb = create_tmp_var (ptype, name);
5159 gimplify_assign (yb, x, ilist);
5160 x = yb;
5163 d = TREE_OPERAND (d, 0);
5164 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
5165 d = TREE_OPERAND (d, 0);
5166 if (TREE_CODE (d) == ADDR_EXPR)
5168 if (orig_var != var)
5170 gcc_assert (is_variable_sized (orig_var));
5171 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var),
5173 gimplify_assign (new_var, x, ilist);
5174 tree new_orig_var = lookup_decl (orig_var, ctx);
5175 tree t = build_fold_indirect_ref (new_var);
5176 DECL_IGNORED_P (new_var) = 0;
5177 TREE_THIS_NOTRAP (t) = 1;
5178 SET_DECL_VALUE_EXPR (new_orig_var, t);
5179 DECL_HAS_VALUE_EXPR_P (new_orig_var) = 1;
5181 else
5183 x = build2 (MEM_REF, TREE_TYPE (new_var), x,
5184 build_int_cst (ptype, 0));
5185 SET_DECL_VALUE_EXPR (new_var, x);
5186 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5189 else
5191 gcc_assert (orig_var == var);
5192 if (TREE_CODE (d) == INDIRECT_REF)
5194 x = create_tmp_var (ptype, name);
5195 TREE_ADDRESSABLE (x) = 1;
5196 gimplify_assign (x, yb, ilist);
5197 x = build_fold_addr_expr_loc (clause_loc, x);
5199 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
5200 gimplify_assign (new_var, x, ilist);
5202 /* GOMP_taskgroup_reduction_register memsets the whole
5203 array to zero. If the initializer is zero, we don't
5204 need to initialize it again, just mark it as ever
5205 used unconditionally, i.e. cond = true. */
5206 if (cond
5207 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) == NULL_TREE
5208 && initializer_zerop (omp_reduction_init (c,
5209 TREE_TYPE (type))))
5211 gimple *g = gimple_build_assign (build_simple_mem_ref (cond),
5212 boolean_true_node);
5213 gimple_seq_add_stmt (ilist, g);
5214 continue;
5216 tree end = create_artificial_label (UNKNOWN_LOCATION);
5217 if (cond)
5219 gimple *g;
5220 if (!is_parallel_ctx (ctx))
5222 tree condv = create_tmp_var (boolean_type_node);
5223 g = gimple_build_assign (condv,
5224 build_simple_mem_ref (cond));
5225 gimple_seq_add_stmt (ilist, g);
5226 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
5227 g = gimple_build_cond (NE_EXPR, condv,
5228 boolean_false_node, end, lab1);
5229 gimple_seq_add_stmt (ilist, g);
5230 gimple_seq_add_stmt (ilist, gimple_build_label (lab1));
5232 g = gimple_build_assign (build_simple_mem_ref (cond),
5233 boolean_true_node);
5234 gimple_seq_add_stmt (ilist, g);
5237 tree y1 = create_tmp_var (ptype);
5238 gimplify_assign (y1, y, ilist);
5239 tree i2 = NULL_TREE, y2 = NULL_TREE;
5240 tree body2 = NULL_TREE, end2 = NULL_TREE;
5241 tree y3 = NULL_TREE, y4 = NULL_TREE;
5242 if (task_reduction_needs_orig_p)
5244 y3 = create_tmp_var (ptype);
5245 tree ref;
5246 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5247 ref = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5248 size_int (task_reduction_cnt_full
5249 + task_reduction_cntorig - 1),
5250 NULL_TREE, NULL_TREE);
5251 else
5253 unsigned int idx = *ctx->task_reduction_map->get (c);
5254 ref = task_reduction_read (ilist, tskred_temp, ptype,
5255 7 + 3 * idx);
5257 gimplify_assign (y3, ref, ilist);
5259 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) || is_simd)
5261 if (pass != 3)
5263 y2 = create_tmp_var (ptype);
5264 gimplify_assign (y2, y, ilist);
5266 if (is_simd || OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
5268 tree ref = build_outer_var_ref (var, ctx);
5269 /* For ref build_outer_var_ref already performs this. */
5270 if (TREE_CODE (d) == INDIRECT_REF)
5271 gcc_assert (omp_is_reference (var));
5272 else if (TREE_CODE (d) == ADDR_EXPR)
5273 ref = build_fold_addr_expr (ref);
5274 else if (omp_is_reference (var))
5275 ref = build_fold_addr_expr (ref);
5276 ref = fold_convert_loc (clause_loc, ptype, ref);
5277 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
5278 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
5280 y3 = create_tmp_var (ptype);
5281 gimplify_assign (y3, unshare_expr (ref), ilist);
5283 if (is_simd)
5285 y4 = create_tmp_var (ptype);
5286 gimplify_assign (y4, ref, dlist);
5290 tree i = create_tmp_var (TREE_TYPE (v));
5291 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), ilist);
5292 tree body = create_artificial_label (UNKNOWN_LOCATION);
5293 gimple_seq_add_stmt (ilist, gimple_build_label (body));
5294 if (y2)
5296 i2 = create_tmp_var (TREE_TYPE (v));
5297 gimplify_assign (i2, build_int_cst (TREE_TYPE (v), 0), dlist);
5298 body2 = create_artificial_label (UNKNOWN_LOCATION);
5299 end2 = create_artificial_label (UNKNOWN_LOCATION);
5300 gimple_seq_add_stmt (dlist, gimple_build_label (body2));
5302 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5304 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
5305 tree decl_placeholder
5306 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
5307 SET_DECL_VALUE_EXPR (decl_placeholder,
5308 build_simple_mem_ref (y1));
5309 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
5310 SET_DECL_VALUE_EXPR (placeholder,
5311 y3 ? build_simple_mem_ref (y3)
5312 : error_mark_node);
5313 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
5314 x = lang_hooks.decls.omp_clause_default_ctor
5315 (c, build_simple_mem_ref (y1),
5316 y3 ? build_simple_mem_ref (y3) : NULL_TREE);
5317 if (x)
5318 gimplify_and_add (x, ilist);
5319 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
5321 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
5322 lower_omp (&tseq, ctx);
5323 gimple_seq_add_seq (ilist, tseq);
5325 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
5326 if (is_simd)
5328 SET_DECL_VALUE_EXPR (decl_placeholder,
5329 build_simple_mem_ref (y2));
5330 SET_DECL_VALUE_EXPR (placeholder,
5331 build_simple_mem_ref (y4));
5332 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
5333 lower_omp (&tseq, ctx);
5334 gimple_seq_add_seq (dlist, tseq);
5335 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5337 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
5338 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 0;
5339 if (y2)
5341 x = lang_hooks.decls.omp_clause_dtor
5342 (c, build_simple_mem_ref (y2));
5343 if (x)
5344 gimplify_and_add (x, dlist);
5347 else
5349 x = omp_reduction_init (c, TREE_TYPE (type));
5350 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
5352 /* reduction(-:var) sums up the partial results, so it
5353 acts identically to reduction(+:var). */
5354 if (code == MINUS_EXPR)
5355 code = PLUS_EXPR;
5357 gimplify_assign (build_simple_mem_ref (y1), x, ilist);
5358 if (is_simd)
5360 x = build2 (code, TREE_TYPE (type),
5361 build_simple_mem_ref (y4),
5362 build_simple_mem_ref (y2));
5363 gimplify_assign (build_simple_mem_ref (y4), x, dlist);
5366 gimple *g
5367 = gimple_build_assign (y1, POINTER_PLUS_EXPR, y1,
5368 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5369 gimple_seq_add_stmt (ilist, g);
5370 if (y3)
5372 g = gimple_build_assign (y3, POINTER_PLUS_EXPR, y3,
5373 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5374 gimple_seq_add_stmt (ilist, g);
5376 g = gimple_build_assign (i, PLUS_EXPR, i,
5377 build_int_cst (TREE_TYPE (i), 1));
5378 gimple_seq_add_stmt (ilist, g);
5379 g = gimple_build_cond (LE_EXPR, i, v, body, end);
5380 gimple_seq_add_stmt (ilist, g);
5381 gimple_seq_add_stmt (ilist, gimple_build_label (end));
5382 if (y2)
5384 g = gimple_build_assign (y2, POINTER_PLUS_EXPR, y2,
5385 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5386 gimple_seq_add_stmt (dlist, g);
5387 if (y4)
5389 g = gimple_build_assign
5390 (y4, POINTER_PLUS_EXPR, y4,
5391 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5392 gimple_seq_add_stmt (dlist, g);
5394 g = gimple_build_assign (i2, PLUS_EXPR, i2,
5395 build_int_cst (TREE_TYPE (i2), 1));
5396 gimple_seq_add_stmt (dlist, g);
5397 g = gimple_build_cond (LE_EXPR, i2, v, body2, end2);
5398 gimple_seq_add_stmt (dlist, g);
5399 gimple_seq_add_stmt (dlist, gimple_build_label (end2));
5401 if (allocator)
5403 tree f = builtin_decl_explicit (BUILT_IN_GOMP_FREE);
5404 g = gimple_build_call (f, 2, allocate_ptr, allocator);
5405 gimple_seq_add_stmt (dlist, g);
5407 continue;
5409 else if (pass == 2)
5411 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
5412 x = var;
5413 else
5415 bool by_ref = use_pointer_for_field (var, ctx);
5416 x = build_receiver_ref (var, by_ref, ctx);
5418 if (!omp_is_reference (var))
5419 x = build_fold_addr_expr (x);
5420 x = fold_convert (ptr_type_node, x);
5421 unsigned cnt = task_reduction_cnt - 1;
5422 if (!task_reduction_needs_orig_p)
5423 cnt += task_reduction_cntorig_full - task_reduction_cntorig;
5424 else
5425 cnt = task_reduction_cntorig - 1;
5426 tree r = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5427 size_int (cnt), NULL_TREE, NULL_TREE);
5428 gimplify_assign (r, x, ilist);
5429 continue;
5431 else if (pass == 3)
5433 tree type = TREE_TYPE (new_var);
5434 if (!omp_is_reference (var))
5435 type = build_pointer_type (type);
5436 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5438 unsigned cnt = task_reduction_cnt - 1;
5439 if (!task_reduction_needs_orig_p)
5440 cnt += (task_reduction_cntorig_full
5441 - task_reduction_cntorig);
5442 else
5443 cnt = task_reduction_cntorig - 1;
5444 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5445 size_int (cnt), NULL_TREE, NULL_TREE);
5447 else
5449 unsigned int idx = *ctx->task_reduction_map->get (c);
5450 tree off;
5451 if (ctx->task_reductions[1 + idx])
5452 off = fold_convert (sizetype,
5453 ctx->task_reductions[1 + idx]);
5454 else
5455 off = task_reduction_read (ilist, tskred_temp, sizetype,
5456 7 + 3 * idx + 1);
5457 x = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node,
5458 tskred_base, off);
5460 x = fold_convert (type, x);
5461 tree t;
5462 if (omp_is_reference (var))
5464 gimplify_assign (new_var, x, ilist);
5465 t = new_var;
5466 new_var = build_simple_mem_ref (new_var);
5468 else
5470 t = create_tmp_var (type);
5471 gimplify_assign (t, x, ilist);
5472 SET_DECL_VALUE_EXPR (new_var, build_simple_mem_ref (t));
5473 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5475 t = fold_convert (build_pointer_type (boolean_type_node), t);
5476 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t,
5477 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5478 cond = create_tmp_var (TREE_TYPE (t));
5479 gimplify_assign (cond, t, ilist);
5481 else if (is_variable_sized (var))
5483 /* For variable sized types, we need to allocate the
5484 actual storage here. Call alloca and store the
5485 result in the pointer decl that we created elsewhere. */
5486 if (pass == 0)
5487 continue;
5489 if (c_kind != OMP_CLAUSE_FIRSTPRIVATE || !is_task_ctx (ctx))
5491 tree tmp;
5493 ptr = DECL_VALUE_EXPR (new_var);
5494 gcc_assert (TREE_CODE (ptr) == INDIRECT_REF);
5495 ptr = TREE_OPERAND (ptr, 0);
5496 gcc_assert (DECL_P (ptr));
5497 x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
5499 if (lower_private_allocate (var, new_var, allocator,
5500 allocate_ptr, ilist, ctx,
5501 false, x))
5502 tmp = allocate_ptr;
5503 else
5505 /* void *tmp = __builtin_alloca */
5506 tree atmp
5507 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
5508 gcall *stmt
5509 = gimple_build_call (atmp, 2, x,
5510 size_int (DECL_ALIGN (var)));
5511 cfun->calls_alloca = 1;
5512 tmp = create_tmp_var_raw (ptr_type_node);
5513 gimple_add_tmp_var (tmp);
5514 gimple_call_set_lhs (stmt, tmp);
5516 gimple_seq_add_stmt (ilist, stmt);
5519 x = fold_convert_loc (clause_loc, TREE_TYPE (ptr), tmp);
5520 gimplify_assign (ptr, x, ilist);
5523 else if (omp_is_reference (var)
5524 && (c_kind != OMP_CLAUSE_FIRSTPRIVATE
5525 || !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c)))
5527 /* For references that are being privatized for Fortran,
5528 allocate new backing storage for the new pointer
5529 variable. This allows us to avoid changing all the
5530 code that expects a pointer to something that expects
5531 a direct variable. */
5532 if (pass == 0)
5533 continue;
5535 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
5536 if (c_kind == OMP_CLAUSE_FIRSTPRIVATE && is_task_ctx (ctx))
5538 x = build_receiver_ref (var, false, ctx);
5539 if (ctx->allocate_map)
5540 if (tree *allocatep = ctx->allocate_map->get (var))
5542 allocator = *allocatep;
5543 if (TREE_CODE (allocator) != INTEGER_CST)
5544 allocator = build_outer_var_ref (allocator, ctx);
5545 allocator = fold_convert (pointer_sized_int_node,
5546 allocator);
5547 allocate_ptr = unshare_expr (x);
5549 if (allocator == NULL_TREE)
5550 x = build_fold_addr_expr_loc (clause_loc, x);
5552 else if (lower_private_allocate (var, new_var, allocator,
5553 allocate_ptr,
5554 ilist, ctx, true, x))
5555 x = allocate_ptr;
5556 else if (TREE_CONSTANT (x))
5558 /* For reduction in SIMD loop, defer adding the
5559 initialization of the reference, because if we decide
5560 to use SIMD array for it, the initilization could cause
5561 expansion ICE. Ditto for other privatization clauses. */
5562 if (is_simd)
5563 x = NULL_TREE;
5564 else
5566 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
5567 get_name (var));
5568 gimple_add_tmp_var (x);
5569 TREE_ADDRESSABLE (x) = 1;
5570 x = build_fold_addr_expr_loc (clause_loc, x);
5573 else
5575 tree atmp
5576 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
5577 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
5578 tree al = size_int (TYPE_ALIGN (rtype));
5579 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
5582 if (x)
5584 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
5585 gimplify_assign (new_var, x, ilist);
5588 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
5590 else if ((c_kind == OMP_CLAUSE_REDUCTION
5591 || c_kind == OMP_CLAUSE_IN_REDUCTION)
5592 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5594 if (pass == 0)
5595 continue;
5597 else if (pass != 0)
5598 continue;
5600 switch (OMP_CLAUSE_CODE (c))
5602 case OMP_CLAUSE_SHARED:
5603 /* Ignore shared directives in teams construct inside
5604 target construct. */
5605 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
5606 && !is_host_teams_ctx (ctx))
5607 continue;
5608 /* Shared global vars are just accessed directly. */
5609 if (is_global_var (new_var))
5610 break;
5611 /* For taskloop firstprivate/lastprivate, represented
5612 as firstprivate and shared clause on the task, new_var
5613 is the firstprivate var. */
5614 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
5615 break;
5616 /* Set up the DECL_VALUE_EXPR for shared variables now. This
5617 needs to be delayed until after fixup_child_record_type so
5618 that we get the correct type during the dereference. */
5619 by_ref = use_pointer_for_field (var, ctx);
5620 x = build_receiver_ref (var, by_ref, ctx);
5621 SET_DECL_VALUE_EXPR (new_var, x);
5622 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5624 /* ??? If VAR is not passed by reference, and the variable
5625 hasn't been initialized yet, then we'll get a warning for
5626 the store into the omp_data_s structure. Ideally, we'd be
5627 able to notice this and not store anything at all, but
5628 we're generating code too early. Suppress the warning. */
5629 if (!by_ref)
5630 TREE_NO_WARNING (var) = 1;
5631 break;
5633 case OMP_CLAUSE__CONDTEMP_:
5634 if (is_parallel_ctx (ctx))
5636 x = build_receiver_ref (var, false, ctx);
5637 SET_DECL_VALUE_EXPR (new_var, x);
5638 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5640 else if (is_simd && !OMP_CLAUSE__CONDTEMP__ITER (c))
5642 x = build_zero_cst (TREE_TYPE (var));
5643 goto do_private;
5645 break;
5647 case OMP_CLAUSE_LASTPRIVATE:
5648 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
5649 break;
5650 /* FALLTHRU */
5652 case OMP_CLAUSE_PRIVATE:
5653 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE)
5654 x = build_outer_var_ref (var, ctx);
5655 else if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
5657 if (is_task_ctx (ctx))
5658 x = build_receiver_ref (var, false, ctx);
5659 else
5660 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_PRIVATE);
5662 else
5663 x = NULL;
5664 do_private:
5665 tree nx;
5666 bool copy_ctor;
5667 copy_ctor = false;
5668 lower_private_allocate (var, new_var, allocator, allocate_ptr,
5669 ilist, ctx, false, NULL_TREE);
5670 nx = unshare_expr (new_var);
5671 if (is_simd
5672 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5673 && OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c))
5674 copy_ctor = true;
5675 if (copy_ctor)
5676 nx = lang_hooks.decls.omp_clause_copy_ctor (c, nx, x);
5677 else
5678 nx = lang_hooks.decls.omp_clause_default_ctor (c, nx, x);
5679 if (is_simd)
5681 tree y = lang_hooks.decls.omp_clause_dtor (c, new_var);
5682 if ((TREE_ADDRESSABLE (new_var) || nx || y
5683 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5684 && (gimple_omp_for_collapse (ctx->stmt) != 1
5685 || (gimple_omp_for_index (ctx->stmt, 0)
5686 != new_var)))
5687 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE__CONDTEMP_
5688 || omp_is_reference (var))
5689 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
5690 ivar, lvar))
5692 if (omp_is_reference (var))
5694 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5695 tree new_vard = TREE_OPERAND (new_var, 0);
5696 gcc_assert (DECL_P (new_vard));
5697 SET_DECL_VALUE_EXPR (new_vard,
5698 build_fold_addr_expr (lvar));
5699 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
5702 if (nx)
5704 tree iv = unshare_expr (ivar);
5705 if (copy_ctor)
5706 x = lang_hooks.decls.omp_clause_copy_ctor (c, iv,
5708 else
5709 x = lang_hooks.decls.omp_clause_default_ctor (c,
5713 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE__CONDTEMP_)
5715 x = build2 (MODIFY_EXPR, TREE_TYPE (ivar),
5716 unshare_expr (ivar), x);
5717 nx = x;
5719 if (nx && x)
5720 gimplify_and_add (x, &llist[0]);
5721 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5722 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
5724 tree v = new_var;
5725 if (!DECL_P (v))
5727 gcc_assert (TREE_CODE (v) == MEM_REF);
5728 v = TREE_OPERAND (v, 0);
5729 gcc_assert (DECL_P (v));
5731 v = *ctx->lastprivate_conditional_map->get (v);
5732 tree t = create_tmp_var (TREE_TYPE (v));
5733 tree z = build_zero_cst (TREE_TYPE (v));
5734 tree orig_v
5735 = build_outer_var_ref (var, ctx,
5736 OMP_CLAUSE_LASTPRIVATE);
5737 gimple_seq_add_stmt (dlist,
5738 gimple_build_assign (t, z));
5739 gcc_assert (DECL_HAS_VALUE_EXPR_P (v));
5740 tree civar = DECL_VALUE_EXPR (v);
5741 gcc_assert (TREE_CODE (civar) == ARRAY_REF);
5742 civar = unshare_expr (civar);
5743 TREE_OPERAND (civar, 1) = sctx.idx;
5744 x = build2 (MODIFY_EXPR, TREE_TYPE (t), t,
5745 unshare_expr (civar));
5746 x = build2 (COMPOUND_EXPR, TREE_TYPE (orig_v), x,
5747 build2 (MODIFY_EXPR, TREE_TYPE (orig_v),
5748 orig_v, unshare_expr (ivar)));
5749 tree cond = build2 (LT_EXPR, boolean_type_node, t,
5750 civar);
5751 x = build3 (COND_EXPR, void_type_node, cond, x,
5752 void_node);
5753 gimple_seq tseq = NULL;
5754 gimplify_and_add (x, &tseq);
5755 if (ctx->outer)
5756 lower_omp (&tseq, ctx->outer);
5757 gimple_seq_add_seq (&llist[1], tseq);
5759 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5760 && ctx->for_simd_scan_phase)
5762 x = unshare_expr (ivar);
5763 tree orig_v
5764 = build_outer_var_ref (var, ctx,
5765 OMP_CLAUSE_LASTPRIVATE);
5766 x = lang_hooks.decls.omp_clause_assign_op (c, x,
5767 orig_v);
5768 gimplify_and_add (x, &llist[0]);
5770 if (y)
5772 y = lang_hooks.decls.omp_clause_dtor (c, ivar);
5773 if (y)
5774 gimplify_and_add (y, &llist[1]);
5776 break;
5778 if (omp_is_reference (var))
5780 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5781 tree new_vard = TREE_OPERAND (new_var, 0);
5782 gcc_assert (DECL_P (new_vard));
5783 tree type = TREE_TYPE (TREE_TYPE (new_vard));
5784 x = TYPE_SIZE_UNIT (type);
5785 if (TREE_CONSTANT (x))
5787 x = create_tmp_var_raw (type, get_name (var));
5788 gimple_add_tmp_var (x);
5789 TREE_ADDRESSABLE (x) = 1;
5790 x = build_fold_addr_expr_loc (clause_loc, x);
5791 x = fold_convert_loc (clause_loc,
5792 TREE_TYPE (new_vard), x);
5793 gimplify_assign (new_vard, x, ilist);
5797 if (nx)
5798 gimplify_and_add (nx, ilist);
5799 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5800 && is_simd
5801 && ctx->for_simd_scan_phase)
5803 tree orig_v = build_outer_var_ref (var, ctx,
5804 OMP_CLAUSE_LASTPRIVATE);
5805 x = lang_hooks.decls.omp_clause_assign_op (c, new_var,
5806 orig_v);
5807 gimplify_and_add (x, ilist);
5809 /* FALLTHRU */
5811 do_dtor:
5812 x = lang_hooks.decls.omp_clause_dtor (c, new_var);
5813 if (x)
5814 gimplify_and_add (x, dlist);
5815 if (allocator)
5817 if (!is_gimple_val (allocator))
5819 tree avar = create_tmp_var (TREE_TYPE (allocator));
5820 gimplify_assign (avar, allocator, dlist);
5821 allocator = avar;
5823 if (!is_gimple_val (allocate_ptr))
5825 tree apvar = create_tmp_var (TREE_TYPE (allocate_ptr));
5826 gimplify_assign (apvar, allocate_ptr, dlist);
5827 allocate_ptr = apvar;
5829 tree f = builtin_decl_explicit (BUILT_IN_GOMP_FREE);
5830 gimple *g
5831 = gimple_build_call (f, 2, allocate_ptr, allocator);
5832 gimple_seq_add_stmt (dlist, g);
5834 break;
5836 case OMP_CLAUSE_LINEAR:
5837 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
5838 goto do_firstprivate;
5839 if (OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
5840 x = NULL;
5841 else
5842 x = build_outer_var_ref (var, ctx);
5843 goto do_private;
5845 case OMP_CLAUSE_FIRSTPRIVATE:
5846 if (is_task_ctx (ctx))
5848 if ((omp_is_reference (var)
5849 && !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c))
5850 || is_variable_sized (var))
5851 goto do_dtor;
5852 else if (is_global_var (maybe_lookup_decl_in_outer_ctx (var,
5853 ctx))
5854 || use_pointer_for_field (var, NULL))
5856 x = build_receiver_ref (var, false, ctx);
5857 if (ctx->allocate_map)
5858 if (tree *allocatep = ctx->allocate_map->get (var))
5860 allocator = *allocatep;
5861 if (TREE_CODE (allocator) != INTEGER_CST)
5862 allocator = build_outer_var_ref (allocator, ctx);
5863 allocator = fold_convert (pointer_sized_int_node,
5864 allocator);
5865 allocate_ptr = unshare_expr (x);
5866 x = build_simple_mem_ref (x);
5867 TREE_THIS_NOTRAP (x) = 1;
5869 SET_DECL_VALUE_EXPR (new_var, x);
5870 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5871 goto do_dtor;
5874 if (OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c)
5875 && omp_is_reference (var))
5877 x = build_outer_var_ref (var, ctx);
5878 gcc_assert (TREE_CODE (x) == MEM_REF
5879 && integer_zerop (TREE_OPERAND (x, 1)));
5880 x = TREE_OPERAND (x, 0);
5881 x = lang_hooks.decls.omp_clause_copy_ctor
5882 (c, unshare_expr (new_var), x);
5883 gimplify_and_add (x, ilist);
5884 goto do_dtor;
5886 do_firstprivate:
5887 lower_private_allocate (var, new_var, allocator, allocate_ptr,
5888 ilist, ctx, false, NULL_TREE);
5889 x = build_outer_var_ref (var, ctx);
5890 if (is_simd)
5892 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
5893 && gimple_omp_for_combined_into_p (ctx->stmt))
5895 tree t = OMP_CLAUSE_LINEAR_STEP (c);
5896 tree stept = TREE_TYPE (t);
5897 tree ct = omp_find_clause (clauses,
5898 OMP_CLAUSE__LOOPTEMP_);
5899 gcc_assert (ct);
5900 tree l = OMP_CLAUSE_DECL (ct);
5901 tree n1 = fd->loop.n1;
5902 tree step = fd->loop.step;
5903 tree itype = TREE_TYPE (l);
5904 if (POINTER_TYPE_P (itype))
5905 itype = signed_type_for (itype);
5906 l = fold_build2 (MINUS_EXPR, itype, l, n1);
5907 if (TYPE_UNSIGNED (itype)
5908 && fd->loop.cond_code == GT_EXPR)
5909 l = fold_build2 (TRUNC_DIV_EXPR, itype,
5910 fold_build1 (NEGATE_EXPR, itype, l),
5911 fold_build1 (NEGATE_EXPR,
5912 itype, step));
5913 else
5914 l = fold_build2 (TRUNC_DIV_EXPR, itype, l, step);
5915 t = fold_build2 (MULT_EXPR, stept,
5916 fold_convert (stept, l), t);
5918 if (OMP_CLAUSE_LINEAR_ARRAY (c))
5920 if (omp_is_reference (var))
5922 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5923 tree new_vard = TREE_OPERAND (new_var, 0);
5924 gcc_assert (DECL_P (new_vard));
5925 tree type = TREE_TYPE (TREE_TYPE (new_vard));
5926 nx = TYPE_SIZE_UNIT (type);
5927 if (TREE_CONSTANT (nx))
5929 nx = create_tmp_var_raw (type,
5930 get_name (var));
5931 gimple_add_tmp_var (nx);
5932 TREE_ADDRESSABLE (nx) = 1;
5933 nx = build_fold_addr_expr_loc (clause_loc,
5934 nx);
5935 nx = fold_convert_loc (clause_loc,
5936 TREE_TYPE (new_vard),
5937 nx);
5938 gimplify_assign (new_vard, nx, ilist);
5942 x = lang_hooks.decls.omp_clause_linear_ctor
5943 (c, new_var, x, t);
5944 gimplify_and_add (x, ilist);
5945 goto do_dtor;
5948 if (POINTER_TYPE_P (TREE_TYPE (x)))
5949 x = fold_build2 (POINTER_PLUS_EXPR,
5950 TREE_TYPE (x), x, t);
5951 else
5952 x = fold_build2 (PLUS_EXPR, TREE_TYPE (x), x, t);
5955 if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LINEAR
5956 || TREE_ADDRESSABLE (new_var)
5957 || omp_is_reference (var))
5958 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
5959 ivar, lvar))
5961 if (omp_is_reference (var))
5963 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5964 tree new_vard = TREE_OPERAND (new_var, 0);
5965 gcc_assert (DECL_P (new_vard));
5966 SET_DECL_VALUE_EXPR (new_vard,
5967 build_fold_addr_expr (lvar));
5968 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
5970 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR)
5972 tree iv = create_tmp_var (TREE_TYPE (new_var));
5973 x = lang_hooks.decls.omp_clause_copy_ctor (c, iv, x);
5974 gimplify_and_add (x, ilist);
5975 gimple_stmt_iterator gsi
5976 = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
5977 gassign *g
5978 = gimple_build_assign (unshare_expr (lvar), iv);
5979 gsi_insert_before_without_update (&gsi, g,
5980 GSI_SAME_STMT);
5981 tree t = OMP_CLAUSE_LINEAR_STEP (c);
5982 enum tree_code code = PLUS_EXPR;
5983 if (POINTER_TYPE_P (TREE_TYPE (new_var)))
5984 code = POINTER_PLUS_EXPR;
5985 g = gimple_build_assign (iv, code, iv, t);
5986 gsi_insert_before_without_update (&gsi, g,
5987 GSI_SAME_STMT);
5988 break;
5990 x = lang_hooks.decls.omp_clause_copy_ctor
5991 (c, unshare_expr (ivar), x);
5992 gimplify_and_add (x, &llist[0]);
5993 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
5994 if (x)
5995 gimplify_and_add (x, &llist[1]);
5996 break;
5998 if (omp_is_reference (var))
6000 gcc_assert (TREE_CODE (new_var) == MEM_REF);
6001 tree new_vard = TREE_OPERAND (new_var, 0);
6002 gcc_assert (DECL_P (new_vard));
6003 tree type = TREE_TYPE (TREE_TYPE (new_vard));
6004 nx = TYPE_SIZE_UNIT (type);
6005 if (TREE_CONSTANT (nx))
6007 nx = create_tmp_var_raw (type, get_name (var));
6008 gimple_add_tmp_var (nx);
6009 TREE_ADDRESSABLE (nx) = 1;
6010 nx = build_fold_addr_expr_loc (clause_loc, nx);
6011 nx = fold_convert_loc (clause_loc,
6012 TREE_TYPE (new_vard), nx);
6013 gimplify_assign (new_vard, nx, ilist);
6017 x = lang_hooks.decls.omp_clause_copy_ctor
6018 (c, unshare_expr (new_var), x);
6019 gimplify_and_add (x, ilist);
6020 goto do_dtor;
6022 case OMP_CLAUSE__LOOPTEMP_:
6023 case OMP_CLAUSE__REDUCTEMP_:
6024 gcc_assert (is_taskreg_ctx (ctx));
6025 x = build_outer_var_ref (var, ctx);
6026 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
6027 gimplify_and_add (x, ilist);
6028 break;
6030 case OMP_CLAUSE_COPYIN:
6031 by_ref = use_pointer_for_field (var, NULL);
6032 x = build_receiver_ref (var, by_ref, ctx);
6033 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, x);
6034 append_to_statement_list (x, &copyin_seq);
6035 copyin_by_ref |= by_ref;
6036 break;
6038 case OMP_CLAUSE_REDUCTION:
6039 case OMP_CLAUSE_IN_REDUCTION:
6040 /* OpenACC reductions are initialized using the
6041 GOACC_REDUCTION internal function. */
6042 if (is_gimple_omp_oacc (ctx->stmt))
6043 break;
6044 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
6046 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
6047 gimple *tseq;
6048 tree ptype = TREE_TYPE (placeholder);
6049 if (cond)
6051 x = error_mark_node;
6052 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c)
6053 && !task_reduction_needs_orig_p)
6054 x = var;
6055 else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
6057 tree pptype = build_pointer_type (ptype);
6058 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
6059 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
6060 size_int (task_reduction_cnt_full
6061 + task_reduction_cntorig - 1),
6062 NULL_TREE, NULL_TREE);
6063 else
6065 unsigned int idx
6066 = *ctx->task_reduction_map->get (c);
6067 x = task_reduction_read (ilist, tskred_temp,
6068 pptype, 7 + 3 * idx);
6070 x = fold_convert (pptype, x);
6071 x = build_simple_mem_ref (x);
6074 else
6076 lower_private_allocate (var, new_var, allocator,
6077 allocate_ptr, ilist, ctx, false,
6078 NULL_TREE);
6079 x = build_outer_var_ref (var, ctx);
6081 if (omp_is_reference (var)
6082 && !useless_type_conversion_p (ptype, TREE_TYPE (x)))
6083 x = build_fold_addr_expr_loc (clause_loc, x);
6085 SET_DECL_VALUE_EXPR (placeholder, x);
6086 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
6087 tree new_vard = new_var;
6088 if (omp_is_reference (var))
6090 gcc_assert (TREE_CODE (new_var) == MEM_REF);
6091 new_vard = TREE_OPERAND (new_var, 0);
6092 gcc_assert (DECL_P (new_vard));
6094 tree rvar = NULL_TREE, *rvarp = NULL, rvar2 = NULL_TREE;
6095 if (is_simd
6096 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6097 && OMP_CLAUSE_REDUCTION_INSCAN (c))
6098 rvarp = &rvar;
6099 if (is_simd
6100 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
6101 ivar, lvar, rvarp,
6102 &rvar2))
6104 if (new_vard == new_var)
6106 gcc_assert (DECL_VALUE_EXPR (new_var) == lvar);
6107 SET_DECL_VALUE_EXPR (new_var, ivar);
6109 else
6111 SET_DECL_VALUE_EXPR (new_vard,
6112 build_fold_addr_expr (ivar));
6113 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
6115 x = lang_hooks.decls.omp_clause_default_ctor
6116 (c, unshare_expr (ivar),
6117 build_outer_var_ref (var, ctx));
6118 if (rvarp && ctx->for_simd_scan_phase)
6120 if (x)
6121 gimplify_and_add (x, &llist[0]);
6122 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
6123 if (x)
6124 gimplify_and_add (x, &llist[1]);
6125 break;
6127 else if (rvarp)
6129 if (x)
6131 gimplify_and_add (x, &llist[0]);
6133 tree ivar2 = unshare_expr (lvar);
6134 TREE_OPERAND (ivar2, 1) = sctx.idx;
6135 x = lang_hooks.decls.omp_clause_default_ctor
6136 (c, ivar2, build_outer_var_ref (var, ctx));
6137 gimplify_and_add (x, &llist[0]);
6139 if (rvar2)
6141 x = lang_hooks.decls.omp_clause_default_ctor
6142 (c, unshare_expr (rvar2),
6143 build_outer_var_ref (var, ctx));
6144 gimplify_and_add (x, &llist[0]);
6147 /* For types that need construction, add another
6148 private var which will be default constructed
6149 and optionally initialized with
6150 OMP_CLAUSE_REDUCTION_GIMPLE_INIT, as in the
6151 loop we want to assign this value instead of
6152 constructing and destructing it in each
6153 iteration. */
6154 tree nv = create_tmp_var_raw (TREE_TYPE (ivar));
6155 gimple_add_tmp_var (nv);
6156 ctx->cb.decl_map->put (TREE_OPERAND (rvar2
6157 ? rvar2
6158 : ivar, 0),
6159 nv);
6160 x = lang_hooks.decls.omp_clause_default_ctor
6161 (c, nv, build_outer_var_ref (var, ctx));
6162 gimplify_and_add (x, ilist);
6164 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
6166 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
6167 x = DECL_VALUE_EXPR (new_vard);
6168 tree vexpr = nv;
6169 if (new_vard != new_var)
6170 vexpr = build_fold_addr_expr (nv);
6171 SET_DECL_VALUE_EXPR (new_vard, vexpr);
6172 lower_omp (&tseq, ctx);
6173 SET_DECL_VALUE_EXPR (new_vard, x);
6174 gimple_seq_add_seq (ilist, tseq);
6175 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
6178 x = lang_hooks.decls.omp_clause_dtor (c, nv);
6179 if (x)
6180 gimplify_and_add (x, dlist);
6183 tree ref = build_outer_var_ref (var, ctx);
6184 x = unshare_expr (ivar);
6185 x = lang_hooks.decls.omp_clause_assign_op (c, x,
6186 ref);
6187 gimplify_and_add (x, &llist[0]);
6189 ref = build_outer_var_ref (var, ctx);
6190 x = lang_hooks.decls.omp_clause_assign_op (c, ref,
6191 rvar);
6192 gimplify_and_add (x, &llist[3]);
6194 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
6195 if (new_vard == new_var)
6196 SET_DECL_VALUE_EXPR (new_var, lvar);
6197 else
6198 SET_DECL_VALUE_EXPR (new_vard,
6199 build_fold_addr_expr (lvar));
6201 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
6202 if (x)
6203 gimplify_and_add (x, &llist[1]);
6205 tree ivar2 = unshare_expr (lvar);
6206 TREE_OPERAND (ivar2, 1) = sctx.idx;
6207 x = lang_hooks.decls.omp_clause_dtor (c, ivar2);
6208 if (x)
6209 gimplify_and_add (x, &llist[1]);
6211 if (rvar2)
6213 x = lang_hooks.decls.omp_clause_dtor (c, rvar2);
6214 if (x)
6215 gimplify_and_add (x, &llist[1]);
6217 break;
6219 if (x)
6220 gimplify_and_add (x, &llist[0]);
6221 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
6223 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
6224 lower_omp (&tseq, ctx);
6225 gimple_seq_add_seq (&llist[0], tseq);
6227 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
6228 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
6229 lower_omp (&tseq, ctx);
6230 gimple_seq_add_seq (&llist[1], tseq);
6231 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
6232 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
6233 if (new_vard == new_var)
6234 SET_DECL_VALUE_EXPR (new_var, lvar);
6235 else
6236 SET_DECL_VALUE_EXPR (new_vard,
6237 build_fold_addr_expr (lvar));
6238 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
6239 if (x)
6240 gimplify_and_add (x, &llist[1]);
6241 break;
6243 /* If this is a reference to constant size reduction var
6244 with placeholder, we haven't emitted the initializer
6245 for it because it is undesirable if SIMD arrays are used.
6246 But if they aren't used, we need to emit the deferred
6247 initialization now. */
6248 else if (omp_is_reference (var) && is_simd)
6249 handle_simd_reference (clause_loc, new_vard, ilist);
6251 tree lab2 = NULL_TREE;
6252 if (cond)
6254 gimple *g;
6255 if (!is_parallel_ctx (ctx))
6257 tree condv = create_tmp_var (boolean_type_node);
6258 tree m = build_simple_mem_ref (cond);
6259 g = gimple_build_assign (condv, m);
6260 gimple_seq_add_stmt (ilist, g);
6261 tree lab1
6262 = create_artificial_label (UNKNOWN_LOCATION);
6263 lab2 = create_artificial_label (UNKNOWN_LOCATION);
6264 g = gimple_build_cond (NE_EXPR, condv,
6265 boolean_false_node,
6266 lab2, lab1);
6267 gimple_seq_add_stmt (ilist, g);
6268 gimple_seq_add_stmt (ilist,
6269 gimple_build_label (lab1));
6271 g = gimple_build_assign (build_simple_mem_ref (cond),
6272 boolean_true_node);
6273 gimple_seq_add_stmt (ilist, g);
6275 x = lang_hooks.decls.omp_clause_default_ctor
6276 (c, unshare_expr (new_var),
6277 cond ? NULL_TREE
6278 : build_outer_var_ref (var, ctx));
6279 if (x)
6280 gimplify_and_add (x, ilist);
6282 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6283 && OMP_CLAUSE_REDUCTION_INSCAN (c))
6285 if (ctx->for_simd_scan_phase)
6286 goto do_dtor;
6287 if (x || (!is_simd
6288 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c)))
6290 tree nv = create_tmp_var_raw (TREE_TYPE (new_var));
6291 gimple_add_tmp_var (nv);
6292 ctx->cb.decl_map->put (new_vard, nv);
6293 x = lang_hooks.decls.omp_clause_default_ctor
6294 (c, nv, build_outer_var_ref (var, ctx));
6295 if (x)
6296 gimplify_and_add (x, ilist);
6297 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
6299 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
6300 tree vexpr = nv;
6301 if (new_vard != new_var)
6302 vexpr = build_fold_addr_expr (nv);
6303 SET_DECL_VALUE_EXPR (new_vard, vexpr);
6304 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
6305 lower_omp (&tseq, ctx);
6306 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
6307 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
6308 gimple_seq_add_seq (ilist, tseq);
6310 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
6311 if (is_simd && ctx->scan_exclusive)
6313 tree nv2
6314 = create_tmp_var_raw (TREE_TYPE (new_var));
6315 gimple_add_tmp_var (nv2);
6316 ctx->cb.decl_map->put (nv, nv2);
6317 x = lang_hooks.decls.omp_clause_default_ctor
6318 (c, nv2, build_outer_var_ref (var, ctx));
6319 gimplify_and_add (x, ilist);
6320 x = lang_hooks.decls.omp_clause_dtor (c, nv2);
6321 if (x)
6322 gimplify_and_add (x, dlist);
6324 x = lang_hooks.decls.omp_clause_dtor (c, nv);
6325 if (x)
6326 gimplify_and_add (x, dlist);
6328 else if (is_simd
6329 && ctx->scan_exclusive
6330 && TREE_ADDRESSABLE (TREE_TYPE (new_var)))
6332 tree nv2 = create_tmp_var_raw (TREE_TYPE (new_var));
6333 gimple_add_tmp_var (nv2);
6334 ctx->cb.decl_map->put (new_vard, nv2);
6335 x = lang_hooks.decls.omp_clause_dtor (c, nv2);
6336 if (x)
6337 gimplify_and_add (x, dlist);
6339 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
6340 goto do_dtor;
6343 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
6345 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
6346 lower_omp (&tseq, ctx);
6347 gimple_seq_add_seq (ilist, tseq);
6349 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
6350 if (is_simd)
6352 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
6353 lower_omp (&tseq, ctx);
6354 gimple_seq_add_seq (dlist, tseq);
6355 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
6357 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
6358 if (cond)
6360 if (lab2)
6361 gimple_seq_add_stmt (ilist, gimple_build_label (lab2));
6362 break;
6364 goto do_dtor;
6366 else
6368 x = omp_reduction_init (c, TREE_TYPE (new_var));
6369 gcc_assert (TREE_CODE (TREE_TYPE (new_var)) != ARRAY_TYPE);
6370 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
6372 if (cond)
6374 gimple *g;
6375 tree lab2 = NULL_TREE;
6376 /* GOMP_taskgroup_reduction_register memsets the whole
6377 array to zero. If the initializer is zero, we don't
6378 need to initialize it again, just mark it as ever
6379 used unconditionally, i.e. cond = true. */
6380 if (initializer_zerop (x))
6382 g = gimple_build_assign (build_simple_mem_ref (cond),
6383 boolean_true_node);
6384 gimple_seq_add_stmt (ilist, g);
6385 break;
6388 /* Otherwise, emit
6389 if (!cond) { cond = true; new_var = x; } */
6390 if (!is_parallel_ctx (ctx))
6392 tree condv = create_tmp_var (boolean_type_node);
6393 tree m = build_simple_mem_ref (cond);
6394 g = gimple_build_assign (condv, m);
6395 gimple_seq_add_stmt (ilist, g);
6396 tree lab1
6397 = create_artificial_label (UNKNOWN_LOCATION);
6398 lab2 = create_artificial_label (UNKNOWN_LOCATION);
6399 g = gimple_build_cond (NE_EXPR, condv,
6400 boolean_false_node,
6401 lab2, lab1);
6402 gimple_seq_add_stmt (ilist, g);
6403 gimple_seq_add_stmt (ilist,
6404 gimple_build_label (lab1));
6406 g = gimple_build_assign (build_simple_mem_ref (cond),
6407 boolean_true_node);
6408 gimple_seq_add_stmt (ilist, g);
6409 gimplify_assign (new_var, x, ilist);
6410 if (lab2)
6411 gimple_seq_add_stmt (ilist, gimple_build_label (lab2));
6412 break;
6415 /* reduction(-:var) sums up the partial results, so it
6416 acts identically to reduction(+:var). */
6417 if (code == MINUS_EXPR)
6418 code = PLUS_EXPR;
6420 /* C/C++ permits FP/complex with || and &&. */
6421 bool is_fp_and_or
6422 = ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6423 && (FLOAT_TYPE_P (TREE_TYPE (new_var))
6424 || TREE_CODE (TREE_TYPE (new_var)) == COMPLEX_TYPE));
6425 tree new_vard = new_var;
6426 if (is_simd && omp_is_reference (var))
6428 gcc_assert (TREE_CODE (new_var) == MEM_REF);
6429 new_vard = TREE_OPERAND (new_var, 0);
6430 gcc_assert (DECL_P (new_vard));
6432 tree rvar = NULL_TREE, *rvarp = NULL, rvar2 = NULL_TREE;
6433 if (is_simd
6434 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6435 && OMP_CLAUSE_REDUCTION_INSCAN (c))
6436 rvarp = &rvar;
6437 if (is_simd
6438 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
6439 ivar, lvar, rvarp,
6440 &rvar2))
6442 if (new_vard != new_var)
6444 SET_DECL_VALUE_EXPR (new_vard,
6445 build_fold_addr_expr (lvar));
6446 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
6449 tree ref = build_outer_var_ref (var, ctx);
6451 if (rvarp)
6453 if (ctx->for_simd_scan_phase)
6454 break;
6455 gimplify_assign (ivar, ref, &llist[0]);
6456 ref = build_outer_var_ref (var, ctx);
6457 gimplify_assign (ref, rvar, &llist[3]);
6458 break;
6461 gimplify_assign (unshare_expr (ivar), x, &llist[0]);
6463 if (sctx.is_simt)
6465 if (!simt_lane)
6466 simt_lane = create_tmp_var (unsigned_type_node);
6467 x = build_call_expr_internal_loc
6468 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_BFLY,
6469 TREE_TYPE (ivar), 2, ivar, simt_lane);
6470 x = build2 (code, TREE_TYPE (ivar), ivar, x);
6471 gimplify_assign (ivar, x, &llist[2]);
6473 tree ivar2 = ivar;
6474 tree ref2 = ref;
6475 if (is_fp_and_or)
6477 tree zero = build_zero_cst (TREE_TYPE (ivar));
6478 ivar2 = fold_build2_loc (clause_loc, NE_EXPR,
6479 integer_type_node, ivar,
6480 zero);
6481 ref2 = fold_build2_loc (clause_loc, NE_EXPR,
6482 integer_type_node, ref, zero);
6484 x = build2 (code, TREE_TYPE (ref), ref2, ivar2);
6485 if (is_fp_and_or)
6486 x = fold_convert (TREE_TYPE (ref), x);
6487 ref = build_outer_var_ref (var, ctx);
6488 gimplify_assign (ref, x, &llist[1]);
6491 else
6493 lower_private_allocate (var, new_var, allocator,
6494 allocate_ptr, ilist, ctx,
6495 false, NULL_TREE);
6496 if (omp_is_reference (var) && is_simd)
6497 handle_simd_reference (clause_loc, new_vard, ilist);
6498 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6499 && OMP_CLAUSE_REDUCTION_INSCAN (c))
6500 break;
6501 gimplify_assign (new_var, x, ilist);
6502 if (is_simd)
6504 tree ref = build_outer_var_ref (var, ctx);
6505 tree new_var2 = new_var;
6506 tree ref2 = ref;
6507 if (is_fp_and_or)
6509 tree zero = build_zero_cst (TREE_TYPE (new_var));
6510 new_var2
6511 = fold_build2_loc (clause_loc, NE_EXPR,
6512 integer_type_node, new_var,
6513 zero);
6514 ref2 = fold_build2_loc (clause_loc, NE_EXPR,
6515 integer_type_node, ref,
6516 zero);
6518 x = build2 (code, TREE_TYPE (ref2), ref2, new_var2);
6519 if (is_fp_and_or)
6520 x = fold_convert (TREE_TYPE (new_var), x);
6521 ref = build_outer_var_ref (var, ctx);
6522 gimplify_assign (ref, x, dlist);
6524 if (allocator)
6525 goto do_dtor;
6528 break;
6530 default:
6531 gcc_unreachable ();
6535 if (tskred_avar)
6537 tree clobber = build_clobber (TREE_TYPE (tskred_avar));
6538 gimple_seq_add_stmt (ilist, gimple_build_assign (tskred_avar, clobber));
6541 if (known_eq (sctx.max_vf, 1U))
6543 sctx.is_simt = false;
6544 if (ctx->lastprivate_conditional_map)
6546 if (gimple_omp_for_combined_into_p (ctx->stmt))
6548 /* Signal to lower_omp_1 that it should use parent context. */
6549 ctx->combined_into_simd_safelen1 = true;
6550 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
6551 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6552 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
6554 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
6555 omp_context *outer = ctx->outer;
6556 if (gimple_code (outer->stmt) == GIMPLE_OMP_SCAN)
6557 outer = outer->outer;
6558 tree *v = ctx->lastprivate_conditional_map->get (o);
6559 tree po = lookup_decl (OMP_CLAUSE_DECL (c), outer);
6560 tree *pv = outer->lastprivate_conditional_map->get (po);
6561 *v = *pv;
6564 else
6566 /* When not vectorized, treat lastprivate(conditional:) like
6567 normal lastprivate, as there will be just one simd lane
6568 writing the privatized variable. */
6569 delete ctx->lastprivate_conditional_map;
6570 ctx->lastprivate_conditional_map = NULL;
6575 if (nonconst_simd_if)
6577 if (sctx.lane == NULL_TREE)
6579 sctx.idx = create_tmp_var (unsigned_type_node);
6580 sctx.lane = create_tmp_var (unsigned_type_node);
6582 /* FIXME: For now. */
6583 sctx.is_simt = false;
6586 if (sctx.lane || sctx.is_simt)
6588 uid = create_tmp_var (ptr_type_node, "simduid");
6589 /* Don't want uninit warnings on simduid, it is always uninitialized,
6590 but we use it not for the value, but for the DECL_UID only. */
6591 TREE_NO_WARNING (uid) = 1;
6592 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SIMDUID_);
6593 OMP_CLAUSE__SIMDUID__DECL (c) = uid;
6594 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
6595 gimple_omp_for_set_clauses (ctx->stmt, c);
6597 /* Emit calls denoting privatized variables and initializing a pointer to
6598 structure that holds private variables as fields after ompdevlow pass. */
6599 if (sctx.is_simt)
6601 sctx.simt_eargs[0] = uid;
6602 gimple *g
6603 = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, sctx.simt_eargs);
6604 gimple_call_set_lhs (g, uid);
6605 gimple_seq_add_stmt (ilist, g);
6606 sctx.simt_eargs.release ();
6608 simtrec = create_tmp_var (ptr_type_node, ".omp_simt");
6609 g = gimple_build_call_internal (IFN_GOMP_SIMT_ENTER_ALLOC, 1, uid);
6610 gimple_call_set_lhs (g, simtrec);
6611 gimple_seq_add_stmt (ilist, g);
6613 if (sctx.lane)
6615 gimple *g = gimple_build_call_internal (IFN_GOMP_SIMD_LANE,
6616 2 + (nonconst_simd_if != NULL),
6617 uid, integer_zero_node,
6618 nonconst_simd_if);
6619 gimple_call_set_lhs (g, sctx.lane);
6620 gimple_stmt_iterator gsi = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
6621 gsi_insert_before_without_update (&gsi, g, GSI_SAME_STMT);
6622 g = gimple_build_assign (sctx.lane, INTEGER_CST,
6623 build_int_cst (unsigned_type_node, 0));
6624 gimple_seq_add_stmt (ilist, g);
6625 if (sctx.lastlane)
6627 g = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
6628 2, uid, sctx.lane);
6629 gimple_call_set_lhs (g, sctx.lastlane);
6630 gimple_seq_add_stmt (dlist, g);
6631 gimple_seq_add_seq (dlist, llist[3]);
6633 /* Emit reductions across SIMT lanes in log_2(simt_vf) steps. */
6634 if (llist[2])
6636 tree simt_vf = create_tmp_var (unsigned_type_node);
6637 g = gimple_build_call_internal (IFN_GOMP_SIMT_VF, 0);
6638 gimple_call_set_lhs (g, simt_vf);
6639 gimple_seq_add_stmt (dlist, g);
6641 tree t = build_int_cst (unsigned_type_node, 1);
6642 g = gimple_build_assign (simt_lane, INTEGER_CST, t);
6643 gimple_seq_add_stmt (dlist, g);
6645 t = build_int_cst (unsigned_type_node, 0);
6646 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
6647 gimple_seq_add_stmt (dlist, g);
6649 tree body = create_artificial_label (UNKNOWN_LOCATION);
6650 tree header = create_artificial_label (UNKNOWN_LOCATION);
6651 tree end = create_artificial_label (UNKNOWN_LOCATION);
6652 gimple_seq_add_stmt (dlist, gimple_build_goto (header));
6653 gimple_seq_add_stmt (dlist, gimple_build_label (body));
6655 gimple_seq_add_seq (dlist, llist[2]);
6657 g = gimple_build_assign (simt_lane, LSHIFT_EXPR, simt_lane, integer_one_node);
6658 gimple_seq_add_stmt (dlist, g);
6660 gimple_seq_add_stmt (dlist, gimple_build_label (header));
6661 g = gimple_build_cond (LT_EXPR, simt_lane, simt_vf, body, end);
6662 gimple_seq_add_stmt (dlist, g);
6664 gimple_seq_add_stmt (dlist, gimple_build_label (end));
6666 for (int i = 0; i < 2; i++)
6667 if (llist[i])
6669 tree vf = create_tmp_var (unsigned_type_node);
6670 g = gimple_build_call_internal (IFN_GOMP_SIMD_VF, 1, uid);
6671 gimple_call_set_lhs (g, vf);
6672 gimple_seq *seq = i == 0 ? ilist : dlist;
6673 gimple_seq_add_stmt (seq, g);
6674 tree t = build_int_cst (unsigned_type_node, 0);
6675 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
6676 gimple_seq_add_stmt (seq, g);
6677 tree body = create_artificial_label (UNKNOWN_LOCATION);
6678 tree header = create_artificial_label (UNKNOWN_LOCATION);
6679 tree end = create_artificial_label (UNKNOWN_LOCATION);
6680 gimple_seq_add_stmt (seq, gimple_build_goto (header));
6681 gimple_seq_add_stmt (seq, gimple_build_label (body));
6682 gimple_seq_add_seq (seq, llist[i]);
6683 t = build_int_cst (unsigned_type_node, 1);
6684 g = gimple_build_assign (sctx.idx, PLUS_EXPR, sctx.idx, t);
6685 gimple_seq_add_stmt (seq, g);
6686 gimple_seq_add_stmt (seq, gimple_build_label (header));
6687 g = gimple_build_cond (LT_EXPR, sctx.idx, vf, body, end);
6688 gimple_seq_add_stmt (seq, g);
6689 gimple_seq_add_stmt (seq, gimple_build_label (end));
6692 if (sctx.is_simt)
6694 gimple_seq_add_seq (dlist, sctx.simt_dlist);
6695 gimple *g
6696 = gimple_build_call_internal (IFN_GOMP_SIMT_EXIT, 1, simtrec);
6697 gimple_seq_add_stmt (dlist, g);
6700 /* The copyin sequence is not to be executed by the main thread, since
6701 that would result in self-copies. Perhaps not visible to scalars,
6702 but it certainly is to C++ operator=. */
6703 if (copyin_seq)
6705 x = build_call_expr (builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM),
6707 x = build2 (NE_EXPR, boolean_type_node, x,
6708 build_int_cst (TREE_TYPE (x), 0));
6709 x = build3 (COND_EXPR, void_type_node, x, copyin_seq, NULL);
6710 gimplify_and_add (x, ilist);
6713 /* If any copyin variable is passed by reference, we must ensure the
6714 master thread doesn't modify it before it is copied over in all
6715 threads. Similarly for variables in both firstprivate and
6716 lastprivate clauses we need to ensure the lastprivate copying
6717 happens after firstprivate copying in all threads. And similarly
6718 for UDRs if initializer expression refers to omp_orig. */
6719 if (copyin_by_ref || lastprivate_firstprivate
6720 || (reduction_omp_orig_ref
6721 && !ctx->scan_inclusive
6722 && !ctx->scan_exclusive))
6724 /* Don't add any barrier for #pragma omp simd or
6725 #pragma omp distribute. */
6726 if (!is_task_ctx (ctx)
6727 && (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
6728 || gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_FOR))
6729 gimple_seq_add_stmt (ilist, omp_build_barrier (NULL_TREE));
6732 /* If max_vf is non-zero, then we can use only a vectorization factor
6733 up to the max_vf we chose. So stick it into the safelen clause. */
6734 if (maybe_ne (sctx.max_vf, 0U))
6736 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
6737 OMP_CLAUSE_SAFELEN);
6738 poly_uint64 safe_len;
6739 if (c == NULL_TREE
6740 || (poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
6741 && maybe_gt (safe_len, sctx.max_vf)))
6743 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN);
6744 OMP_CLAUSE_SAFELEN_EXPR (c) = build_int_cst (integer_type_node,
6745 sctx.max_vf);
6746 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
6747 gimple_omp_for_set_clauses (ctx->stmt, c);
6752 /* Create temporary variables for lastprivate(conditional:) implementation
6753 in context CTX with CLAUSES. */
6755 static void
6756 lower_lastprivate_conditional_clauses (tree *clauses, omp_context *ctx)
     /* ITER_TYPE is the unsigned iteration-counter type used for the
	lastprivate(conditional:) bookkeeping, COND_PTR the decl holding a
	pointer to the shared counter buffer, and ITER_VAR the privatized
	iterator counter recorded in an OMP_CLAUSE__CONDTEMP_ clause.  */
6758 tree iter_type = NULL_TREE;
6759 tree cond_ptr = NULL_TREE;
6760 tree iter_var = NULL_TREE;
6761 bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
6762 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD);
6763 tree next = *clauses;
6764 for (tree c = *clauses; c; c = OMP_CLAUSE_CHAIN (c))
6765 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6766 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
     /* For simd, a _condtemp_ clause for each conditional lastprivate is
	expected to be present already (asserted below); map the original
	decl to the _condtemp_ decl and, the first time, also prepend an
	iterator _condtemp_ clause for ITER_VAR.  */
6768 if (is_simd)
6770 tree cc = omp_find_clause (next, OMP_CLAUSE__CONDTEMP_);
6771 gcc_assert (cc);
6772 if (iter_type == NULL_TREE)
6774 iter_type = TREE_TYPE (OMP_CLAUSE_DECL (cc));
6775 iter_var = create_tmp_var_raw (iter_type);
6776 DECL_CONTEXT (iter_var) = current_function_decl;
6777 DECL_SEEN_IN_BIND_EXPR_P (iter_var) = 1;
6778 DECL_CHAIN (iter_var) = ctx->block_vars;
6779 ctx->block_vars = iter_var;
6780 tree c3
6781 = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
6782 OMP_CLAUSE__CONDTEMP__ITER (c3) = 1;
6783 OMP_CLAUSE_DECL (c3) = iter_var;
6784 OMP_CLAUSE_CHAIN (c3) = *clauses;
6785 *clauses = c3;
6786 ctx->lastprivate_conditional_map = new hash_map<tree, tree>;
     /* Resume the _condtemp_ search after CC so each conditional
	lastprivate clause pairs with its own _condtemp_ clause.  */
6788 next = OMP_CLAUSE_CHAIN (cc);
6789 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
6790 tree v = lookup_decl (OMP_CLAUSE_DECL (cc), ctx);
6791 ctx->lastprivate_conditional_map->put (o, v);
6792 continue;
     /* Non-simd (worksharing loop or sections): on the first conditional
	lastprivate clause, derive the counter type from the construct and
	create the shared buffer pointer and iterator decls.  */
6794 if (iter_type == NULL)
6796 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR)
6798 struct omp_for_data fd;
6799 omp_extract_for_data (as_a <gomp_for *> (ctx->stmt), &fd,
6800 NULL);
6801 iter_type = unsigned_type_for (fd.iter_type);
6803 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
6804 iter_type = unsigned_type_node;
     /* Reuse an existing _condtemp_ clause if one is already present,
	otherwise build one holding COND_PTR and prepend it.  */
6805 tree c2 = omp_find_clause (*clauses, OMP_CLAUSE__CONDTEMP_);
6806 if (c2)
6808 cond_ptr
6809 = lookup_decl_in_outer_ctx (OMP_CLAUSE_DECL (c2), ctx);
6810 OMP_CLAUSE_DECL (c2) = cond_ptr;
6812 else
6814 cond_ptr = create_tmp_var_raw (build_pointer_type (iter_type));
6815 DECL_CONTEXT (cond_ptr) = current_function_decl;
6816 DECL_SEEN_IN_BIND_EXPR_P (cond_ptr) = 1;
6817 DECL_CHAIN (cond_ptr) = ctx->block_vars;
6818 ctx->block_vars = cond_ptr;
6819 c2 = build_omp_clause (UNKNOWN_LOCATION,
6820 OMP_CLAUSE__CONDTEMP_);
6821 OMP_CLAUSE_DECL (c2) = cond_ptr;
6822 OMP_CLAUSE_CHAIN (c2) = *clauses;
6823 *clauses = c2;
     /* The iterator _condtemp_ clause (C3) is chained right after the
	buffer-pointer clause (C2).  */
6825 iter_var = create_tmp_var_raw (iter_type);
6826 DECL_CONTEXT (iter_var) = current_function_decl;
6827 DECL_SEEN_IN_BIND_EXPR_P (iter_var) = 1;
6828 DECL_CHAIN (iter_var) = ctx->block_vars;
6829 ctx->block_vars = iter_var;
6830 tree c3
6831 = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
6832 OMP_CLAUSE__CONDTEMP__ITER (c3) = 1;
6833 OMP_CLAUSE_DECL (c3) = iter_var;
6834 OMP_CLAUSE_CHAIN (c3) = OMP_CLAUSE_CHAIN (c2);
6835 OMP_CLAUSE_CHAIN (c2) = c3;
6836 ctx->lastprivate_conditional_map = new hash_map<tree, tree>;
     /* Per-variable counter V; lower_lastprivate_clauses later compares
	it (GT_EXPR) against the shared buffer slot to decide whether this
	thread performed the "last" conditional store.  */
6838 tree v = create_tmp_var_raw (iter_type);
6839 DECL_CONTEXT (v) = current_function_decl;
6840 DECL_SEEN_IN_BIND_EXPR_P (v) = 1;
6841 DECL_CHAIN (v) = ctx->block_vars;
6842 ctx->block_vars = v;
6843 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
6844 ctx->lastprivate_conditional_map->put (o, v);
6849 /* Generate code to implement the LASTPRIVATE clauses. This is used for
6850 both parallel and workshare constructs. PREDICATE may be NULL if it's
6851 always true. BODY_P is the sequence to insert early initialization
6852 if needed, STMT_LIST is where the non-conditional lastprivate handling
6853 goes into and CSTMT_LIST is a sequence that needs to be run in a critical
6854 section. */
6856 static void
6857 lower_lastprivate_clauses (tree clauses, tree predicate, gimple_seq *body_p,
6858 gimple_seq *stmt_list, gimple_seq *cstmt_list,
6859 omp_context *ctx)
6861 tree x, c, label = NULL, orig_clauses = clauses;
6862 bool par_clauses = false;
6863 tree simduid = NULL, lastlane = NULL, simtcond = NULL, simtlast = NULL;
     /* Running byte offset (pointer form) or element index (array form)
	into the _condtemp_ buffer of conditional-lastprivate counters.  */
6864 unsigned HOST_WIDE_INT conditional_off = 0;
     /* Copy-outs deferred until after all other lastprivate handling
	(used for the combined-into-simd-safelen1 case below).  */
6865 gimple_seq post_stmt_list = NULL;
6867 /* Early exit if there are no lastprivate or linear clauses. */
6868 for (; clauses ; clauses = OMP_CLAUSE_CHAIN (clauses))
6869 if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LASTPRIVATE
6870 || (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LINEAR
6871 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (clauses)))
6872 break;
6873 if (clauses == NULL)
6875 /* If this was a workshare clause, see if it had been combined
6876 with its parallel. In that case, look for the clauses on the
6877 parallel statement itself. */
6878 if (is_parallel_ctx (ctx))
6879 return;
6881 ctx = ctx->outer;
6882 if (ctx == NULL || !is_parallel_ctx (ctx))
6883 return;
6885 clauses = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
6886 OMP_CLAUSE_LASTPRIVATE);
6887 if (clauses == NULL)
6888 return;
6889 par_clauses = true;
     /* For simd loops, pick up the _simt_ and _simduid_ clauses that
	earlier lowering may have added.  */
6892 bool maybe_simt = false;
6893 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
6894 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
6896 maybe_simt = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMT_);
6897 simduid = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMDUID_);
6898 if (simduid)
6899 simduid = OMP_CLAUSE__SIMDUID__DECL (simduid);
     /* Emit the guard "if (!PREDICATE) goto LABEL;" around all the
	copy-out code; under SIMT, first vote across lanes whether the
	predicate holds in any lane (IFN_GOMP_SIMT_VOTE_ANY).  */
6902 if (predicate)
6904 gcond *stmt;
6905 tree label_true, arm1, arm2;
6906 enum tree_code pred_code = TREE_CODE (predicate);
6908 label = create_artificial_label (UNKNOWN_LOCATION);
6909 label_true = create_artificial_label (UNKNOWN_LOCATION);
6910 if (TREE_CODE_CLASS (pred_code) == tcc_comparison)
6912 arm1 = TREE_OPERAND (predicate, 0);
6913 arm2 = TREE_OPERAND (predicate, 1);
6914 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
6915 gimplify_expr (&arm2, stmt_list, NULL, is_gimple_val, fb_rvalue);
6917 else
6919 arm1 = predicate;
6920 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
6921 arm2 = boolean_false_node;
6922 pred_code = NE_EXPR;
6924 if (maybe_simt)
6926 c = build2 (pred_code, boolean_type_node, arm1, arm2);
6927 c = fold_convert (integer_type_node, c);
6928 simtcond = create_tmp_var (integer_type_node);
6929 gimplify_assign (simtcond, c, stmt_list);
6930 gcall *g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY,
6931 1, simtcond);
6932 c = create_tmp_var (integer_type_node);
6933 gimple_call_set_lhs (g, c);
6934 gimple_seq_add_stmt (stmt_list, g);
6935 stmt = gimple_build_cond (NE_EXPR, c, integer_zero_node,
6936 label_true, label);
6938 else
6939 stmt = gimple_build_cond (pred_code, arm1, arm2, label_true, label);
6940 gimple_seq_add_stmt (stmt_list, stmt);
6941 gimple_seq_add_stmt (stmt_list, gimple_build_label (label_true));
     /* Walk the clauses, emitting the copy-out for each lastprivate or
	copy-out linear variable.  */
6944 tree cond_ptr = NULL_TREE;
6945 for (c = clauses; c ;)
6947 tree var, new_var;
6948 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
6949 gimple_seq *this_stmt_list = stmt_list;
6950 tree lab2 = NULL_TREE;
     /* lastprivate(conditional:) bookkeeping: zero the per-thread
	counter V in BODY_P, then in the critical section CSTMT_LIST
	compare V against the shared _condtemp_ buffer slot; only if
	V is greater (this thread saw a later conditional store) is the
	slot updated and the copy-out below performed (LAB2 skips it).  */
6952 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6953 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
6954 && ctx->lastprivate_conditional_map
6955 && !ctx->combined_into_simd_safelen1)
6957 gcc_assert (body_p);
6958 if (simduid)
6959 goto next;
6960 if (cond_ptr == NULL_TREE)
6962 cond_ptr = omp_find_clause (orig_clauses, OMP_CLAUSE__CONDTEMP_);
6963 cond_ptr = OMP_CLAUSE_DECL (cond_ptr);
6965 tree type = TREE_TYPE (TREE_TYPE (cond_ptr));
6966 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
6967 tree v = *ctx->lastprivate_conditional_map->get (o);
6968 gimplify_assign (v, build_zero_cst (type), body_p);
6969 this_stmt_list = cstmt_list;
6970 tree mem;
6971 if (POINTER_TYPE_P (TREE_TYPE (cond_ptr)))
6973 mem = build2 (MEM_REF, type, cond_ptr,
6974 build_int_cst (TREE_TYPE (cond_ptr),
6975 conditional_off));
6976 conditional_off += tree_to_uhwi (TYPE_SIZE_UNIT (type));
6978 else
6979 mem = build4 (ARRAY_REF, type, cond_ptr,
6980 size_int (conditional_off++), NULL_TREE, NULL_TREE);
6981 tree mem2 = copy_node (mem);
6982 gimple_seq seq = NULL;
6983 mem = force_gimple_operand (mem, &seq, true, NULL_TREE);
6984 gimple_seq_add_seq (this_stmt_list, seq);
6985 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
6986 lab2 = create_artificial_label (UNKNOWN_LOCATION);
6987 gimple *g = gimple_build_cond (GT_EXPR, v, mem, lab1, lab2);
6988 gimple_seq_add_stmt (this_stmt_list, g);
6989 gimple_seq_add_stmt (this_stmt_list, gimple_build_label (lab1));
6990 gimplify_assign (mem2, v, this_stmt_list);
     /* With simd lowered to safelen(1), conditional lastprivates are
	copied out last, into POST_STMT_LIST (appended at the end).  */
6992 else if (predicate
6993 && ctx->combined_into_simd_safelen1
6994 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6995 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
6996 && ctx->lastprivate_conditional_map)
6997 this_stmt_list = &post_stmt_list;
6999 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7000 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
7001 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
7003 var = OMP_CLAUSE_DECL (c);
     /* For firstprivate+lastprivate on a taskloop the privatized copy
	lives in the enclosing task context.  */
7004 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7005 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
7006 && is_taskloop_ctx (ctx))
7008 gcc_checking_assert (ctx->outer && is_task_ctx (ctx->outer));
7009 new_var = lookup_decl (var, ctx->outer);
7011 else
7013 new_var = lookup_decl (var, ctx);
7014 /* Avoid uninitialized warnings for lastprivate and
7015 for linear iterators. */
7016 if (predicate
7017 && (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7018 || OMP_CLAUSE_LINEAR_NO_COPYIN (c)))
7019 TREE_NO_WARNING (new_var) = 1;
     /* If the privatized variable was turned into an "omp simd array"
	element, read it from the last lane, computed (once) by
	IFN_GOMP_SIMD_LAST_LANE from the simduid.  */
7022 if (!maybe_simt && simduid && DECL_HAS_VALUE_EXPR_P (new_var))
7024 tree val = DECL_VALUE_EXPR (new_var);
7025 if (TREE_CODE (val) == ARRAY_REF
7026 && VAR_P (TREE_OPERAND (val, 0))
7027 && lookup_attribute ("omp simd array",
7028 DECL_ATTRIBUTES (TREE_OPERAND (val,
7029 0))))
7031 if (lastlane == NULL)
7033 lastlane = create_tmp_var (unsigned_type_node);
7034 gcall *g
7035 = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
7036 2, simduid,
7037 TREE_OPERAND (val, 1));
7038 gimple_call_set_lhs (g, lastlane);
7039 gimple_seq_add_stmt (this_stmt_list, g);
7041 new_var = build4 (ARRAY_REF, TREE_TYPE (val),
7042 TREE_OPERAND (val, 0), lastlane,
7043 NULL_TREE, NULL_TREE);
7044 TREE_THIS_NOTRAP (new_var) = 1;
     /* Under SIMT, fetch the value from the last lane whose predicate
	was true (IFN_GOMP_SIMT_LAST_LANE + IFN_GOMP_SIMT_XCHG_IDX).  */
7047 else if (maybe_simt)
7049 tree val = (DECL_HAS_VALUE_EXPR_P (new_var)
7050 ? DECL_VALUE_EXPR (new_var)
7051 : new_var);
7052 if (simtlast == NULL)
7054 simtlast = create_tmp_var (unsigned_type_node);
7055 gcall *g = gimple_build_call_internal
7056 (IFN_GOMP_SIMT_LAST_LANE, 1, simtcond);
7057 gimple_call_set_lhs (g, simtlast);
7058 gimple_seq_add_stmt (this_stmt_list, g);
7060 x = build_call_expr_internal_loc
7061 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_IDX,
7062 TREE_TYPE (val), 2, val, simtlast);
7063 new_var = unshare_expr (new_var);
7064 gimplify_assign (new_var, x, this_stmt_list);
7065 new_var = unshare_expr (new_var);
     /* Lower and emit any pending clause-attached statement sequence
	(e.g. linear step updates) before the copy-out itself.  */
7068 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7069 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
7071 lower_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
7072 gimple_seq_add_seq (this_stmt_list,
7073 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
7074 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) = NULL;
7076 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
7077 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
7079 lower_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
7080 gimple_seq_add_seq (this_stmt_list,
7081 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
7082 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) = NULL;
     /* X is the outer (original) reference to assign into; for a
	taskloop loop IV that is a global var, use it directly.  */
7085 x = NULL_TREE;
7086 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7087 && OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c)
7088 && is_taskloop_ctx (ctx))
7090 tree ovar = maybe_lookup_decl_in_outer_ctx (var,
7091 ctx->outer->outer);
7092 if (is_global_var (ovar))
7093 x = ovar;
7095 if (!x)
7096 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_LASTPRIVATE);
7097 if (omp_is_reference (var))
7098 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
7099 x = lang_hooks.decls.omp_clause_assign_op (c, x, new_var);
7100 gimplify_and_add (x, this_stmt_list);
     /* LAB2 is the skip target of the conditional-lastprivate check.  */
7102 if (lab2)
7103 gimple_seq_add_stmt (this_stmt_list, gimple_build_label (lab2));
7106 next:
7107 c = OMP_CLAUSE_CHAIN (c);
7108 if (c == NULL && !par_clauses)
7110 /* If this was a workshare clause, see if it had been combined
7111 with its parallel. In that case, continue looking for the
7112 clauses also on the parallel statement itself. */
7113 if (is_parallel_ctx (ctx))
7114 break;
7116 ctx = ctx->outer;
7117 if (ctx == NULL || !is_parallel_ctx (ctx))
7118 break;
7120 c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
7121 OMP_CLAUSE_LASTPRIVATE);
7122 par_clauses = true;
     /* LABEL is the "predicate false" join point for the guard emitted
	above; the deferred conditional copy-outs go after it.  */
7126 if (label)
7127 gimple_seq_add_stmt (stmt_list, gimple_build_label (label));
7128 gimple_seq_add_seq (stmt_list, post_stmt_list);
7131 /* Lower the OpenACC reductions of CLAUSES for compute axis LEVEL
7132 (which might be a placeholder). INNER is true if this is an inner
7133 axis of a multi-axis loop. FORK and JOIN are (optional) fork and
7134 join markers. Generate the before-loop forking sequence in
7135 FORK_SEQ and the after-loop joining sequence in JOIN_SEQ. The
7136 general form of these sequences is
7138 GOACC_REDUCTION_SETUP
7139 GOACC_FORK
7140 GOACC_REDUCTION_INIT
7142 GOACC_REDUCTION_FINI
7143 GOACC_JOIN
7144 GOACC_REDUCTION_TEARDOWN. */
7146 static void
7147 lower_oacc_reductions (location_t loc, tree clauses, tree level, bool inner,
7148 gcall *fork, gcall *private_marker, gcall *join,
7149 gimple_seq *fork_seq, gimple_seq *join_seq,
7150 omp_context *ctx)
7152 gimple_seq before_fork = NULL;
7153 gimple_seq after_fork = NULL;
7154 gimple_seq before_join = NULL;
7155 gimple_seq after_join = NULL;
7156 tree init_code = NULL_TREE, fini_code = NULL_TREE,
7157 setup_code = NULL_TREE, teardown_code = NULL_TREE;
7158 unsigned offset = 0;
7160 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
7161 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
7163 /* No 'reduction' clauses on OpenACC 'kernels'. */
7164 gcc_checking_assert (!is_oacc_kernels (ctx));
7165 /* Likewise, on OpenACC 'kernels' decomposed parts. */
7166 gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx));
7168 tree orig = OMP_CLAUSE_DECL (c);
7169 tree var = maybe_lookup_decl (orig, ctx);
7170 tree ref_to_res = NULL_TREE;
7171 tree incoming, outgoing, v1, v2, v3;
7172 bool is_private = false;
7174 enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
7175 if (rcode == MINUS_EXPR)
7176 rcode = PLUS_EXPR;
7177 else if (rcode == TRUTH_ANDIF_EXPR)
7178 rcode = BIT_AND_EXPR;
7179 else if (rcode == TRUTH_ORIF_EXPR)
7180 rcode = BIT_IOR_EXPR;
7181 tree op = build_int_cst (unsigned_type_node, rcode);
7183 if (!var)
7184 var = orig;
7186 incoming = outgoing = var;
7188 if (!inner)
7190 /* See if an outer construct also reduces this variable. */
7191 omp_context *outer = ctx;
7193 while (omp_context *probe = outer->outer)
7195 enum gimple_code type = gimple_code (probe->stmt);
7196 tree cls;
7198 switch (type)
7200 case GIMPLE_OMP_FOR:
7201 cls = gimple_omp_for_clauses (probe->stmt);
7202 break;
7204 case GIMPLE_OMP_TARGET:
7205 /* No 'reduction' clauses inside OpenACC 'kernels'
7206 regions. */
7207 gcc_checking_assert (!is_oacc_kernels (probe));
7209 if (!is_gimple_omp_offloaded (probe->stmt))
7210 goto do_lookup;
7212 cls = gimple_omp_target_clauses (probe->stmt);
7213 break;
7215 default:
7216 goto do_lookup;
7219 outer = probe;
7220 for (; cls; cls = OMP_CLAUSE_CHAIN (cls))
7221 if (OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_REDUCTION
7222 && orig == OMP_CLAUSE_DECL (cls))
7224 incoming = outgoing = lookup_decl (orig, probe);
7225 goto has_outer_reduction;
7227 else if ((OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_FIRSTPRIVATE
7228 || OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_PRIVATE)
7229 && orig == OMP_CLAUSE_DECL (cls))
7231 is_private = true;
7232 goto do_lookup;
7236 do_lookup:
7237 /* This is the outermost construct with this reduction,
7238 see if there's a mapping for it. */
7239 if (gimple_code (outer->stmt) == GIMPLE_OMP_TARGET
7240 && maybe_lookup_field (orig, outer) && !is_private)
7242 ref_to_res = build_receiver_ref (orig, false, outer);
7243 if (omp_is_reference (orig))
7244 ref_to_res = build_simple_mem_ref (ref_to_res);
7246 tree type = TREE_TYPE (var);
7247 if (POINTER_TYPE_P (type))
7248 type = TREE_TYPE (type);
7250 outgoing = var;
7251 incoming = omp_reduction_init_op (loc, rcode, type);
7253 else
7255 /* Try to look at enclosing contexts for reduction var,
7256 use original if no mapping found. */
7257 tree t = NULL_TREE;
7258 omp_context *c = ctx->outer;
7259 while (c && !t)
7261 t = maybe_lookup_decl (orig, c);
7262 c = c->outer;
7264 incoming = outgoing = (t ? t : orig);
7267 has_outer_reduction:;
7270 if (!ref_to_res)
7271 ref_to_res = integer_zero_node;
7273 if (omp_is_reference (orig))
7275 tree type = TREE_TYPE (var);
7276 const char *id = IDENTIFIER_POINTER (DECL_NAME (var));
7278 if (!inner)
7280 tree x = create_tmp_var (TREE_TYPE (type), id);
7281 gimplify_assign (var, build_fold_addr_expr (x), fork_seq);
7284 v1 = create_tmp_var (type, id);
7285 v2 = create_tmp_var (type, id);
7286 v3 = create_tmp_var (type, id);
7288 gimplify_assign (v1, var, fork_seq);
7289 gimplify_assign (v2, var, fork_seq);
7290 gimplify_assign (v3, var, fork_seq);
7292 var = build_simple_mem_ref (var);
7293 v1 = build_simple_mem_ref (v1);
7294 v2 = build_simple_mem_ref (v2);
7295 v3 = build_simple_mem_ref (v3);
7296 outgoing = build_simple_mem_ref (outgoing);
7298 if (!TREE_CONSTANT (incoming))
7299 incoming = build_simple_mem_ref (incoming);
7301 else
7302 v1 = v2 = v3 = var;
7304 /* Determine position in reduction buffer, which may be used
7305 by target. The parser has ensured that this is not a
7306 variable-sized type. */
7307 fixed_size_mode mode
7308 = as_a <fixed_size_mode> (TYPE_MODE (TREE_TYPE (var)));
7309 unsigned align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
7310 offset = (offset + align - 1) & ~(align - 1);
7311 tree off = build_int_cst (sizetype, offset);
7312 offset += GET_MODE_SIZE (mode);
7314 if (!init_code)
7316 init_code = build_int_cst (integer_type_node,
7317 IFN_GOACC_REDUCTION_INIT);
7318 fini_code = build_int_cst (integer_type_node,
7319 IFN_GOACC_REDUCTION_FINI);
7320 setup_code = build_int_cst (integer_type_node,
7321 IFN_GOACC_REDUCTION_SETUP);
7322 teardown_code = build_int_cst (integer_type_node,
7323 IFN_GOACC_REDUCTION_TEARDOWN);
7326 tree setup_call
7327 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
7328 TREE_TYPE (var), 6, setup_code,
7329 unshare_expr (ref_to_res),
7330 incoming, level, op, off);
7331 tree init_call
7332 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
7333 TREE_TYPE (var), 6, init_code,
7334 unshare_expr (ref_to_res),
7335 v1, level, op, off);
7336 tree fini_call
7337 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
7338 TREE_TYPE (var), 6, fini_code,
7339 unshare_expr (ref_to_res),
7340 v2, level, op, off);
7341 tree teardown_call
7342 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
7343 TREE_TYPE (var), 6, teardown_code,
7344 ref_to_res, v3, level, op, off);
7346 gimplify_assign (v1, setup_call, &before_fork);
7347 gimplify_assign (v2, init_call, &after_fork);
7348 gimplify_assign (v3, fini_call, &before_join);
7349 gimplify_assign (outgoing, teardown_call, &after_join);
7352 /* Now stitch things together. */
7353 gimple_seq_add_seq (fork_seq, before_fork);
7354 if (private_marker)
7355 gimple_seq_add_stmt (fork_seq, private_marker);
7356 if (fork)
7357 gimple_seq_add_stmt (fork_seq, fork);
7358 gimple_seq_add_seq (fork_seq, after_fork);
7360 gimple_seq_add_seq (join_seq, before_join);
7361 if (join)
7362 gimple_seq_add_stmt (join_seq, join);
7363 gimple_seq_add_seq (join_seq, after_join);
7366 /* Generate code to implement the REDUCTION clauses, append it
7367 to STMT_SEQP. CLIST if non-NULL is a pointer to a sequence
7368 that should be emitted also inside of the critical section,
7369 in that case clear *CLIST afterwards, otherwise leave it as is
7370 and let the caller emit it itself. */
7372 static void
7373 lower_reduction_clauses (tree clauses, gimple_seq *stmt_seqp,
7374 gimple_seq *clist, omp_context *ctx)
7376 gimple_seq sub_seq = NULL;
7377 gimple *stmt;
7378 tree x, c;
7379 int count = 0;
7381 /* OpenACC loop reductions are handled elsewhere. */
7382 if (is_gimple_omp_oacc (ctx->stmt))
7383 return;
7385 /* SIMD reductions are handled in lower_rec_input_clauses. */
7386 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
7387 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
7388 return;
7390 /* inscan reductions are handled elsewhere. */
7391 if (ctx->scan_inclusive || ctx->scan_exclusive)
7392 return;
7394 /* First see if there is exactly one reduction clause. Use OMP_ATOMIC
7395 update in that case, otherwise use a lock. */
7396 for (c = clauses; c && count < 2; c = OMP_CLAUSE_CHAIN (c))
7397 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
7398 && !OMP_CLAUSE_REDUCTION_TASK (c))
7400 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
7401 || TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
7403 /* Never use OMP_ATOMIC for array reductions or UDRs. */
7404 count = -1;
7405 break;
7407 count++;
7410 if (count == 0)
7411 return;
7413 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7415 tree var, ref, new_var, orig_var;
7416 enum tree_code code;
7417 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
7419 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
7420 || OMP_CLAUSE_REDUCTION_TASK (c))
7421 continue;
7423 enum omp_clause_code ccode = OMP_CLAUSE_REDUCTION;
7424 orig_var = var = OMP_CLAUSE_DECL (c);
7425 if (TREE_CODE (var) == MEM_REF)
7427 var = TREE_OPERAND (var, 0);
7428 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
7429 var = TREE_OPERAND (var, 0);
7430 if (TREE_CODE (var) == ADDR_EXPR)
7431 var = TREE_OPERAND (var, 0);
7432 else
7434 /* If this is a pointer or referenced based array
7435 section, the var could be private in the outer
7436 context e.g. on orphaned loop construct. Pretend this
7437 is private variable's outer reference. */
7438 ccode = OMP_CLAUSE_PRIVATE;
7439 if (TREE_CODE (var) == INDIRECT_REF)
7440 var = TREE_OPERAND (var, 0);
7442 orig_var = var;
7443 if (is_variable_sized (var))
7445 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
7446 var = DECL_VALUE_EXPR (var);
7447 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
7448 var = TREE_OPERAND (var, 0);
7449 gcc_assert (DECL_P (var));
7452 new_var = lookup_decl (var, ctx);
7453 if (var == OMP_CLAUSE_DECL (c) && omp_is_reference (var))
7454 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
7455 ref = build_outer_var_ref (var, ctx, ccode);
7456 code = OMP_CLAUSE_REDUCTION_CODE (c);
7458 /* reduction(-:var) sums up the partial results, so it acts
7459 identically to reduction(+:var). */
7460 if (code == MINUS_EXPR)
7461 code = PLUS_EXPR;
7463 /* C/C++ permits FP/complex with || and &&. */
7464 bool is_fp_and_or = ((code == TRUTH_ANDIF_EXPR
7465 || code == TRUTH_ORIF_EXPR)
7466 && (FLOAT_TYPE_P (TREE_TYPE (new_var))
7467 || (TREE_CODE (TREE_TYPE (new_var))
7468 == COMPLEX_TYPE)));
7469 if (count == 1)
7471 tree addr = build_fold_addr_expr_loc (clause_loc, ref);
7473 addr = save_expr (addr);
7474 ref = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (addr)), addr);
7475 tree new_var2 = new_var;
7476 tree ref2 = ref;
7477 if (is_fp_and_or)
7479 tree zero = build_zero_cst (TREE_TYPE (new_var));
7480 new_var2 = fold_build2_loc (clause_loc, NE_EXPR,
7481 integer_type_node, new_var, zero);
7482 ref2 = fold_build2_loc (clause_loc, NE_EXPR, integer_type_node,
7483 ref, zero);
7485 x = fold_build2_loc (clause_loc, code, TREE_TYPE (new_var2), ref2,
7486 new_var2);
7487 if (is_fp_and_or)
7488 x = fold_convert (TREE_TYPE (new_var), x);
7489 x = build2 (OMP_ATOMIC, void_type_node, addr, x);
7490 OMP_ATOMIC_MEMORY_ORDER (x) = OMP_MEMORY_ORDER_RELAXED;
7491 gimplify_and_add (x, stmt_seqp);
7492 return;
7494 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
7496 tree d = OMP_CLAUSE_DECL (c);
7497 tree type = TREE_TYPE (d);
7498 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
7499 tree i = create_tmp_var (TREE_TYPE (v));
7500 tree ptype = build_pointer_type (TREE_TYPE (type));
7501 tree bias = TREE_OPERAND (d, 1);
7502 d = TREE_OPERAND (d, 0);
7503 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
7505 tree b = TREE_OPERAND (d, 1);
7506 b = maybe_lookup_decl (b, ctx);
7507 if (b == NULL)
7509 b = TREE_OPERAND (d, 1);
7510 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
7512 if (integer_zerop (bias))
7513 bias = b;
7514 else
7516 bias = fold_convert_loc (clause_loc, TREE_TYPE (b), bias);
7517 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
7518 TREE_TYPE (b), b, bias);
7520 d = TREE_OPERAND (d, 0);
7522 /* For ref build_outer_var_ref already performs this, so
7523 only new_var needs a dereference. */
7524 if (TREE_CODE (d) == INDIRECT_REF)
7526 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
7527 gcc_assert (omp_is_reference (var) && var == orig_var);
7529 else if (TREE_CODE (d) == ADDR_EXPR)
7531 if (orig_var == var)
7533 new_var = build_fold_addr_expr (new_var);
7534 ref = build_fold_addr_expr (ref);
7537 else
7539 gcc_assert (orig_var == var);
7540 if (omp_is_reference (var))
7541 ref = build_fold_addr_expr (ref);
7543 if (DECL_P (v))
7545 tree t = maybe_lookup_decl (v, ctx);
7546 if (t)
7547 v = t;
7548 else
7549 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
7550 gimplify_expr (&v, stmt_seqp, NULL, is_gimple_val, fb_rvalue);
7552 if (!integer_zerop (bias))
7554 bias = fold_convert_loc (clause_loc, sizetype, bias);
7555 new_var = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
7556 TREE_TYPE (new_var), new_var,
7557 unshare_expr (bias));
7558 ref = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
7559 TREE_TYPE (ref), ref, bias);
7561 new_var = fold_convert_loc (clause_loc, ptype, new_var);
7562 ref = fold_convert_loc (clause_loc, ptype, ref);
7563 tree m = create_tmp_var (ptype);
7564 gimplify_assign (m, new_var, stmt_seqp);
7565 new_var = m;
7566 m = create_tmp_var (ptype);
7567 gimplify_assign (m, ref, stmt_seqp);
7568 ref = m;
7569 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), stmt_seqp);
7570 tree body = create_artificial_label (UNKNOWN_LOCATION);
7571 tree end = create_artificial_label (UNKNOWN_LOCATION);
7572 gimple_seq_add_stmt (&sub_seq, gimple_build_label (body));
7573 tree priv = build_simple_mem_ref_loc (clause_loc, new_var);
7574 tree out = build_simple_mem_ref_loc (clause_loc, ref);
7575 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
7577 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
7578 tree decl_placeholder
7579 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
7580 SET_DECL_VALUE_EXPR (placeholder, out);
7581 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
7582 SET_DECL_VALUE_EXPR (decl_placeholder, priv);
7583 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
7584 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
7585 gimple_seq_add_seq (&sub_seq,
7586 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
7587 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
7588 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
7589 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
7591 else
7593 tree out2 = out;
7594 tree priv2 = priv;
7595 if (is_fp_and_or)
7597 tree zero = build_zero_cst (TREE_TYPE (out));
7598 out2 = fold_build2_loc (clause_loc, NE_EXPR,
7599 integer_type_node, out, zero);
7600 priv2 = fold_build2_loc (clause_loc, NE_EXPR,
7601 integer_type_node, priv, zero);
7603 x = build2 (code, TREE_TYPE (out2), out2, priv2);
7604 if (is_fp_and_or)
7605 x = fold_convert (TREE_TYPE (out), x);
7606 out = unshare_expr (out);
7607 gimplify_assign (out, x, &sub_seq);
7609 gimple *g = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
7610 TYPE_SIZE_UNIT (TREE_TYPE (type)));
7611 gimple_seq_add_stmt (&sub_seq, g);
7612 g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
7613 TYPE_SIZE_UNIT (TREE_TYPE (type)));
7614 gimple_seq_add_stmt (&sub_seq, g);
7615 g = gimple_build_assign (i, PLUS_EXPR, i,
7616 build_int_cst (TREE_TYPE (i), 1));
7617 gimple_seq_add_stmt (&sub_seq, g);
7618 g = gimple_build_cond (LE_EXPR, i, v, body, end);
7619 gimple_seq_add_stmt (&sub_seq, g);
7620 gimple_seq_add_stmt (&sub_seq, gimple_build_label (end));
7622 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
7624 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
7626 if (omp_is_reference (var)
7627 && !useless_type_conversion_p (TREE_TYPE (placeholder),
7628 TREE_TYPE (ref)))
7629 ref = build_fold_addr_expr_loc (clause_loc, ref);
7630 SET_DECL_VALUE_EXPR (placeholder, ref);
7631 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
7632 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
7633 gimple_seq_add_seq (&sub_seq, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
7634 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
7635 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
7637 else
7639 tree new_var2 = new_var;
7640 tree ref2 = ref;
7641 if (is_fp_and_or)
7643 tree zero = build_zero_cst (TREE_TYPE (new_var));
7644 new_var2 = fold_build2_loc (clause_loc, NE_EXPR,
7645 integer_type_node, new_var, zero);
7646 ref2 = fold_build2_loc (clause_loc, NE_EXPR, integer_type_node,
7647 ref, zero);
7649 x = build2 (code, TREE_TYPE (ref), ref2, new_var2);
7650 if (is_fp_and_or)
7651 x = fold_convert (TREE_TYPE (new_var), x);
7652 ref = build_outer_var_ref (var, ctx);
7653 gimplify_assign (ref, x, &sub_seq);
7657 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START),
7659 gimple_seq_add_stmt (stmt_seqp, stmt);
7661 gimple_seq_add_seq (stmt_seqp, sub_seq);
7663 if (clist)
7665 gimple_seq_add_seq (stmt_seqp, *clist);
7666 *clist = NULL;
7669 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END),
7671 gimple_seq_add_stmt (stmt_seqp, stmt);
7675 /* Generate code to implement the COPYPRIVATE clauses. */
7677 static void
7678 lower_copyprivate_clauses (tree clauses, gimple_seq *slist, gimple_seq *rlist,
7679 omp_context *ctx)
7681 tree c;
7683 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7685 tree var, new_var, ref, x;
7686 bool by_ref;
7687 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
7689 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYPRIVATE)
7690 continue;
7692 var = OMP_CLAUSE_DECL (c);
7693 by_ref = use_pointer_for_field (var, NULL);
7695 ref = build_sender_ref (var, ctx);
7696 x = new_var = lookup_decl_in_outer_ctx (var, ctx);
7697 if (by_ref)
7699 x = build_fold_addr_expr_loc (clause_loc, new_var);
7700 x = fold_convert_loc (clause_loc, TREE_TYPE (ref), x);
7702 gimplify_assign (ref, x, slist);
7704 ref = build_receiver_ref (var, false, ctx);
7705 if (by_ref)
7707 ref = fold_convert_loc (clause_loc,
7708 build_pointer_type (TREE_TYPE (new_var)),
7709 ref);
7710 ref = build_fold_indirect_ref_loc (clause_loc, ref);
7712 if (omp_is_reference (var))
7714 ref = fold_convert_loc (clause_loc, TREE_TYPE (new_var), ref);
7715 ref = build_simple_mem_ref_loc (clause_loc, ref);
7716 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
7718 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, ref);
7719 gimplify_and_add (x, rlist);
7724 /* Generate code to implement the clauses, FIRSTPRIVATE, COPYIN, LASTPRIVATE,
7725 and REDUCTION from the sender (aka parent) side. */
7727 static void
7728 lower_send_clauses (tree clauses, gimple_seq *ilist, gimple_seq *olist,
7729 omp_context *ctx)
7731 tree c, t;
7732 int ignored_looptemp = 0;
7733 bool is_taskloop = false;
7735 /* For taskloop, ignore first two _looptemp_ clauses, those are initialized
7736 by GOMP_taskloop. */
7737 if (is_task_ctx (ctx) && gimple_omp_task_taskloop_p (ctx->stmt))
7739 ignored_looptemp = 2;
7740 is_taskloop = true;
7743 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7745 tree val, ref, x, var;
7746 bool by_ref, do_in = false, do_out = false;
7747 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
7749 switch (OMP_CLAUSE_CODE (c))
7751 case OMP_CLAUSE_PRIVATE:
7752 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
7753 break;
7754 continue;
7755 case OMP_CLAUSE_FIRSTPRIVATE:
7756 case OMP_CLAUSE_COPYIN:
7757 case OMP_CLAUSE_LASTPRIVATE:
7758 case OMP_CLAUSE_IN_REDUCTION:
7759 case OMP_CLAUSE__REDUCTEMP_:
7760 break;
7761 case OMP_CLAUSE_REDUCTION:
7762 if (is_task_ctx (ctx) || OMP_CLAUSE_REDUCTION_TASK (c))
7763 continue;
7764 break;
7765 case OMP_CLAUSE_SHARED:
7766 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
7767 break;
7768 continue;
7769 case OMP_CLAUSE__LOOPTEMP_:
7770 if (ignored_looptemp)
7772 ignored_looptemp--;
7773 continue;
7775 break;
7776 default:
7777 continue;
7780 val = OMP_CLAUSE_DECL (c);
7781 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
7782 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION)
7783 && TREE_CODE (val) == MEM_REF)
7785 val = TREE_OPERAND (val, 0);
7786 if (TREE_CODE (val) == POINTER_PLUS_EXPR)
7787 val = TREE_OPERAND (val, 0);
7788 if (TREE_CODE (val) == INDIRECT_REF
7789 || TREE_CODE (val) == ADDR_EXPR)
7790 val = TREE_OPERAND (val, 0);
7791 if (is_variable_sized (val))
7792 continue;
7795 /* For OMP_CLAUSE_SHARED_FIRSTPRIVATE, look beyond the
7796 outer taskloop region. */
7797 omp_context *ctx_for_o = ctx;
7798 if (is_taskloop
7799 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
7800 && OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
7801 ctx_for_o = ctx->outer;
7803 var = lookup_decl_in_outer_ctx (val, ctx_for_o);
7805 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYIN
7806 && is_global_var (var)
7807 && (val == OMP_CLAUSE_DECL (c)
7808 || !is_task_ctx (ctx)
7809 || (TREE_CODE (TREE_TYPE (val)) != POINTER_TYPE
7810 && (TREE_CODE (TREE_TYPE (val)) != REFERENCE_TYPE
7811 || (TREE_CODE (TREE_TYPE (TREE_TYPE (val)))
7812 != POINTER_TYPE)))))
7813 continue;
7815 t = omp_member_access_dummy_var (var);
7816 if (t)
7818 var = DECL_VALUE_EXPR (var);
7819 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx_for_o);
7820 if (o != t)
7821 var = unshare_and_remap (var, t, o);
7822 else
7823 var = unshare_expr (var);
7826 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
7828 /* Handle taskloop firstprivate/lastprivate, where the
7829 lastprivate on GIMPLE_OMP_TASK is represented as
7830 OMP_CLAUSE_SHARED_FIRSTPRIVATE. */
7831 tree f = lookup_sfield ((splay_tree_key) &DECL_UID (val), ctx);
7832 x = omp_build_component_ref (ctx->sender_decl, f);
7833 if (use_pointer_for_field (val, ctx))
7834 var = build_fold_addr_expr (var);
7835 gimplify_assign (x, var, ilist);
7836 DECL_ABSTRACT_ORIGIN (f) = NULL;
7837 continue;
7840 if (((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
7841 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IN_REDUCTION)
7842 || val == OMP_CLAUSE_DECL (c))
7843 && is_variable_sized (val))
7844 continue;
7845 by_ref = use_pointer_for_field (val, NULL);
7847 switch (OMP_CLAUSE_CODE (c))
7849 case OMP_CLAUSE_FIRSTPRIVATE:
7850 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c)
7851 && !by_ref
7852 && is_task_ctx (ctx))
7853 TREE_NO_WARNING (var) = 1;
7854 do_in = true;
7855 break;
7857 case OMP_CLAUSE_PRIVATE:
7858 case OMP_CLAUSE_COPYIN:
7859 case OMP_CLAUSE__LOOPTEMP_:
7860 case OMP_CLAUSE__REDUCTEMP_:
7861 do_in = true;
7862 break;
7864 case OMP_CLAUSE_LASTPRIVATE:
7865 if (by_ref || omp_is_reference (val))
7867 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
7868 continue;
7869 do_in = true;
7871 else
7873 do_out = true;
7874 if (lang_hooks.decls.omp_private_outer_ref (val))
7875 do_in = true;
7877 break;
7879 case OMP_CLAUSE_REDUCTION:
7880 case OMP_CLAUSE_IN_REDUCTION:
7881 do_in = true;
7882 if (val == OMP_CLAUSE_DECL (c))
7884 if (is_task_ctx (ctx))
7885 by_ref = use_pointer_for_field (val, ctx);
7886 else
7887 do_out = !(by_ref || omp_is_reference (val));
7889 else
7890 by_ref = TREE_CODE (TREE_TYPE (val)) == ARRAY_TYPE;
7891 break;
7893 default:
7894 gcc_unreachable ();
7897 if (do_in)
7899 ref = build_sender_ref (val, ctx);
7900 x = by_ref ? build_fold_addr_expr_loc (clause_loc, var) : var;
7901 gimplify_assign (ref, x, ilist);
7902 if (is_task_ctx (ctx))
7903 DECL_ABSTRACT_ORIGIN (TREE_OPERAND (ref, 1)) = NULL;
7906 if (do_out)
7908 ref = build_sender_ref (val, ctx);
7909 gimplify_assign (var, ref, olist);
7914 /* Generate code to implement SHARED from the sender (aka parent)
7915 side. This is trickier, since GIMPLE_OMP_PARALLEL_CLAUSES doesn't
7916 list things that got automatically shared. */
7918 static void
7919 lower_send_shared_vars (gimple_seq *ilist, gimple_seq *olist, omp_context *ctx)
7921 tree var, ovar, nvar, t, f, x, record_type;
7923 if (ctx->record_type == NULL)
7924 return;
7926 record_type = ctx->srecord_type ? ctx->srecord_type : ctx->record_type;
7927 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
7929 ovar = DECL_ABSTRACT_ORIGIN (f);
7930 if (!ovar || TREE_CODE (ovar) == FIELD_DECL)
7931 continue;
7933 nvar = maybe_lookup_decl (ovar, ctx);
7934 if (!nvar
7935 || !DECL_HAS_VALUE_EXPR_P (nvar)
7936 || (ctx->allocate_map
7937 && ctx->allocate_map->get (ovar)))
7938 continue;
7940 /* If CTX is a nested parallel directive. Find the immediately
7941 enclosing parallel or workshare construct that contains a
7942 mapping for OVAR. */
7943 var = lookup_decl_in_outer_ctx (ovar, ctx);
7945 t = omp_member_access_dummy_var (var);
7946 if (t)
7948 var = DECL_VALUE_EXPR (var);
7949 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
7950 if (o != t)
7951 var = unshare_and_remap (var, t, o);
7952 else
7953 var = unshare_expr (var);
7956 if (use_pointer_for_field (ovar, ctx))
7958 x = build_sender_ref (ovar, ctx);
7959 if (TREE_CODE (TREE_TYPE (f)) == ARRAY_TYPE
7960 && TREE_TYPE (f) == TREE_TYPE (ovar))
7962 gcc_assert (is_parallel_ctx (ctx)
7963 && DECL_ARTIFICIAL (ovar));
7964 /* _condtemp_ clause. */
7965 var = build_constructor (TREE_TYPE (x), NULL);
7967 else
7968 var = build_fold_addr_expr (var);
7969 gimplify_assign (x, var, ilist);
7971 else
7973 x = build_sender_ref (ovar, ctx);
7974 gimplify_assign (x, var, ilist);
7976 if (!TREE_READONLY (var)
7977 /* We don't need to receive a new reference to a result
7978 or parm decl. In fact we may not store to it as we will
7979 invalidate any pending RSO and generate wrong gimple
7980 during inlining. */
7981 && !((TREE_CODE (var) == RESULT_DECL
7982 || TREE_CODE (var) == PARM_DECL)
7983 && DECL_BY_REFERENCE (var)))
7985 x = build_sender_ref (ovar, ctx);
7986 gimplify_assign (var, x, olist);
7992 /* Emit an OpenACC head marker call, encapulating the partitioning and
7993 other information that must be processed by the target compiler.
7994 Return the maximum number of dimensions the associated loop might
7995 be partitioned over. */
7997 static unsigned
7998 lower_oacc_head_mark (location_t loc, tree ddvar, tree clauses,
7999 gimple_seq *seq, omp_context *ctx)
8001 unsigned levels = 0;
8002 unsigned tag = 0;
8003 tree gang_static = NULL_TREE;
8004 auto_vec<tree, 5> args;
8006 args.quick_push (build_int_cst
8007 (integer_type_node, IFN_UNIQUE_OACC_HEAD_MARK));
8008 args.quick_push (ddvar);
8009 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
8011 switch (OMP_CLAUSE_CODE (c))
8013 case OMP_CLAUSE_GANG:
8014 tag |= OLF_DIM_GANG;
8015 gang_static = OMP_CLAUSE_GANG_STATIC_EXPR (c);
8016 /* static:* is represented by -1, and we can ignore it, as
8017 scheduling is always static. */
8018 if (gang_static && integer_minus_onep (gang_static))
8019 gang_static = NULL_TREE;
8020 levels++;
8021 break;
8023 case OMP_CLAUSE_WORKER:
8024 tag |= OLF_DIM_WORKER;
8025 levels++;
8026 break;
8028 case OMP_CLAUSE_VECTOR:
8029 tag |= OLF_DIM_VECTOR;
8030 levels++;
8031 break;
8033 case OMP_CLAUSE_SEQ:
8034 tag |= OLF_SEQ;
8035 break;
8037 case OMP_CLAUSE_AUTO:
8038 tag |= OLF_AUTO;
8039 break;
8041 case OMP_CLAUSE_INDEPENDENT:
8042 tag |= OLF_INDEPENDENT;
8043 break;
8045 case OMP_CLAUSE_TILE:
8046 tag |= OLF_TILE;
8047 break;
8049 default:
8050 continue;
8054 if (gang_static)
8056 if (DECL_P (gang_static))
8057 gang_static = build_outer_var_ref (gang_static, ctx);
8058 tag |= OLF_GANG_STATIC;
8061 omp_context *tgt = enclosing_target_ctx (ctx);
8062 if (!tgt || is_oacc_parallel_or_serial (tgt))
8064 else if (is_oacc_kernels (tgt))
8065 /* Not using this loops handling inside OpenACC 'kernels' regions. */
8066 gcc_unreachable ();
8067 else if (is_oacc_kernels_decomposed_part (tgt))
8069 else
8070 gcc_unreachable ();
8072 /* In a parallel region, loops are implicitly INDEPENDENT. */
8073 if (!tgt || is_oacc_parallel_or_serial (tgt))
8074 tag |= OLF_INDEPENDENT;
8076 /* Loops inside OpenACC 'kernels' decomposed parts' regions are expected to
8077 have an explicit 'seq' or 'independent' clause, and no 'auto' clause. */
8078 if (tgt && is_oacc_kernels_decomposed_part (tgt))
8080 gcc_assert (tag & (OLF_SEQ | OLF_INDEPENDENT));
8081 gcc_assert (!(tag & OLF_AUTO));
8084 if (tag & OLF_TILE)
8085 /* Tiling could use all 3 levels. */
8086 levels = 3;
8087 else
8089 /* A loop lacking SEQ, GANG, WORKER and/or VECTOR could be AUTO.
8090 Ensure at least one level, or 2 for possible auto
8091 partitioning */
8092 bool maybe_auto = !(tag & (((GOMP_DIM_MASK (GOMP_DIM_MAX) - 1)
8093 << OLF_DIM_BASE) | OLF_SEQ));
8095 if (levels < 1u + maybe_auto)
8096 levels = 1u + maybe_auto;
8099 args.quick_push (build_int_cst (integer_type_node, levels));
8100 args.quick_push (build_int_cst (integer_type_node, tag));
8101 if (gang_static)
8102 args.quick_push (gang_static);
8104 gcall *call = gimple_build_call_internal_vec (IFN_UNIQUE, args);
8105 gimple_set_location (call, loc);
8106 gimple_set_lhs (call, ddvar);
8107 gimple_seq_add_stmt (seq, call);
8109 return levels;
8112 /* Emit an OpenACC lopp head or tail marker to SEQ. LEVEL is the
8113 partitioning level of the enclosed region. */
8115 static void
8116 lower_oacc_loop_marker (location_t loc, tree ddvar, bool head,
8117 tree tofollow, gimple_seq *seq)
8119 int marker_kind = (head ? IFN_UNIQUE_OACC_HEAD_MARK
8120 : IFN_UNIQUE_OACC_TAIL_MARK);
8121 tree marker = build_int_cst (integer_type_node, marker_kind);
8122 int nargs = 2 + (tofollow != NULL_TREE);
8123 gcall *call = gimple_build_call_internal (IFN_UNIQUE, nargs,
8124 marker, ddvar, tofollow);
8125 gimple_set_location (call, loc);
8126 gimple_set_lhs (call, ddvar);
8127 gimple_seq_add_stmt (seq, call);
8130 /* Generate the before and after OpenACC loop sequences. CLAUSES are
8131 the loop clauses, from which we extract reductions. Initialize
8132 HEAD and TAIL. */
8134 static void
8135 lower_oacc_head_tail (location_t loc, tree clauses, gcall *private_marker,
8136 gimple_seq *head, gimple_seq *tail, omp_context *ctx)
8138 bool inner = false;
8139 tree ddvar = create_tmp_var (integer_type_node, ".data_dep");
8140 gimple_seq_add_stmt (head, gimple_build_assign (ddvar, integer_zero_node));
8142 unsigned count = lower_oacc_head_mark (loc, ddvar, clauses, head, ctx);
8144 if (private_marker)
8146 gimple_set_location (private_marker, loc);
8147 gimple_call_set_lhs (private_marker, ddvar);
8148 gimple_call_set_arg (private_marker, 1, ddvar);
8151 tree fork_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_FORK);
8152 tree join_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_JOIN);
8154 gcc_assert (count);
8155 for (unsigned done = 1; count; count--, done++)
8157 gimple_seq fork_seq = NULL;
8158 gimple_seq join_seq = NULL;
8160 tree place = build_int_cst (integer_type_node, -1);
8161 gcall *fork = gimple_build_call_internal (IFN_UNIQUE, 3,
8162 fork_kind, ddvar, place);
8163 gimple_set_location (fork, loc);
8164 gimple_set_lhs (fork, ddvar);
8166 gcall *join = gimple_build_call_internal (IFN_UNIQUE, 3,
8167 join_kind, ddvar, place);
8168 gimple_set_location (join, loc);
8169 gimple_set_lhs (join, ddvar);
8171 /* Mark the beginning of this level sequence. */
8172 if (inner)
8173 lower_oacc_loop_marker (loc, ddvar, true,
8174 build_int_cst (integer_type_node, count),
8175 &fork_seq);
8176 lower_oacc_loop_marker (loc, ddvar, false,
8177 build_int_cst (integer_type_node, done),
8178 &join_seq);
8180 lower_oacc_reductions (loc, clauses, place, inner,
8181 fork, (count == 1) ? private_marker : NULL,
8182 join, &fork_seq, &join_seq, ctx);
8184 /* Append this level to head. */
8185 gimple_seq_add_seq (head, fork_seq);
8186 /* Prepend it to tail. */
8187 gimple_seq_add_seq (&join_seq, *tail);
8188 *tail = join_seq;
8190 inner = true;
8193 /* Mark the end of the sequence. */
8194 lower_oacc_loop_marker (loc, ddvar, true, NULL_TREE, head);
8195 lower_oacc_loop_marker (loc, ddvar, false, NULL_TREE, tail);
8198 /* If exceptions are enabled, wrap the statements in BODY in a MUST_NOT_THROW
8199 catch handler and return it. This prevents programs from violating the
8200 structured block semantics with throws. */
8202 static gimple_seq
8203 maybe_catch_exception (gimple_seq body)
8205 gimple *g;
8206 tree decl;
8208 if (!flag_exceptions)
8209 return body;
8211 if (lang_hooks.eh_protect_cleanup_actions != NULL)
8212 decl = lang_hooks.eh_protect_cleanup_actions ();
8213 else
8214 decl = builtin_decl_explicit (BUILT_IN_TRAP);
8216 g = gimple_build_eh_must_not_throw (decl);
8217 g = gimple_build_try (body, gimple_seq_alloc_with_stmt (g),
8218 GIMPLE_TRY_CATCH);
8220 return gimple_seq_alloc_with_stmt (g);
8224 /* Routines to lower OMP directives into OMP-GIMPLE. */
8226 /* If ctx is a worksharing context inside of a cancellable parallel
8227 region and it isn't nowait, add lhs to its GIMPLE_OMP_RETURN
8228 and conditional branch to parallel's cancel_label to handle
8229 cancellation in the implicit barrier. */
8231 static void
8232 maybe_add_implicit_barrier_cancel (omp_context *ctx, gimple *omp_return,
8233 gimple_seq *body)
8235 gcc_assert (gimple_code (omp_return) == GIMPLE_OMP_RETURN);
8236 if (gimple_omp_return_nowait_p (omp_return))
8237 return;
8238 for (omp_context *outer = ctx->outer; outer; outer = outer->outer)
8239 if (gimple_code (outer->stmt) == GIMPLE_OMP_PARALLEL
8240 && outer->cancellable)
8242 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_CANCEL);
8243 tree c_bool_type = TREE_TYPE (TREE_TYPE (fndecl));
8244 tree lhs = create_tmp_var (c_bool_type);
8245 gimple_omp_return_set_lhs (omp_return, lhs);
8246 tree fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
8247 gimple *g = gimple_build_cond (NE_EXPR, lhs,
8248 fold_convert (c_bool_type,
8249 boolean_false_node),
8250 outer->cancel_label, fallthru_label);
8251 gimple_seq_add_stmt (body, g);
8252 gimple_seq_add_stmt (body, gimple_build_label (fallthru_label));
8254 else if (gimple_code (outer->stmt) != GIMPLE_OMP_TASKGROUP)
8255 return;
8258 /* Find the first task_reduction or reduction clause or return NULL
8259 if there are none. */
8261 static inline tree
8262 omp_task_reductions_find_first (tree clauses, enum tree_code code,
8263 enum omp_clause_code ccode)
8265 while (1)
8267 clauses = omp_find_clause (clauses, ccode);
8268 if (clauses == NULL_TREE)
8269 return NULL_TREE;
8270 if (ccode != OMP_CLAUSE_REDUCTION
8271 || code == OMP_TASKLOOP
8272 || OMP_CLAUSE_REDUCTION_TASK (clauses))
8273 return clauses;
8274 clauses = OMP_CLAUSE_CHAIN (clauses);
8278 static void lower_omp_task_reductions (omp_context *, enum tree_code, tree,
8279 gimple_seq *, gimple_seq *);
8281 /* Lower the OpenMP sections directive in the current statement in GSI_P.
8282 CTX is the enclosing OMP context for the current statement. */
8284 static void
8285 lower_omp_sections (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8287 tree block, control;
8288 gimple_stmt_iterator tgsi;
8289 gomp_sections *stmt;
8290 gimple *t;
8291 gbind *new_stmt, *bind;
8292 gimple_seq ilist, dlist, olist, tred_dlist = NULL, clist = NULL, new_body;
8294 stmt = as_a <gomp_sections *> (gsi_stmt (*gsi_p));
8296 push_gimplify_context ();
8298 dlist = NULL;
8299 ilist = NULL;
8301 tree rclauses
8302 = omp_task_reductions_find_first (gimple_omp_sections_clauses (stmt),
8303 OMP_SECTIONS, OMP_CLAUSE_REDUCTION);
8304 tree rtmp = NULL_TREE;
8305 if (rclauses)
8307 tree type = build_pointer_type (pointer_sized_int_node);
8308 tree temp = create_tmp_var (type);
8309 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
8310 OMP_CLAUSE_DECL (c) = temp;
8311 OMP_CLAUSE_CHAIN (c) = gimple_omp_sections_clauses (stmt);
8312 gimple_omp_sections_set_clauses (stmt, c);
8313 lower_omp_task_reductions (ctx, OMP_SECTIONS,
8314 gimple_omp_sections_clauses (stmt),
8315 &ilist, &tred_dlist);
8316 rclauses = c;
8317 rtmp = make_ssa_name (type);
8318 gimple_seq_add_stmt (&ilist, gimple_build_assign (rtmp, temp));
8321 tree *clauses_ptr = gimple_omp_sections_clauses_ptr (stmt);
8322 lower_lastprivate_conditional_clauses (clauses_ptr, ctx);
8324 lower_rec_input_clauses (gimple_omp_sections_clauses (stmt),
8325 &ilist, &dlist, ctx, NULL);
8327 control = create_tmp_var (unsigned_type_node, ".section");
8328 gimple_omp_sections_set_control (stmt, control);
8330 new_body = gimple_omp_body (stmt);
8331 gimple_omp_set_body (stmt, NULL);
8332 tgsi = gsi_start (new_body);
8333 for (; !gsi_end_p (tgsi); gsi_next (&tgsi))
8335 omp_context *sctx;
8336 gimple *sec_start;
8338 sec_start = gsi_stmt (tgsi);
8339 sctx = maybe_lookup_ctx (sec_start);
8340 gcc_assert (sctx);
8342 lower_omp (gimple_omp_body_ptr (sec_start), sctx);
8343 gsi_insert_seq_after (&tgsi, gimple_omp_body (sec_start),
8344 GSI_CONTINUE_LINKING);
8345 gimple_omp_set_body (sec_start, NULL);
8347 if (gsi_one_before_end_p (tgsi))
8349 gimple_seq l = NULL;
8350 lower_lastprivate_clauses (gimple_omp_sections_clauses (stmt), NULL,
8351 &ilist, &l, &clist, ctx);
8352 gsi_insert_seq_after (&tgsi, l, GSI_CONTINUE_LINKING);
8353 gimple_omp_section_set_last (sec_start);
8356 gsi_insert_after (&tgsi, gimple_build_omp_return (false),
8357 GSI_CONTINUE_LINKING);
8360 block = make_node (BLOCK);
8361 bind = gimple_build_bind (NULL, new_body, block);
8363 olist = NULL;
8364 lower_reduction_clauses (gimple_omp_sections_clauses (stmt), &olist,
8365 &clist, ctx);
8366 if (clist)
8368 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START);
8369 gcall *g = gimple_build_call (fndecl, 0);
8370 gimple_seq_add_stmt (&olist, g);
8371 gimple_seq_add_seq (&olist, clist);
8372 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END);
8373 g = gimple_build_call (fndecl, 0);
8374 gimple_seq_add_stmt (&olist, g);
8377 block = make_node (BLOCK);
8378 new_stmt = gimple_build_bind (NULL, NULL, block);
8379 gsi_replace (gsi_p, new_stmt, true);
8381 pop_gimplify_context (new_stmt);
8382 gimple_bind_append_vars (new_stmt, ctx->block_vars);
8383 BLOCK_VARS (block) = gimple_bind_vars (bind);
8384 if (BLOCK_VARS (block))
8385 TREE_USED (block) = 1;
8387 new_body = NULL;
8388 gimple_seq_add_seq (&new_body, ilist);
8389 gimple_seq_add_stmt (&new_body, stmt);
8390 gimple_seq_add_stmt (&new_body, gimple_build_omp_sections_switch ());
8391 gimple_seq_add_stmt (&new_body, bind);
8393 t = gimple_build_omp_continue (control, control);
8394 gimple_seq_add_stmt (&new_body, t);
8396 gimple_seq_add_seq (&new_body, olist);
8397 if (ctx->cancellable)
8398 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
8399 gimple_seq_add_seq (&new_body, dlist);
8401 new_body = maybe_catch_exception (new_body);
8403 bool nowait = omp_find_clause (gimple_omp_sections_clauses (stmt),
8404 OMP_CLAUSE_NOWAIT) != NULL_TREE;
8405 t = gimple_build_omp_return (nowait);
8406 gimple_seq_add_stmt (&new_body, t);
8407 gimple_seq_add_seq (&new_body, tred_dlist);
8408 maybe_add_implicit_barrier_cancel (ctx, t, &new_body);
8410 if (rclauses)
8411 OMP_CLAUSE_DECL (rclauses) = rtmp;
8413 gimple_bind_set_body (new_stmt, new_body);
8417 /* A subroutine of lower_omp_single. Expand the simple form of
8418 a GIMPLE_OMP_SINGLE, without a copyprivate clause:
8420 if (GOMP_single_start ())
8421 BODY;
8422 [ GOMP_barrier (); ] -> unless 'nowait' is present.
8424 FIXME. It may be better to delay expanding the logic of this until
8425 pass_expand_omp. The expanded logic may make the job more difficult
8426 to a synchronization analysis pass. */
8428 static void
8429 lower_omp_single_simple (gomp_single *single_stmt, gimple_seq *pre_p)
8431 location_t loc = gimple_location (single_stmt);
8432 tree tlabel = create_artificial_label (loc);
8433 tree flabel = create_artificial_label (loc);
8434 gimple *call, *cond;
8435 tree lhs, decl;
8437 decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_START);
8438 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (decl)));
8439 call = gimple_build_call (decl, 0);
8440 gimple_call_set_lhs (call, lhs);
8441 gimple_seq_add_stmt (pre_p, call);
8443 cond = gimple_build_cond (EQ_EXPR, lhs,
8444 fold_convert_loc (loc, TREE_TYPE (lhs),
8445 boolean_true_node),
8446 tlabel, flabel);
8447 gimple_seq_add_stmt (pre_p, cond);
8448 gimple_seq_add_stmt (pre_p, gimple_build_label (tlabel));
8449 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
8450 gimple_seq_add_stmt (pre_p, gimple_build_label (flabel));
8454 /* A subroutine of lower_omp_single. Expand the simple form of
8455 a GIMPLE_OMP_SINGLE, with a copyprivate clause:
8457 #pragma omp single copyprivate (a, b, c)
8459 Create a new structure to hold copies of 'a', 'b' and 'c' and emit:
8462 if ((copyout_p = GOMP_single_copy_start ()) == NULL)
8464 BODY;
8465 copyout.a = a;
8466 copyout.b = b;
8467 copyout.c = c;
8468 GOMP_single_copy_end (&copyout);
8470 else
8472 a = copyout_p->a;
8473 b = copyout_p->b;
8474 c = copyout_p->c;
8476 GOMP_barrier ();
8479 FIXME. It may be better to delay expanding the logic of this until
8480 pass_expand_omp. The expanded logic may make the job more difficult
8481 to a synchronization analysis pass. */
8483 static void
8484 lower_omp_single_copy (gomp_single *single_stmt, gimple_seq *pre_p,
8485 omp_context *ctx)
8487 tree ptr_type, t, l0, l1, l2, bfn_decl;
8488 gimple_seq copyin_seq;
8489 location_t loc = gimple_location (single_stmt);
8491 ctx->sender_decl = create_tmp_var (ctx->record_type, ".omp_copy_o");
8493 ptr_type = build_pointer_type (ctx->record_type);
8494 ctx->receiver_decl = create_tmp_var (ptr_type, ".omp_copy_i");
8496 l0 = create_artificial_label (loc);
8497 l1 = create_artificial_label (loc);
8498 l2 = create_artificial_label (loc);
8500 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_START);
8501 t = build_call_expr_loc (loc, bfn_decl, 0);
8502 t = fold_convert_loc (loc, ptr_type, t);
8503 gimplify_assign (ctx->receiver_decl, t, pre_p);
8505 t = build2 (EQ_EXPR, boolean_type_node, ctx->receiver_decl,
8506 build_int_cst (ptr_type, 0));
8507 t = build3 (COND_EXPR, void_type_node, t,
8508 build_and_jump (&l0), build_and_jump (&l1));
8509 gimplify_and_add (t, pre_p);
8511 gimple_seq_add_stmt (pre_p, gimple_build_label (l0));
8513 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
8515 copyin_seq = NULL;
8516 lower_copyprivate_clauses (gimple_omp_single_clauses (single_stmt), pre_p,
8517 &copyin_seq, ctx);
8519 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
8520 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_END);
8521 t = build_call_expr_loc (loc, bfn_decl, 1, t);
8522 gimplify_and_add (t, pre_p);
8524 t = build_and_jump (&l2);
8525 gimplify_and_add (t, pre_p);
8527 gimple_seq_add_stmt (pre_p, gimple_build_label (l1));
8529 gimple_seq_add_seq (pre_p, copyin_seq);
8531 gimple_seq_add_stmt (pre_p, gimple_build_label (l2));
8535 /* Expand code for an OpenMP single directive. */
8537 static void
8538 lower_omp_single (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8540 tree block;
8541 gomp_single *single_stmt = as_a <gomp_single *> (gsi_stmt (*gsi_p));
8542 gbind *bind;
8543 gimple_seq bind_body, bind_body_tail = NULL, dlist;
8545 push_gimplify_context ();
8547 block = make_node (BLOCK);
8548 bind = gimple_build_bind (NULL, NULL, block);
8549 gsi_replace (gsi_p, bind, true);
8550 bind_body = NULL;
8551 dlist = NULL;
8552 lower_rec_input_clauses (gimple_omp_single_clauses (single_stmt),
8553 &bind_body, &dlist, ctx, NULL);
8554 lower_omp (gimple_omp_body_ptr (single_stmt), ctx);
8556 gimple_seq_add_stmt (&bind_body, single_stmt);
8558 if (ctx->record_type)
8559 lower_omp_single_copy (single_stmt, &bind_body, ctx);
8560 else
8561 lower_omp_single_simple (single_stmt, &bind_body);
8563 gimple_omp_set_body (single_stmt, NULL);
8565 gimple_seq_add_seq (&bind_body, dlist);
8567 bind_body = maybe_catch_exception (bind_body);
8569 bool nowait = omp_find_clause (gimple_omp_single_clauses (single_stmt),
8570 OMP_CLAUSE_NOWAIT) != NULL_TREE;
8571 gimple *g = gimple_build_omp_return (nowait);
8572 gimple_seq_add_stmt (&bind_body_tail, g);
8573 maybe_add_implicit_barrier_cancel (ctx, g, &bind_body_tail);
8574 if (ctx->record_type)
8576 gimple_stmt_iterator gsi = gsi_start (bind_body_tail);
8577 tree clobber = build_clobber (ctx->record_type);
8578 gsi_insert_after (&gsi, gimple_build_assign (ctx->sender_decl,
8579 clobber), GSI_SAME_STMT);
8581 gimple_seq_add_seq (&bind_body, bind_body_tail);
8582 gimple_bind_set_body (bind, bind_body);
8584 pop_gimplify_context (bind);
8586 gimple_bind_append_vars (bind, ctx->block_vars);
8587 BLOCK_VARS (block) = ctx->block_vars;
8588 if (BLOCK_VARS (block))
8589 TREE_USED (block) = 1;
8593 /* Expand code for an OpenMP master directive. */
8595 static void
8596 lower_omp_master (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8598 tree block, lab = NULL, x, bfn_decl;
8599 gimple *stmt = gsi_stmt (*gsi_p);
8600 gbind *bind;
8601 location_t loc = gimple_location (stmt);
8602 gimple_seq tseq;
8604 push_gimplify_context ();
8606 block = make_node (BLOCK);
8607 bind = gimple_build_bind (NULL, NULL, block);
8608 gsi_replace (gsi_p, bind, true);
8609 gimple_bind_add_stmt (bind, stmt);
8611 bfn_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
8612 x = build_call_expr_loc (loc, bfn_decl, 0);
8613 x = build2 (EQ_EXPR, boolean_type_node, x, integer_zero_node);
8614 x = build3 (COND_EXPR, void_type_node, x, NULL, build_and_jump (&lab));
8615 tseq = NULL;
8616 gimplify_and_add (x, &tseq);
8617 gimple_bind_add_seq (bind, tseq);
8619 lower_omp (gimple_omp_body_ptr (stmt), ctx);
8620 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
8621 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
8622 gimple_omp_set_body (stmt, NULL);
8624 gimple_bind_add_stmt (bind, gimple_build_label (lab));
8626 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
8628 pop_gimplify_context (bind);
8630 gimple_bind_append_vars (bind, ctx->block_vars);
8631 BLOCK_VARS (block) = ctx->block_vars;
8634 /* Helper function for lower_omp_task_reductions. For a specific PASS
8635 find out the current clause it should be processed, or return false
8636 if all have been processed already. */
8638 static inline bool
8639 omp_task_reduction_iterate (int pass, enum tree_code code,
8640 enum omp_clause_code ccode, tree *c, tree *decl,
8641 tree *type, tree *next)
8643 for (; *c; *c = omp_find_clause (OMP_CLAUSE_CHAIN (*c), ccode))
8645 if (ccode == OMP_CLAUSE_REDUCTION
8646 && code != OMP_TASKLOOP
8647 && !OMP_CLAUSE_REDUCTION_TASK (*c))
8648 continue;
8649 *decl = OMP_CLAUSE_DECL (*c);
8650 *type = TREE_TYPE (*decl);
8651 if (TREE_CODE (*decl) == MEM_REF)
8653 if (pass != 1)
8654 continue;
8656 else
8658 if (omp_is_reference (*decl))
8659 *type = TREE_TYPE (*type);
8660 if (pass != (!TREE_CONSTANT (TYPE_SIZE_UNIT (*type))))
8661 continue;
8663 *next = omp_find_clause (OMP_CLAUSE_CHAIN (*c), ccode);
8664 return true;
8666 *decl = NULL_TREE;
8667 *type = NULL_TREE;
8668 *next = NULL_TREE;
8669 return false;
8672 /* Lower task_reduction and reduction clauses (the latter unless CODE is
8673 OMP_TASKGROUP only with task modifier). Register mapping of those in
8674 START sequence and reducing them and unregister them in the END sequence. */
/* NOTE(review): this listing retains original-source line numbers from a
   mangled extraction; brace-only and blank lines were dropped (visible as
   gaps in the embedded numbering).  Code left byte-identical; comments
   below only orient the reader through the major phases.  */
8676 static void
8677 lower_omp_task_reductions (omp_context *ctx, enum tree_code code, tree clauses,
8678 gimple_seq *start, gimple_seq *end)
8680 enum omp_clause_code ccode
8681 = (code == OMP_TASKGROUP
8682 ? OMP_CLAUSE_TASK_REDUCTION : OMP_CLAUSE_REDUCTION);
8683 tree cancellable = NULL_TREE;
8684 clauses = omp_task_reductions_find_first (clauses, code, ccode);
8685 if (clauses == NULL_TREE)
8686 return;
/* For worksharing constructs inside a cancellable parallel, remember that
   cancellation handling is needed (refined to the _reductemp_ decl later).  */
8687 if (code == OMP_FOR || code == OMP_SECTIONS)
8689 for (omp_context *outer = ctx->outer; outer; outer = outer->outer)
8690 if (gimple_code (outer->stmt) == GIMPLE_OMP_PARALLEL
8691 && outer->cancellable)
8693 cancellable = error_mark_node;
8694 break;
8696 else if (gimple_code (outer->stmt) != GIMPLE_OMP_TASKGROUP)
8697 break;
/* Phase 1: build a record type describing one thread's private copy block:
   optionally a pointer + int pair for cancellation, then per reduction a
   data field followed by a bool flag field.  */
8699 tree record_type = lang_hooks.types.make_type (RECORD_TYPE);
8700 tree *last = &TYPE_FIELDS (record_type);
8701 unsigned cnt = 0;
8702 if (cancellable)
8704 tree field = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
8705 ptr_type_node);
8706 tree ifield = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
8707 integer_type_node);
8708 *last = field;
8709 DECL_CHAIN (field) = ifield;
8710 last = &DECL_CHAIN (ifield);
8711 DECL_CONTEXT (field) = record_type;
8712 if (TYPE_ALIGN (record_type) < DECL_ALIGN (field))
8713 SET_TYPE_ALIGN (record_type, DECL_ALIGN (field));
8714 DECL_CONTEXT (ifield) = record_type;
8715 if (TYPE_ALIGN (record_type) < DECL_ALIGN (ifield))
8716 SET_TYPE_ALIGN (record_type, DECL_ALIGN (ifield));
8718 for (int pass = 0; pass < 2; pass++)
8720 tree decl, type, next;
8721 for (tree c = clauses;
8722 omp_task_reduction_iterate (pass, code, ccode,
8723 &c, &decl, &type, &next); c = next)
8725 ++cnt;
8726 tree new_type = type;
8727 if (ctx->outer)
8728 new_type = remap_type (type, &ctx->outer->cb);
8729 tree field
8730 = build_decl (OMP_CLAUSE_LOCATION (c), FIELD_DECL,
8731 DECL_P (decl) ? DECL_NAME (decl) : NULL_TREE,
8732 new_type);
8733 if (DECL_P (decl) && type == TREE_TYPE (decl))
8735 SET_DECL_ALIGN (field, DECL_ALIGN (decl));
8736 DECL_USER_ALIGN (field) = DECL_USER_ALIGN (decl);
8737 TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (decl);
8739 else
8740 SET_DECL_ALIGN (field, TYPE_ALIGN (type));
8741 DECL_CONTEXT (field) = record_type;
8742 if (TYPE_ALIGN (record_type) < DECL_ALIGN (field))
8743 SET_TYPE_ALIGN (record_type, DECL_ALIGN (field));
8744 *last = field;
8745 last = &DECL_CHAIN (field);
8746 tree bfield
8747 = build_decl (OMP_CLAUSE_LOCATION (c), FIELD_DECL, NULL_TREE,
8748 boolean_type_node);
8749 DECL_CONTEXT (bfield) = record_type;
8750 if (TYPE_ALIGN (record_type) < DECL_ALIGN (bfield))
8751 SET_TYPE_ALIGN (record_type, DECL_ALIGN (bfield));
8752 *last = bfield;
8753 last = &DECL_CHAIN (bfield);
8756 *last = NULL_TREE;
8757 layout_type (record_type);
8759 /* Build up an array which registers with the runtime all the reductions
8760 and deregisters them at the end. Format documented in libgomp/task.c. */
8761 tree atype = build_array_type_nelts (pointer_sized_int_node, 7 + cnt * 3);
8762 tree avar = create_tmp_var_raw (atype);
8763 gimple_add_tmp_var (avar);
8764 TREE_ADDRESSABLE (avar) = 1;
8765 tree r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_zero_node,
8766 NULL_TREE, NULL_TREE);
8767 tree t = build_int_cst (pointer_sized_int_node, cnt);
8768 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
8769 gimple_seq seq = NULL;
/* Round the per-thread block size up to a multiple of the (assumed 64-byte)
   cache line to avoid false sharing between threads.  */
8770 tree sz = fold_convert (pointer_sized_int_node,
8771 TYPE_SIZE_UNIT (record_type));
8772 int cachesz = 64;
8773 sz = fold_build2 (PLUS_EXPR, pointer_sized_int_node, sz,
8774 build_int_cst (pointer_sized_int_node, cachesz - 1));
8775 sz = fold_build2 (BIT_AND_EXPR, pointer_sized_int_node, sz,
8776 build_int_cst (pointer_sized_int_node, ~(cachesz - 1)));
8777 ctx->task_reductions.create (1 + cnt);
8778 ctx->task_reduction_map = new hash_map<tree, unsigned>;
8779 ctx->task_reductions.quick_push (TREE_CODE (sz) == INTEGER_CST
8780 ? sz : NULL_TREE);
8781 sz = force_gimple_operand (sz, &seq, true, NULL_TREE);
8782 gimple_seq_add_seq (start, seq);
8783 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_one_node,
8784 NULL_TREE, NULL_TREE);
8785 gimple_seq_add_stmt (start, gimple_build_assign (r, sz));
8786 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (2),
8787 NULL_TREE, NULL_TREE);
8788 t = build_int_cst (pointer_sized_int_node,
8789 MAX (TYPE_ALIGN_UNIT (record_type), (unsigned) cachesz));
8790 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
8791 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (3),
8792 NULL_TREE, NULL_TREE);
8793 t = build_int_cst (pointer_sized_int_node, -1);
8794 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
8795 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (4),
8796 NULL_TREE, NULL_TREE);
8797 t = build_int_cst (pointer_sized_int_node, 0);
8798 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
8800 /* In end, build a loop that iterates from 0 to < omp_get_num_threads ()
8801 and for each task reduction checks a bool right after the private variable
8802 within that thread's chunk; if the bool is clear, it hasn't been
8803 initialized and thus isn't going to be reduced nor destructed, otherwise
8804 reduce and destruct it. */
8805 tree idx = create_tmp_var (size_type_node);
8806 gimple_seq_add_stmt (end, gimple_build_assign (idx, size_zero_node));
8807 tree num_thr_sz = create_tmp_var (size_type_node);
8808 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
8809 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
8810 tree lab3 = NULL_TREE, lab7 = NULL_TREE;
8811 gimple *g;
8812 if (code == OMP_FOR || code == OMP_SECTIONS)
8814 /* For worksharing constructs, only perform it in the master thread,
8815 with the exception of cancelled implicit barriers - then only handle
8816 the current thread. */
8817 tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
8818 t = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
8819 tree thr_num = create_tmp_var (integer_type_node);
8820 g = gimple_build_call (t, 0);
8821 gimple_call_set_lhs (g, thr_num);
8822 gimple_seq_add_stmt (end, g);
8823 if (cancellable)
8825 tree c;
8826 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
8827 tree lab6 = create_artificial_label (UNKNOWN_LOCATION);
8828 lab3 = create_artificial_label (UNKNOWN_LOCATION);
8829 if (code == OMP_FOR)
8830 c = gimple_omp_for_clauses (ctx->stmt);
8831 else /* if (code == OMP_SECTIONS) */
8832 c = gimple_omp_sections_clauses (ctx->stmt);
8833 c = OMP_CLAUSE_DECL (omp_find_clause (c, OMP_CLAUSE__REDUCTEMP_));
8834 cancellable = c;
8835 g = gimple_build_cond (NE_EXPR, c, build_zero_cst (TREE_TYPE (c)),
8836 lab5, lab6);
8837 gimple_seq_add_stmt (end, g);
8838 gimple_seq_add_stmt (end, gimple_build_label (lab5));
8839 g = gimple_build_assign (idx, NOP_EXPR, thr_num);
8840 gimple_seq_add_stmt (end, g);
8841 g = gimple_build_assign (num_thr_sz, PLUS_EXPR, idx,
8842 build_one_cst (TREE_TYPE (idx)));
8843 gimple_seq_add_stmt (end, g);
8844 gimple_seq_add_stmt (end, gimple_build_goto (lab3));
8845 gimple_seq_add_stmt (end, gimple_build_label (lab6));
8847 g = gimple_build_cond (NE_EXPR, thr_num, integer_zero_node, lab2, lab4);
8848 gimple_seq_add_stmt (end, g);
8849 gimple_seq_add_stmt (end, gimple_build_label (lab4));
8851 if (code != OMP_PARALLEL)
8853 t = builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS);
8854 tree num_thr = create_tmp_var (integer_type_node);
8855 g = gimple_build_call (t, 0);
8856 gimple_call_set_lhs (g, num_thr);
8857 gimple_seq_add_stmt (end, g);
8858 g = gimple_build_assign (num_thr_sz, NOP_EXPR, num_thr);
8859 gimple_seq_add_stmt (end, g);
8860 if (cancellable)
8861 gimple_seq_add_stmt (end, gimple_build_label (lab3));
8863 else
8865 tree c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
8866 OMP_CLAUSE__REDUCTEMP_);
8867 t = fold_convert (pointer_sized_int_node, OMP_CLAUSE_DECL (c));
8868 t = fold_convert (size_type_node, t);
8869 gimplify_assign (num_thr_sz, t, end);
8871 t = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (2),
8872 NULL_TREE, NULL_TREE);
8873 tree data = create_tmp_var (pointer_sized_int_node);
8874 gimple_seq_add_stmt (end, gimple_build_assign (data, t));
8875 if (code == OMP_TASKLOOP)
8877 lab7 = create_artificial_label (UNKNOWN_LOCATION);
8878 g = gimple_build_cond (NE_EXPR, data,
8879 build_zero_cst (pointer_sized_int_node),
8880 lab1, lab7);
8881 gimple_seq_add_stmt (end, g);
8883 gimple_seq_add_stmt (end, gimple_build_label (lab1));
8884 tree ptr;
8885 if (TREE_CODE (TYPE_SIZE_UNIT (record_type)) == INTEGER_CST)
8886 ptr = create_tmp_var (build_pointer_type (record_type));
8887 else
8888 ptr = create_tmp_var (ptr_type_node);
8889 gimple_seq_add_stmt (end, gimple_build_assign (ptr, NOP_EXPR, data));
8891 tree field = TYPE_FIELDS (record_type);
8892 cnt = 0;
8893 if (cancellable)
8894 field = DECL_CHAIN (DECL_CHAIN (field));
/* Phase 2: walk the clauses again; emit registration entries (address and
   field offset) into START, and the per-thread reduce/destruct code into
   the END loop body.  */
8895 for (int pass = 0; pass < 2; pass++)
8897 tree decl, type, next;
8898 for (tree c = clauses;
8899 omp_task_reduction_iterate (pass, code, ccode,
8900 &c, &decl, &type, &next); c = next)
8902 tree var = decl, ref;
8903 if (TREE_CODE (decl) == MEM_REF)
8905 var = TREE_OPERAND (var, 0);
8906 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
8907 var = TREE_OPERAND (var, 0);
8908 tree v = var;
8909 if (TREE_CODE (var) == ADDR_EXPR)
8910 var = TREE_OPERAND (var, 0);
8911 else if (TREE_CODE (var) == INDIRECT_REF)
8912 var = TREE_OPERAND (var, 0);
8913 tree orig_var = var;
8914 if (is_variable_sized (var))
8916 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
8917 var = DECL_VALUE_EXPR (var);
8918 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
8919 var = TREE_OPERAND (var, 0);
8920 gcc_assert (DECL_P (var));
8922 t = ref = maybe_lookup_decl_in_outer_ctx (var, ctx);
8923 if (orig_var != var)
8924 gcc_assert (TREE_CODE (v) == ADDR_EXPR);
8925 else if (TREE_CODE (v) == ADDR_EXPR)
8926 t = build_fold_addr_expr (t);
8927 else if (TREE_CODE (v) == INDIRECT_REF)
8928 t = build_fold_indirect_ref (t);
8929 if (TREE_CODE (TREE_OPERAND (decl, 0)) == POINTER_PLUS_EXPR)
8931 tree b = TREE_OPERAND (TREE_OPERAND (decl, 0), 1);
8932 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
8933 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, b);
8935 if (!integer_zerop (TREE_OPERAND (decl, 1)))
8936 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t,
8937 fold_convert (size_type_node,
8938 TREE_OPERAND (decl, 1)));
8940 else
8942 t = ref = maybe_lookup_decl_in_outer_ctx (var, ctx);
8943 if (!omp_is_reference (decl))
8944 t = build_fold_addr_expr (t);
8946 t = fold_convert (pointer_sized_int_node, t);
8947 seq = NULL;
8948 t = force_gimple_operand (t, &seq, true, NULL_TREE);
8949 gimple_seq_add_seq (start, seq);
8950 r = build4 (ARRAY_REF, pointer_sized_int_node, avar,
8951 size_int (7 + cnt * 3), NULL_TREE, NULL_TREE);
8952 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
8953 t = unshare_expr (byte_position (field));
8954 t = fold_convert (pointer_sized_int_node, t);
8955 ctx->task_reduction_map->put (c, cnt);
8956 ctx->task_reductions.quick_push (TREE_CODE (t) == INTEGER_CST
8957 ? t : NULL_TREE);
8958 seq = NULL;
8959 t = force_gimple_operand (t, &seq, true, NULL_TREE);
8960 gimple_seq_add_seq (start, seq);
8961 r = build4 (ARRAY_REF, pointer_sized_int_node, avar,
8962 size_int (7 + cnt * 3 + 1), NULL_TREE, NULL_TREE);
8963 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
8965 tree bfield = DECL_CHAIN (field);
8966 tree cond;
8967 if (code == OMP_PARALLEL || code == OMP_FOR || code == OMP_SECTIONS)
8968 /* In parallel or worksharing all threads unconditionally
8969 initialize all their task reduction private variables. */
8970 cond = boolean_true_node;
8971 else if (TREE_TYPE (ptr) == ptr_type_node)
8973 cond = build2 (POINTER_PLUS_EXPR, ptr_type_node, ptr,
8974 unshare_expr (byte_position (bfield)));
8975 seq = NULL;
8976 cond = force_gimple_operand (cond, &seq, true, NULL_TREE);
8977 gimple_seq_add_seq (end, seq);
8978 tree pbool = build_pointer_type (TREE_TYPE (bfield));
8979 cond = build2 (MEM_REF, TREE_TYPE (bfield), cond,
8980 build_int_cst (pbool, 0));
8982 else
8983 cond = build3 (COMPONENT_REF, TREE_TYPE (bfield),
8984 build_simple_mem_ref (ptr), bfield, NULL_TREE);
8985 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
8986 tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
8987 tree condv = create_tmp_var (boolean_type_node);
8988 gimple_seq_add_stmt (end, gimple_build_assign (condv, cond));
8989 g = gimple_build_cond (NE_EXPR, condv, boolean_false_node,
8990 lab3, lab4);
8991 gimple_seq_add_stmt (end, g);
8992 gimple_seq_add_stmt (end, gimple_build_label (lab3));
8993 if (cancellable && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) == NULL_TREE)
8995 /* If this reduction doesn't need destruction and parallel
8996 has been cancelled, there is nothing to do for this
8997 reduction, so jump around the merge operation. */
8998 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
8999 g = gimple_build_cond (NE_EXPR, cancellable,
9000 build_zero_cst (TREE_TYPE (cancellable)),
9001 lab4, lab5);
9002 gimple_seq_add_stmt (end, g);
9003 gimple_seq_add_stmt (end, gimple_build_label (lab5));
9006 tree new_var;
9007 if (TREE_TYPE (ptr) == ptr_type_node)
9009 new_var = build2 (POINTER_PLUS_EXPR, ptr_type_node, ptr,
9010 unshare_expr (byte_position (field)));
9011 seq = NULL;
9012 new_var = force_gimple_operand (new_var, &seq, true, NULL_TREE);
9013 gimple_seq_add_seq (end, seq);
9014 tree pbool = build_pointer_type (TREE_TYPE (field));
9015 new_var = build2 (MEM_REF, TREE_TYPE (field), new_var,
9016 build_int_cst (pbool, 0));
9018 else
9019 new_var = build3 (COMPONENT_REF, TREE_TYPE (field),
9020 build_simple_mem_ref (ptr), field, NULL_TREE);
9022 enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
9023 if (TREE_CODE (decl) != MEM_REF && omp_is_reference (decl))
9024 ref = build_simple_mem_ref (ref);
9025 /* reduction(-:var) sums up the partial results, so it acts
9026 identically to reduction(+:var). */
9027 if (rcode == MINUS_EXPR)
9028 rcode = PLUS_EXPR;
9029 if (TREE_CODE (decl) == MEM_REF)
9031 tree type = TREE_TYPE (new_var);
9032 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
9033 tree i = create_tmp_var (TREE_TYPE (v));
9034 tree ptype = build_pointer_type (TREE_TYPE (type));
9035 if (DECL_P (v))
9037 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
9038 tree vv = create_tmp_var (TREE_TYPE (v));
9039 gimplify_assign (vv, v, start);
9040 v = vv;
9042 ref = build4 (ARRAY_REF, pointer_sized_int_node, avar,
9043 size_int (7 + cnt * 3), NULL_TREE, NULL_TREE);
9044 new_var = build_fold_addr_expr (new_var);
9045 new_var = fold_convert (ptype, new_var);
9046 ref = fold_convert (ptype, ref);
9047 tree m = create_tmp_var (ptype);
9048 gimplify_assign (m, new_var, end);
9049 new_var = m;
9050 m = create_tmp_var (ptype);
9051 gimplify_assign (m, ref, end);
9052 ref = m;
9053 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), end);
9054 tree body = create_artificial_label (UNKNOWN_LOCATION);
9055 tree endl = create_artificial_label (UNKNOWN_LOCATION);
9056 gimple_seq_add_stmt (end, gimple_build_label (body));
9057 tree priv = build_simple_mem_ref (new_var);
9058 tree out = build_simple_mem_ref (ref);
9059 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
9061 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
9062 tree decl_placeholder
9063 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
9064 tree lab6 = NULL_TREE;
9065 if (cancellable)
9067 /* If this reduction needs destruction and parallel
9068 has been cancelled, jump around the merge operation
9069 to the destruction. */
9070 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
9071 lab6 = create_artificial_label (UNKNOWN_LOCATION);
9072 tree zero = build_zero_cst (TREE_TYPE (cancellable));
9073 g = gimple_build_cond (NE_EXPR, cancellable, zero,
9074 lab6, lab5);
9075 gimple_seq_add_stmt (end, g);
9076 gimple_seq_add_stmt (end, gimple_build_label (lab5));
9078 SET_DECL_VALUE_EXPR (placeholder, out);
9079 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
9080 SET_DECL_VALUE_EXPR (decl_placeholder, priv);
9081 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
9082 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
9083 gimple_seq_add_seq (end,
9084 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
9085 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
9086 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
9088 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
9089 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
9091 if (cancellable)
9092 gimple_seq_add_stmt (end, gimple_build_label (lab6));
9093 tree x = lang_hooks.decls.omp_clause_dtor (c, priv);
9094 if (x)
9096 gimple_seq tseq = NULL;
9097 gimplify_stmt (&x, &tseq);
9098 gimple_seq_add_seq (end, tseq);
9101 else
9103 tree x = build2 (rcode, TREE_TYPE (out), out, priv);
9104 out = unshare_expr (out);
9105 gimplify_assign (out, x, end);
9107 gimple *g
9108 = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
9109 TYPE_SIZE_UNIT (TREE_TYPE (type)));
9110 gimple_seq_add_stmt (end, g);
9111 g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
9112 TYPE_SIZE_UNIT (TREE_TYPE (type)));
9113 gimple_seq_add_stmt (end, g);
9114 g = gimple_build_assign (i, PLUS_EXPR, i,
9115 build_int_cst (TREE_TYPE (i), 1));
9116 gimple_seq_add_stmt (end, g);
9117 g = gimple_build_cond (LE_EXPR, i, v, body, endl);
9118 gimple_seq_add_stmt (end, g);
9119 gimple_seq_add_stmt (end, gimple_build_label (endl));
9121 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
9123 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
9124 tree oldv = NULL_TREE;
9125 tree lab6 = NULL_TREE;
9126 if (cancellable)
9128 /* If this reduction needs destruction and parallel
9129 has been cancelled, jump around the merge operation
9130 to the destruction. */
9131 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
9132 lab6 = create_artificial_label (UNKNOWN_LOCATION);
9133 tree zero = build_zero_cst (TREE_TYPE (cancellable));
9134 g = gimple_build_cond (NE_EXPR, cancellable, zero,
9135 lab6, lab5);
9136 gimple_seq_add_stmt (end, g);
9137 gimple_seq_add_stmt (end, gimple_build_label (lab5));
9139 if (omp_is_reference (decl)
9140 && !useless_type_conversion_p (TREE_TYPE (placeholder),
9141 TREE_TYPE (ref)))
9142 ref = build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c), ref);
9143 ref = build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c), ref);
9144 tree refv = create_tmp_var (TREE_TYPE (ref));
9145 gimplify_assign (refv, ref, end);
9146 ref = build_simple_mem_ref_loc (OMP_CLAUSE_LOCATION (c), refv);
9147 SET_DECL_VALUE_EXPR (placeholder, ref);
9148 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
9149 tree d = maybe_lookup_decl (decl, ctx);
9150 gcc_assert (d);
9151 if (DECL_HAS_VALUE_EXPR_P (d))
9152 oldv = DECL_VALUE_EXPR (d);
9153 if (omp_is_reference (var))
9155 tree v = fold_convert (TREE_TYPE (d),
9156 build_fold_addr_expr (new_var));
9157 SET_DECL_VALUE_EXPR (d, v);
9159 else
9160 SET_DECL_VALUE_EXPR (d, new_var);
9161 DECL_HAS_VALUE_EXPR_P (d) = 1;
9162 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
9163 if (oldv)
9164 SET_DECL_VALUE_EXPR (d, oldv);
9165 else
9167 SET_DECL_VALUE_EXPR (d, NULL_TREE);
9168 DECL_HAS_VALUE_EXPR_P (d) = 0;
9170 gimple_seq_add_seq (end, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
9171 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
9172 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
9173 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
9174 if (cancellable)
9175 gimple_seq_add_stmt (end, gimple_build_label (lab6));
9176 tree x = lang_hooks.decls.omp_clause_dtor (c, new_var);
9177 if (x)
9179 gimple_seq tseq = NULL;
9180 gimplify_stmt (&x, &tseq);
9181 gimple_seq_add_seq (end, tseq);
9184 else
9186 tree x = build2 (rcode, TREE_TYPE (ref), ref, new_var);
9187 ref = unshare_expr (ref);
9188 gimplify_assign (ref, x, end);
9190 gimple_seq_add_stmt (end, gimple_build_label (lab4));
9191 ++cnt;
9192 field = DECL_CHAIN (bfield);
/* Phase 3: register the array with the runtime (taskgroup) or publish its
   address through the _reductemp_ clause (parallel/worksharing/taskloop).  */
9196 if (code == OMP_TASKGROUP)
9198 t = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_REGISTER);
9199 g = gimple_build_call (t, 1, build_fold_addr_expr (avar));
9200 gimple_seq_add_stmt (start, g);
9202 else
9204 tree c;
9205 if (code == OMP_FOR)
9206 c = gimple_omp_for_clauses (ctx->stmt);
9207 else if (code == OMP_SECTIONS)
9208 c = gimple_omp_sections_clauses (ctx->stmt);
9209 else
9210 c = gimple_omp_taskreg_clauses (ctx->stmt);
9211 c = omp_find_clause (c, OMP_CLAUSE__REDUCTEMP_);
9212 t = fold_convert (TREE_TYPE (OMP_CLAUSE_DECL (c)),
9213 build_fold_addr_expr (avar));
9214 gimplify_assign (OMP_CLAUSE_DECL (c), t, start);
/* Close the per-thread END loop: step to the next thread's block, then
   unregister the reductions and clobber the array.  */
9217 gimple_seq_add_stmt (end, gimple_build_assign (data, PLUS_EXPR, data, sz));
9218 gimple_seq_add_stmt (end, gimple_build_assign (idx, PLUS_EXPR, idx,
9219 size_one_node));
9220 g = gimple_build_cond (NE_EXPR, idx, num_thr_sz, lab1, lab2);
9221 gimple_seq_add_stmt (end, g);
9222 gimple_seq_add_stmt (end, gimple_build_label (lab2));
9223 if (code == OMP_FOR || code == OMP_SECTIONS)
9225 enum built_in_function bfn
9226 = BUILT_IN_GOMP_WORKSHARE_TASK_REDUCTION_UNREGISTER;
9227 t = builtin_decl_explicit (bfn);
9228 tree c_bool_type = TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (t)));
9229 tree arg;
9230 if (cancellable)
9232 arg = create_tmp_var (c_bool_type);
9233 gimple_seq_add_stmt (end, gimple_build_assign (arg, NOP_EXPR,
9234 cancellable));
9236 else
9237 arg = build_int_cst (c_bool_type, 0);
9238 g = gimple_build_call (t, 1, arg);
9240 else
9242 t = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_UNREGISTER);
9243 g = gimple_build_call (t, 1, build_fold_addr_expr (avar));
9245 gimple_seq_add_stmt (end, g);
9246 if (lab7)
9247 gimple_seq_add_stmt (end, gimple_build_label (lab7));
9248 t = build_constructor (atype, NULL);
9249 TREE_THIS_VOLATILE (t) = 1;
9250 gimple_seq_add_stmt (end, gimple_build_assign (avar, t));
9253 /* Expand code for an OpenMP taskgroup directive. */
/* Lower a GIMPLE_OMP_TASKGROUP in place at *GSI_P: wrap the construct in a
   new GIMPLE_BIND that calls GOMP_taskgroup_start before the body, lowers
   any task-reduction clauses, and appends an OMP return plus the reduction
   teardown sequence (DSEQ) afterwards.  CTX is the construct's context.  */
9255 static void
9256 lower_omp_taskgroup (gimple_stmt_iterator *gsi_p, omp_context *ctx)
9258 gimple *stmt = gsi_stmt (*gsi_p);
9259 gcall *x;
9260 gbind *bind;
9261 gimple_seq dseq = NULL;
9262 tree block = make_node (BLOCK);
/* Replace the taskgroup statement with a bind containing it, so the
   start/end runtime calls can be emitted around it.  */
9264 bind = gimple_build_bind (NULL, NULL, block);
9265 gsi_replace (gsi_p, bind, true);
9266 gimple_bind_add_stmt (bind, stmt);
9268 push_gimplify_context ();
9270 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_START),
9272 gimple_bind_add_stmt (bind, x);
/* Emit task-reduction setup into the bind body; the matching teardown
   code is collected in DSEQ and appended after the OMP return below.  */
9274 lower_omp_task_reductions (ctx, OMP_TASKGROUP,
9275 gimple_omp_taskgroup_clauses (stmt),
9276 gimple_bind_body_ptr (bind), &dseq);
9278 lower_omp (gimple_omp_body_ptr (stmt), ctx);
9279 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
9280 gimple_omp_set_body (stmt, NULL);
9282 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
9283 gimple_bind_add_seq (bind, dseq);
9285 pop_gimplify_context (bind);
9287 gimple_bind_append_vars (bind, ctx->block_vars);
9288 BLOCK_VARS (block) = ctx->block_vars;
9292 /* Fold the OMP_ORDERED_CLAUSES for the OMP_ORDERED in STMT if possible. */
/* Fold the depend(sink:...) clauses of ORD_STMT (an omp ordered construct
   at *GSI_P inside the GIMPLE_OMP_FOR of CTX->outer) into a single
   canonical sink vector, merging adjacent ordered constructs first.
   Bails out early when the enclosing statement is not an ordered
   GIMPLE_OMP_FOR, or when folding is not (yet) supported.  */
9294 static void
9295 lower_omp_ordered_clauses (gimple_stmt_iterator *gsi_p, gomp_ordered *ord_stmt,
9296 omp_context *ctx)
9298 struct omp_for_data fd;
9299 if (!ctx->outer || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR)
9300 return;
9302 unsigned int len = gimple_omp_for_collapse (ctx->outer->stmt);
9303 struct omp_for_data_loop *loops = XALLOCAVEC (struct omp_for_data_loop, len);
9304 omp_extract_for_data (as_a <gomp_for *> (ctx->outer->stmt), &fd, loops);
9305 if (!fd.ordered)
9306 return;
9308 tree *list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
9309 tree c = gimple_omp_ordered_clauses (ord_stmt);
9310 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
9311 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
9313 /* Merge depend clauses from multiple adjacent
9314 #pragma omp ordered depend(sink:...) constructs
9315 into one #pragma omp ordered depend(sink:...), so that
9316 we can optimize them together. */
9317 gimple_stmt_iterator gsi = *gsi_p;
9318 gsi_next (&gsi);
9319 while (!gsi_end_p (gsi))
9321 gimple *stmt = gsi_stmt (gsi);
/* Skip debug stmts and nops between the ordered constructs.  */
9322 if (is_gimple_debug (stmt)
9323 || gimple_code (stmt) == GIMPLE_NOP)
9325 gsi_next (&gsi);
9326 continue;
9328 if (gimple_code (stmt) != GIMPLE_OMP_ORDERED)
9329 break;
9330 gomp_ordered *ord_stmt2 = as_a <gomp_ordered *> (stmt);
9331 c = gimple_omp_ordered_clauses (ord_stmt2);
9332 if (c == NULL_TREE
9333 || OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND
9334 || OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
9335 break;
/* Splice the following construct's clause chain onto ORD_STMT's and
   delete the now-empty construct.  */
9336 while (*list_p)
9337 list_p = &OMP_CLAUSE_CHAIN (*list_p);
9338 *list_p = c;
9339 gsi_remove (&gsi, true);
9343 /* Canonicalize sink dependence clauses into one folded clause if
9344 possible.
9346 The basic algorithm is to create a sink vector whose first
9347 element is the GCD of all the first elements, and whose remaining
9348 elements are the minimum of the subsequent columns.
9350 We ignore dependence vectors whose first element is zero because
9351 such dependencies are known to be executed by the same thread.
9353 We take into account the direction of the loop, so a minimum
9354 becomes a maximum if the loop is iterating forwards. We also
9355 ignore sink clauses where the loop direction is unknown, or where
9356 the offsets are clearly invalid because they are not a multiple
9357 of the loop increment.
9359 For example:
9361 #pragma omp for ordered(2)
9362 for (i=0; i < N; ++i)
9363 for (j=0; j < M; ++j)
9365 #pragma omp ordered \
9366 depend(sink:i-8,j-2) \
9367 depend(sink:i,j-1) \ // Completely ignored because i+0.
9368 depend(sink:i-4,j-3) \
9369 depend(sink:i-6,j-4)
9370 #pragma omp ordered depend(source)
9373 Folded clause is:
9375 depend(sink:-gcd(8,4,6),-min(2,3,4))
9376 -or-
9377 depend(sink:-2,-2)
9380 /* FIXME: Computing GCD's where the first element is zero is
9381 non-trivial in the presence of collapsed loops. Do this later. */
9382 if (fd.collapse > 1)
9383 return;
/* FOLDED_DEPS holds the folded vector in [0, len) and, in
   [len, 2*len-1), the candidate vector currently being compared.  */
9385 wide_int *folded_deps = XALLOCAVEC (wide_int, 2 * len - 1);
9387 /* wide_int is not a POD so it must be default-constructed. */
9388 for (unsigned i = 0; i != 2 * len - 1; ++i)
9389 new (static_cast<void*>(folded_deps + i)) wide_int ();
9391 tree folded_dep = NULL_TREE;
9392 /* TRUE if the first dimension's offset is negative. */
9393 bool neg_offset_p = false;
9395 list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
9396 unsigned int i;
9397 while ((c = *list_p) != NULL)
9399 bool remove = false;
9401 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND);
9402 if (OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
9403 goto next_ordered_clause;
/* Walk one sink vector: a TREE_LIST of (offset, index var) pairs,
   one per loop dimension.  */
9405 tree vec;
9406 for (vec = OMP_CLAUSE_DECL (c), i = 0;
9407 vec && TREE_CODE (vec) == TREE_LIST;
9408 vec = TREE_CHAIN (vec), ++i)
9410 gcc_assert (i < len);
9412 /* omp_extract_for_data has canonicalized the condition. */
9413 gcc_assert (fd.loops[i].cond_code == LT_EXPR
9414 || fd.loops[i].cond_code == GT_EXPR);
9415 bool forward = fd.loops[i].cond_code == LT_EXPR;
9416 bool maybe_lexically_later = true;
9418 /* While the committee makes up its mind, bail if we have any
9419 non-constant steps. */
9420 if (TREE_CODE (fd.loops[i].step) != INTEGER_CST)
9421 goto lower_omp_ordered_ret;
9423 tree itype = TREE_TYPE (TREE_VALUE (vec));
9424 if (POINTER_TYPE_P (itype))
9425 itype = sizetype;
9426 wide_int offset = wide_int::from (wi::to_wide (TREE_PURPOSE (vec)),
9427 TYPE_PRECISION (itype),
9428 TYPE_SIGN (itype));
9430 /* Ignore invalid offsets that are not multiples of the step. */
9431 if (!wi::multiple_of_p (wi::abs (offset),
9432 wi::abs (wi::to_wide (fd.loops[i].step)),
9433 UNSIGNED))
9435 warning_at (OMP_CLAUSE_LOCATION (c), 0,
9436 "ignoring sink clause with offset that is not "
9437 "a multiple of the loop step");
9438 remove = true;
9439 goto next_ordered_clause;
9442 /* Calculate the first dimension. The first dimension of
9443 the folded dependency vector is the GCD of the first
9444 elements, while ignoring any first elements whose offset
9445 is 0. */
9446 if (i == 0)
9448 /* Ignore dependence vectors whose first dimension is 0. */
9449 if (offset == 0)
9451 remove = true;
9452 goto next_ordered_clause;
9454 else
9456 if (!TYPE_UNSIGNED (itype) && (forward ^ wi::neg_p (offset)))
9458 error_at (OMP_CLAUSE_LOCATION (c),
9459 "first offset must be in opposite direction "
9460 "of loop iterations");
9461 goto lower_omp_ordered_ret;
/* Normalize so the stored first-dimension offset is positive;
   NEG_OFFSET_P records whether to negate it back at the end.  */
9463 if (forward)
9464 offset = -offset;
9465 neg_offset_p = forward;
9466 /* Initialize the first time around. */
9467 if (folded_dep == NULL_TREE)
9469 folded_dep = c;
9470 folded_deps[0] = offset;
9472 else
9473 folded_deps[0] = wi::gcd (folded_deps[0],
9474 offset, UNSIGNED);
9477 /* Calculate minimum for the remaining dimensions. */
9478 else
9480 folded_deps[len + i - 1] = offset;
9481 if (folded_dep == c)
9482 folded_deps[i] = offset;
9483 else if (maybe_lexically_later
9484 && !wi::eq_p (folded_deps[i], offset))
9486 if (forward ^ wi::gts_p (folded_deps[i], offset))
9488 unsigned int j;
9489 folded_dep = c;
/* This vector is lexically later: adopt its prefix as the
   new folded minimum.  */
9490 for (j = 1; j <= i; j++)
9491 folded_deps[j] = folded_deps[len + j - 1];
9493 else
9494 maybe_lexically_later = false;
9498 gcc_assert (i == len);
/* Every non-folded sink clause is removed; the fold result lives in
   FOLDED_DEP, reattached below.  */
9500 remove = true;
9502 next_ordered_clause:
9503 if (remove)
9504 *list_p = OMP_CLAUSE_CHAIN (c);
9505 else
9506 list_p = &OMP_CLAUSE_CHAIN (c);
9509 if (folded_dep)
9511 if (neg_offset_p)
9512 folded_deps[0] = -folded_deps[0];
9514 tree itype = TREE_TYPE (TREE_VALUE (OMP_CLAUSE_DECL (folded_dep)));
9515 if (POINTER_TYPE_P (itype))
9516 itype = sizetype;
/* Write the folded first-dimension offset back into the surviving
   clause and make it the head of ORD_STMT's clause chain.  */
9518 TREE_PURPOSE (OMP_CLAUSE_DECL (folded_dep))
9519 = wide_int_to_tree (itype, folded_deps[0]);
9520 OMP_CLAUSE_CHAIN (folded_dep) = gimple_omp_ordered_clauses (ord_stmt);
9521 *gimple_omp_ordered_clauses_ptr (ord_stmt) = folded_dep;
9524 lower_omp_ordered_ret:
9526 /* Ordered without clauses is #pragma omp threads, while we want
9527 a nop instead if we remove all clauses. */
9528 if (gimple_omp_ordered_clauses (ord_stmt) == NULL_TREE)
9529 gsi_replace (gsi_p, gimple_build_nop (), true);
9533 /* Expand code for an OpenMP ordered directive. */
/* Lower a GIMPLE_OMP_ORDERED at *GSI_P in context CTX.  Depend-clause
   forms are left for later expansion; otherwise the construct is wrapped
   in a bind with GOMP_ordered_start/end (or the SIMD/SIMT internal
   functions) around the lowered body.  */
9535 static void
9536 lower_omp_ordered (gimple_stmt_iterator *gsi_p, omp_context *ctx)
9538 tree block;
9539 gimple *stmt = gsi_stmt (*gsi_p), *g;
9540 gomp_ordered *ord_stmt = as_a <gomp_ordered *> (stmt);
9541 gcall *x;
9542 gbind *bind;
9543 bool simd = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
9544 OMP_CLAUSE_SIMD);
9545 /* FIXME: this should check presence of OMP_CLAUSE__SIMT_ on the enclosing
9546 loop. */
9547 bool maybe_simt
9548 = simd && omp_maybe_offloaded_ctx (ctx) && omp_max_simt_vf () > 1;
9549 bool threads = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
9550 OMP_CLAUSE_THREADS);
9552 if (omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
9553 OMP_CLAUSE_DEPEND))
9555 /* FIXME: This needs to be moved to the expansion to verify various
9556 conditions only testable on cfg with dominators computed, and also
9557 all the depend clauses to be merged still might need to be available
9558 for the runtime checks. */
9559 if (0)
9560 lower_omp_ordered_clauses (gsi_p, ord_stmt, ctx);
9561 return;
9564 push_gimplify_context ();
9566 block = make_node (BLOCK);
9567 bind = gimple_build_bind (NULL, NULL, block);
9568 gsi_replace (gsi_p, bind, true);
9569 gimple_bind_add_stmt (bind, stmt);
/* Emit the "start" call: the SIMD internal fn for ordered-in-simd,
   otherwise the libgomp GOMP_ordered_start builtin.  */
9571 if (simd)
9573 x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_START, 1,
9574 build_int_cst (NULL_TREE, threads));
9575 cfun->has_simduid_loops = true;
9577 else
9578 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_START),
9580 gimple_bind_add_stmt (bind, x);
9582 tree counter = NULL_TREE, test = NULL_TREE, body = NULL_TREE;
/* For SIMT targets, serialize lanes: loop over the lane counter and let
   only the lane whose predicate matches run the body each iteration.  */
9583 if (maybe_simt)
9585 counter = create_tmp_var (integer_type_node);
9586 g = gimple_build_call_internal (IFN_GOMP_SIMT_LANE, 0);
9587 gimple_call_set_lhs (g, counter);
9588 gimple_bind_add_stmt (bind, g);
9590 body = create_artificial_label (UNKNOWN_LOCATION);
9591 test = create_artificial_label (UNKNOWN_LOCATION);
9592 gimple_bind_add_stmt (bind, gimple_build_label (body));
9594 tree simt_pred = create_tmp_var (integer_type_node);
9595 g = gimple_build_call_internal (IFN_GOMP_SIMT_ORDERED_PRED, 1, counter);
9596 gimple_call_set_lhs (g, simt_pred);
9597 gimple_bind_add_stmt (bind, g);
9599 tree t = create_artificial_label (UNKNOWN_LOCATION);
9600 g = gimple_build_cond (EQ_EXPR, simt_pred, integer_zero_node, t, test);
9601 gimple_bind_add_stmt (bind, g);
9603 gimple_bind_add_stmt (bind, gimple_build_label (t));
9605 lower_omp (gimple_omp_body_ptr (stmt), ctx);
9606 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
9607 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
9608 gimple_omp_set_body (stmt, NULL);
/* SIMT epilogue: decrement the counter and re-enter the body while any
   lane still has a non-negative counter (IFN_GOMP_SIMT_VOTE_ANY).  */
9610 if (maybe_simt)
9612 gimple_bind_add_stmt (bind, gimple_build_label (test));
9613 g = gimple_build_assign (counter, MINUS_EXPR, counter, integer_one_node);
9614 gimple_bind_add_stmt (bind, g);
9616 tree c = build2 (GE_EXPR, boolean_type_node, counter, integer_zero_node);
9617 tree nonneg = create_tmp_var (integer_type_node);
9618 gimple_seq tseq = NULL;
9619 gimplify_assign (nonneg, fold_convert (integer_type_node, c), &tseq);
9620 gimple_bind_add_seq (bind, tseq);
9622 g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY, 1, nonneg);
9623 gimple_call_set_lhs (g, nonneg);
9624 gimple_bind_add_stmt (bind, g);
9626 tree end = create_artificial_label (UNKNOWN_LOCATION);
9627 g = gimple_build_cond (NE_EXPR, nonneg, integer_zero_node, body, end);
9628 gimple_bind_add_stmt (bind, g);
9630 gimple_bind_add_stmt (bind, gimple_build_label (end));
/* Matching "end" call after the body.  */
9632 if (simd)
9633 x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_END, 1,
9634 build_int_cst (NULL_TREE, threads));
9635 else
9636 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_END),
9638 gimple_bind_add_stmt (bind, x);
9640 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
9642 pop_gimplify_context (bind);
9644 gimple_bind_append_vars (bind, ctx->block_vars);
9645 BLOCK_VARS (block) = gimple_bind_vars (bind);
9649 /* Expand code for an OpenMP scan directive and the structured block
9650 before the scan directive. */
/* Lower a GIMPLE_OMP_SCAN at *GSI_P and the structured block preceding it.
   CTX is the scan's context; CTX->outer is the enclosing worksharing/simd
   loop carrying the inscan reduction clauses.  Generates, in BEFORE, the
   per-clause code for either the input phase or the scan phase, depending
   on which half of the construct this GIMPLE_OMP_SCAN delimits.  */
9652 static void
9653 lower_omp_scan (gimple_stmt_iterator *gsi_p, omp_context *ctx)
9655 gimple *stmt = gsi_stmt (*gsi_p);
9656 bool has_clauses
9657 = gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt)) != NULL;
9658 tree lane = NULL_TREE;
9659 gimple_seq before = NULL;
9660 omp_context *octx = ctx->outer;
9661 gcc_assert (octx);
9662 if (octx->scan_exclusive && !has_clauses)
9664 gimple_stmt_iterator gsi2 = *gsi_p;
9665 gsi_next (&gsi2);
9666 gimple *stmt2 = gsi_stmt (gsi2);
9667 /* For exclusive scan, swap GIMPLE_OMP_SCAN without clauses
9668 with following GIMPLE_OMP_SCAN with clauses, so that input_phase,
9669 the one with exclusive clause(s), comes first. */
9670 if (stmt2
9671 && gimple_code (stmt2) == GIMPLE_OMP_SCAN
9672 && gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt2)) != NULL)
9674 gsi_remove (gsi_p, false);
9675 gsi_insert_after (gsi_p, stmt, GSI_SAME_STMT)_
9676 ctx = maybe_lookup_ctx (stmt2);
9677 gcc_assert (ctx);
9678 lower_omp_scan (gsi_p, ctx);
9679 return;
/* INPUT_PHASE is true when this scan delimits the input (pre-scan) half
   of the body for the given inclusive/exclusive flavor.  */
9683 bool input_phase = has_clauses ^ octx->scan_inclusive;
9684 bool is_simd = (gimple_code (octx->stmt) == GIMPLE_OMP_FOR
9685 && gimple_omp_for_kind (octx->stmt) == GF_OMP_FOR_KIND_SIMD);
9686 bool is_for = (gimple_code (octx->stmt) == GIMPLE_OMP_FOR
9687 && gimple_omp_for_kind (octx->stmt) == GF_OMP_FOR_KIND_FOR
9688 && !gimple_omp_for_combined_p (octx->stmt));
9689 bool is_for_simd = is_simd && gimple_omp_for_combined_into_p (octx->stmt);
9690 if (is_for_simd && octx->for_simd_scan_phase)
9691 is_simd = false;
/* In a simd loop, obtain the current lane via IFN_GOMP_SIMD_LANE keyed on
   the loop's _simduid_; the third argument encodes the phase (1 input,
   2 inclusive scan, 3 exclusive scan).  */
9692 if (is_simd)
9693 if (tree c = omp_find_clause (gimple_omp_for_clauses (octx->stmt),
9694 OMP_CLAUSE__SIMDUID_))
9696 tree uid = OMP_CLAUSE__SIMDUID__DECL (c);
9697 lane = create_tmp_var (unsigned_type_node);
9698 tree t = build_int_cst (integer_type_node,
9699 input_phase ? 1
9700 : octx->scan_inclusive ? 2 : 3);
9701 gimple *g
9702 = gimple_build_call_internal (IFN_GOMP_SIMD_LANE, 2, uid, t);
9703 gimple_call_set_lhs (g, lane);
9704 gimple_seq_add_stmt (&before, g);
/* Walk the inscan reduction clauses and emit phase-specific code for
   each reduced variable.  */
9707 if (is_simd || is_for)
9709 for (tree c = gimple_omp_for_clauses (octx->stmt);
9710 c; c = OMP_CLAUSE_CHAIN (c))
9711 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
9712 && OMP_CLAUSE_REDUCTION_INSCAN (c))
9714 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
9715 tree var = OMP_CLAUSE_DECL (c);
9716 tree new_var = lookup_decl (var, octx);
9717 tree val = new_var;
9718 tree var2 = NULL_TREE;
9719 tree var3 = NULL_TREE;
9720 tree var4 = NULL_TREE;
9721 tree lane0 = NULL_TREE;
9722 tree new_vard = new_var;
9723 if (omp_is_reference (var))
9725 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
9726 val = new_var;
9728 if (DECL_HAS_VALUE_EXPR_P (new_vard))
9730 val = DECL_VALUE_EXPR (new_vard);
9731 if (new_vard != new_var)
9733 gcc_assert (TREE_CODE (val) == ADDR_EXPR);
9734 val = TREE_OPERAND (val, 0);
/* If VAL refers to an "omp simd array" element, index it by the
   current LANE; LANE0 remembers the original (lane 0) index.  */
9736 if (TREE_CODE (val) == ARRAY_REF
9737 && VAR_P (TREE_OPERAND (val, 0)))
9739 tree v = TREE_OPERAND (val, 0);
9740 if (lookup_attribute ("omp simd array",
9741 DECL_ATTRIBUTES (v)))
9743 val = unshare_expr (val);
9744 lane0 = TREE_OPERAND (val, 1);
9745 TREE_OPERAND (val, 1) = lane;
9746 var2 = lookup_decl (v, octx);
9747 if (octx->scan_exclusive)
9748 var4 = lookup_decl (var2, octx);
9749 if (input_phase
9750 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
9751 var3 = maybe_lookup_decl (var4 ? var4 : var2, octx);
9752 if (!input_phase)
9754 var2 = build4 (ARRAY_REF, TREE_TYPE (val),
9755 var2, lane, NULL_TREE, NULL_TREE);
9756 TREE_THIS_NOTRAP (var2) = 1;
9757 if (octx->scan_exclusive)
9759 var4 = build4 (ARRAY_REF, TREE_TYPE (val),
9760 var4, lane, NULL_TREE,
9761 NULL_TREE);
9762 TREE_THIS_NOTRAP (var4) = 1;
9765 else
9766 var2 = val;
9769 gcc_assert (var2);
9771 else
9773 var2 = build_outer_var_ref (var, octx);
9774 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
9776 var3 = maybe_lookup_decl (new_vard, octx);
9777 if (var3 == new_vard || var3 == NULL_TREE)
9778 var3 = NULL_TREE;
9779 else if (is_simd && octx->scan_exclusive && !input_phase)
9781 var4 = maybe_lookup_decl (var3, octx);
9782 if (var4 == var3 || var4 == NULL_TREE)
9784 if (TREE_ADDRESSABLE (TREE_TYPE (new_var)))
9786 var4 = var3;
9787 var3 = NULL_TREE;
9789 else
9790 var4 = NULL_TREE;
9794 if (is_simd
9795 && octx->scan_exclusive
9796 && !input_phase
9797 && var4 == NULL_TREE)
9798 var4 = create_tmp_var (TREE_TYPE (val));
/* User-defined reductions: run the UDR init/merge sequences with the
   placeholder and variable temporarily rebound via DECL_VALUE_EXPR.  */
9800 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
9802 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
9803 if (input_phase)
9805 if (var3)
9807 /* If we've added a separate identity element
9808 variable, copy it over into val. */
9809 tree x = lang_hooks.decls.omp_clause_assign_op (c, val,
9810 var3);
9811 gimplify_and_add (x, &before);
9813 else if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
9815 /* Otherwise, assign to it the identity element. */
9816 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
9817 if (is_for)
9818 tseq = copy_gimple_seq_and_replace_locals (tseq);
9819 tree ref = build_outer_var_ref (var, octx);
9820 tree x = (DECL_HAS_VALUE_EXPR_P (new_vard)
9821 ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
9822 if (x)
9824 if (new_vard != new_var)
9825 val = build_fold_addr_expr_loc (clause_loc, val);
9826 SET_DECL_VALUE_EXPR (new_vard, val);
9828 SET_DECL_VALUE_EXPR (placeholder, ref);
9829 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
9830 lower_omp (&tseq, octx);
9831 if (x)
9832 SET_DECL_VALUE_EXPR (new_vard, x);
9833 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
9834 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
9835 gimple_seq_add_seq (&before, tseq);
9836 if (is_simd)
9837 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
9840 else if (is_simd)
9842 tree x;
9844 if (octx->scan_exclusive)
9845 tree v4 = unshare_expr (var4);
9846 tree v2 = unshare_expr (var2);
9847 x = lang_hooks.decls.omp_clause_assign_op (c, v4, v2);
9848 gimplify_and_add (x, &before);
9850 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
9851 x = (DECL_HAS_VALUE_EXPR_P (new_vard)
9852 ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
9853 tree vexpr = val;
9854 if (x && new_vard != new_var)
9855 vexpr = build_fold_addr_expr_loc (clause_loc, val);
9856 if (x)
9857 SET_DECL_VALUE_EXPR (new_vard, vexpr);
9858 SET_DECL_VALUE_EXPR (placeholder, var2);
9859 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
9860 lower_omp (&tseq, octx);
9861 gimple_seq_add_seq (&before, tseq);
9862 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
9863 if (x)
9864 SET_DECL_VALUE_EXPR (new_vard, x);
9865 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
9866 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
9867 if (octx->scan_inclusive)
9869 x = lang_hooks.decls.omp_clause_assign_op (c, val,
9870 var2);
9871 gimplify_and_add (x, &before);
9873 else if (lane0 == NULL_TREE)
9875 x = lang_hooks.decls.omp_clause_assign_op (c, val,
9876 var4);
9877 gimplify_and_add (x, &before);
/* Built-in reduction operators (non-UDR case).  */
9881 else
9883 if (input_phase)
9885 /* input phase. Set val to initializer before
9886 the body. */
9887 tree x = omp_reduction_init (c, TREE_TYPE (new_var));
9888 gimplify_assign (val, x, &before);
9890 else if (is_simd)
9892 /* scan phase. */
9893 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
9894 if (code == MINUS_EXPR)
9895 code = PLUS_EXPR;
9897 tree x = build2 (code, TREE_TYPE (var2),
9898 unshare_expr (var2), unshare_expr (val));
9899 if (octx->scan_inclusive)
9901 gimplify_assign (unshare_expr (var2), x, &before);
9902 gimplify_assign (val, var2, &before);
9904 else
9906 gimplify_assign (unshare_expr (var4),
9907 unshare_expr (var2), &before);
9908 gimplify_assign (var2, x, &before);
9909 if (lane0 == NULL_TREE)
9910 gimplify_assign (val, var4, &before);
9914 if (octx->scan_exclusive && !input_phase && lane0)
9916 tree vexpr = unshare_expr (var4);
9917 TREE_OPERAND (vexpr, 1) = lane0;
9918 if (new_vard != new_var)
9919 vexpr = build_fold_addr_expr_loc (clause_loc, vexpr);
9920 SET_DECL_VALUE_EXPR (new_vard, vexpr);
/* For plain simd, splice BEFORE and the lowered body directly in place
   of the scan statement; otherwise insert BEFORE at the head of the
   scan's own body.  */
9924 if (is_simd && !is_for_simd)
9926 gsi_insert_seq_after (gsi_p, gimple_omp_body (stmt), GSI_SAME_STMT);
9927 gsi_insert_seq_after (gsi_p, before, GSI_SAME_STMT);
9928 gsi_replace (gsi_p, gimple_build_nop (), true);
9929 return;
9931 lower_omp (gimple_omp_body_ptr (stmt), octx);
9933 if (before)
9934 gimple_stmt_iterator gsi = gsi_start_1 (gimple_omp_body_ptr (stmt));
9935 gsi_insert_seq_before (&gsi, before, GSI_SAME_STMT);
9940 /* Gimplify a GIMPLE_OMP_CRITICAL statement. This is a relatively simple
9941 substitution of a couple of function calls. But in the NAMED case,
9942 requires that languages coordinate a symbol name. It is therefore
9943 best put here in common code. */
/* Map from critical-section name (IDENTIFIER_NODE) to the lazily created
   global mutex variable, GC-rooted so the decls survive collection.  */
9945 static GTY(()) hash_map<tree, tree> *critical_name_mutexes;
9947 static void
9948 lower_omp_critical (gimple_stmt_iterator *gsi_p, omp_context *ctx)
9950 tree block;
9951 tree name, lock, unlock;
9952 gomp_critical *stmt = as_a <gomp_critical *> (gsi_stmt (*gsi_p));
9953 gbind *bind;
9954 location_t loc = gimple_location (stmt);
9955 gimple_seq tbody;
9957 name = gimple_omp_critical_name (stmt);
/* Named critical sections use a per-name global mutex passed to the
   GOMP_critical_name_* entry points; it is created once and cached.  */
9958 if (name)
9960 tree decl;
9962 if (!critical_name_mutexes)
9963 critical_name_mutexes = hash_map<tree, tree>::create_ggc (10);
9965 tree *n = critical_name_mutexes->get (name);
9966 if (n == NULL)
9968 char *new_str;
9970 decl = create_tmp_var_raw (ptr_type_node);
/* The symbol is public and common so all TUs that reference the same
   critical name share one mutex.  */
9972 new_str = ACONCAT ((".gomp_critical_user_",
9973 IDENTIFIER_POINTER (name), NULL));
9974 DECL_NAME (decl) = get_identifier (new_str);
9975 TREE_PUBLIC (decl) = 1;
9976 TREE_STATIC (decl) = 1;
9977 DECL_COMMON (decl) = 1;
9978 DECL_ARTIFICIAL (decl) = 1;
9979 DECL_IGNORED_P (decl) = 1;
9981 varpool_node::finalize_decl (decl);
9983 critical_name_mutexes->put (name, decl);
9985 else
9986 decl = *n;
9988 /* If '#pragma omp critical' is inside offloaded region or
9989 inside function marked as offloadable, the symbol must be
9990 marked as offloadable too. */
9991 omp_context *octx;
9992 if (cgraph_node::get (current_function_decl)->offloadable)
9993 varpool_node::get_create (decl)->offloadable = 1;
9994 else
9995 for (octx = ctx->outer; octx; octx = octx->outer)
9996 if (is_gimple_omp_offloaded (octx->stmt))
9998 varpool_node::get_create (decl)->offloadable = 1;
9999 break;
10002 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_START);
10003 lock = build_call_expr_loc (loc, lock, 1,
10004 build_fold_addr_expr_loc (loc, decl));
10006 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_END);
10007 unlock = build_call_expr_loc (loc, unlock, 1,
10008 build_fold_addr_expr_loc (loc, decl));
/* Unnamed critical sections share the single global runtime lock.  */
10010 else
10012 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_START);
10013 lock = build_call_expr_loc (loc, lock, 0);
10015 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_END);
10016 unlock = build_call_expr_loc (loc, unlock, 0);
10019 push_gimplify_context ();
10021 block = make_node (BLOCK);
10022 bind = gimple_build_bind (NULL, NULL, block);
10023 gsi_replace (gsi_p, bind, true);
10024 gimple_bind_add_stmt (bind, stmt);
/* lock; body (with EH wrapping); unlock; OMP return.  */
10026 tbody = gimple_bind_body (bind);
10027 gimplify_and_add (lock, &tbody);
10028 gimple_bind_set_body (bind, tbody);
10030 lower_omp (gimple_omp_body_ptr (stmt), ctx);
10031 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
10032 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
10033 gimple_omp_set_body (stmt, NULL);
10035 tbody = gimple_bind_body (bind);
10036 gimplify_and_add (unlock, &tbody);
10037 gimple_bind_set_body (bind, tbody);
10039 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
10041 pop_gimplify_context (bind);
10042 gimple_bind_append_vars (bind, ctx->block_vars);
10043 BLOCK_VARS (block) = gimple_bind_vars (bind);
10046 /* A subroutine of lower_omp_for. Generate code to emit the predicate
10047 for a lastprivate clause. Given a loop control predicate of (V
10048 cond N2), we gate the clause on (!(V cond N2)). The lowered form
10049 is appended to *DLIST, iterator initialization is appended to
10050 *BODY_P. *CLIST is for lastprivate(conditional:) code that needs
10051 to be emitted in a critical section. */
10053 static void
10054 lower_omp_for_lastprivate (struct omp_for_data *fd, gimple_seq *body_p,
10055 gimple_seq *dlist, gimple_seq *clist,
10056 struct omp_context *ctx)
10058 tree clauses, cond, vinit;
10059 enum tree_code cond_code;
10060 gimple_seq stmts;
/* Invert the loop condition: lastprivate code runs when the loop has
   finished, i.e. when (V cond N2) no longer holds.  */
10062 cond_code = fd->loop.cond_code;
10063 cond_code = cond_code == LT_EXPR ? GE_EXPR : LE_EXPR;
10065 /* When possible, use a strict equality expression. This can let VRP
10066 type optimizations deduce the value and remove a copy. */
10067 if (tree_fits_shwi_p (fd->loop.step))
10069 HOST_WIDE_INT step = tree_to_shwi (fd->loop.step);
10070 if (step == 1 || step == -1)
10071 cond_code = EQ_EXPR;
/* For a collapsed inner loop of a combined construct with non-constant
   end bound, the true end value lives in a _looptemp_ of the enclosing
   taskreg construct — locate it so the predicate uses the right N2.  */
10074 tree n2 = fd->loop.n2;
10075 if (fd->collapse > 1
10076 && TREE_CODE (n2) != INTEGER_CST
10077 && gimple_omp_for_combined_into_p (fd->for_stmt))
10079 struct omp_context *taskreg_ctx = NULL;
10080 if (gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
10082 gomp_for *gfor = as_a <gomp_for *> (ctx->outer->stmt);
10083 if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_FOR
10084 || gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_DISTRIBUTE)
10086 if (gimple_omp_for_combined_into_p (gfor))
10088 gcc_assert (ctx->outer->outer
10089 && is_parallel_ctx (ctx->outer->outer));
10090 taskreg_ctx = ctx->outer->outer;
10092 else
10094 struct omp_for_data outer_fd;
10095 omp_extract_for_data (gfor, &outer_fd, NULL);
10096 n2 = fold_convert (TREE_TYPE (n2), outer_fd.loop.n2);
10099 else if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_TASKLOOP)
10100 taskreg_ctx = ctx->outer->outer;
10102 else if (is_taskreg_ctx (ctx->outer))
10103 taskreg_ctx = ctx->outer;
10104 if (taskreg_ctx)
10106 int i;
10107 tree taskreg_clauses
10108 = gimple_omp_taskreg_clauses (taskreg_ctx->stmt);
10109 tree innerc = omp_find_clause (taskreg_clauses,
10110 OMP_CLAUSE__LOOPTEMP_);
10111 gcc_assert (innerc);
/* Skip over the per-dimension _looptemp_ clauses (plus 4 extra for
   certain non-rectangular loops) to reach the one holding N2.  */
10112 int count = fd->collapse;
10113 if (fd->non_rect
10114 && fd->last_nonrect == fd->first_nonrect + 1)
10115 if (tree v = gimple_omp_for_index (fd->for_stmt, fd->last_nonrect))
10116 if (!TYPE_UNSIGNED (TREE_TYPE (v)))
10117 count += 4;
10118 for (i = 0; i < count; i++)
10120 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
10121 OMP_CLAUSE__LOOPTEMP_);
10122 gcc_assert (innerc);
10124 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
10125 OMP_CLAUSE__LOOPTEMP_);
10126 if (innerc)
10127 n2 = fold_convert (TREE_TYPE (n2),
10128 lookup_decl (OMP_CLAUSE_DECL (innerc),
10129 taskreg_ctx));
10132 cond = build2 (cond_code, boolean_type_node, fd->loop.v, n2);
10134 clauses = gimple_omp_for_clauses (fd->for_stmt);
10135 stmts = NULL;
10136 lower_lastprivate_clauses (clauses, cond, body_p, &stmts, clist, ctx);
10137 if (!gimple_seq_empty_p (stmts))
10139 gimple_seq_add_seq (&stmts, *dlist);
10140 *dlist = stmts;
10142 /* Optimize: v = 0; is usually cheaper than v = some_other_constant. */
10143 vinit = fd->loop.n1;
10144 if (cond_code == EQ_EXPR
10145 && tree_fits_shwi_p (fd->loop.n2)
10146 && ! integer_zerop (fd->loop.n2))
10147 vinit = build_int_cst (TREE_TYPE (fd->loop.v), 0);
10148 else
10149 vinit = unshare_expr (vinit);
10151 /* Initialize the iterator variable, so that threads that don't execute
10152 any iterations don't execute the lastprivate clauses by accident. */
10153 gimplify_assign (fd->loop.v, vinit, body_p);
10157 /* OpenACC privatization.
10159 Or, in other words, *sharing* at the respective OpenACC level of
10160 parallelism.
10162 From a correctness perspective, a non-addressable variable can't be accessed
10163 outside the current thread, so it can go in a (faster than shared memory)
10164 register -- though that register may need to be broadcast in some
10165 circumstances. A variable can only meaningfully be "shared" across workers
10166 or vector lanes if its address is taken, e.g. by a call to an atomic
10167 builtin.
10169 From an optimisation perspective, the answer might be fuzzier: maybe
10170 sometimes, using shared memory directly would be faster than
10171 broadcasting. */
/* Emit the common prefix of an OpenACC-privatization diagnostic for DECL
   at LOC with dump flags L_DUMP_FLAGS; C is the clause the variable came
   from, or NULL_TREE when it was found in a block's decl chain.  The
   caller appends the message tail via dump_printf.  */
10173 static void
10174 oacc_privatization_begin_diagnose_var (const dump_flags_t l_dump_flags,
10175 const location_t loc, const tree c,
10176 const tree decl)
10178 const dump_user_location_t d_u_loc
10179 = dump_user_location_t::from_location_t (loc);
10180 /* PR100695 "Format decoder, quoting in 'dump_printf' etc." */
10181 #if __GNUC__ >= 10
10182 # pragma GCC diagnostic push
10183 # pragma GCC diagnostic ignored "-Wformat"
10184 #endif
10185 dump_printf_loc (l_dump_flags, d_u_loc,
10186 "variable %<%T%> ", decl);
10187 #if __GNUC__ >= 10
10188 # pragma GCC diagnostic pop
10189 #endif
/* Say where the variable came from: a clause or a block.  */
10190 if (c)
10191 dump_printf (l_dump_flags,
10192 "in %qs clause ",
10193 omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
10194 else
10195 dump_printf (l_dump_flags,
10196 "declared in block ");
/* Return true if DECL (from clause C, or from a block when C is NULL)
   is a candidate for adjusting its OpenACC privatization level: it must
   be a VAR_DECL, addressable, and — for block-local decls — neither
   static nor external.  Emits optimization-report diagnostics for each
   accept/reject decision when dumping is enabled.  */
10199 static bool
10200 oacc_privatization_candidate_p (const location_t loc, const tree c,
10201 const tree decl)
10203 dump_flags_t l_dump_flags = get_openacc_privatization_dump_flags ();
10205 /* There is some differentiation depending on block vs. clause. */
10206 bool block = !c;
10208 bool res = true;
/* Reject anything that isn't a VAR_DECL.  */
10210 if (res && !VAR_P (decl))
10212 res = false;
10214 if (dump_enabled_p ())
10216 oacc_privatization_begin_diagnose_var (l_dump_flags, loc, c, decl);
10217 dump_printf (l_dump_flags,
10218 "potentially has improper OpenACC privatization level: %qs\n",
10219 get_tree_code_name (TREE_CODE (decl)));
/* Block-scope statics keep their storage; don't privatize.  */
10223 if (res && block && TREE_STATIC (decl))
10225 res = false;
10227 if (dump_enabled_p ())
10229 oacc_privatization_begin_diagnose_var (l_dump_flags, loc, c, decl);
10230 dump_printf (l_dump_flags,
10231 "isn%'t candidate for adjusting OpenACC privatization level: %s\n",
10232 "static");
/* Likewise for externally visible block decls.  */
10236 if (res && block && DECL_EXTERNAL (decl))
10238 res = false;
10240 if (dump_enabled_p ())
10242 oacc_privatization_begin_diagnose_var (l_dump_flags, loc, c, decl);
10243 dump_printf (l_dump_flags,
10244 "isn%'t candidate for adjusting OpenACC privatization level: %s\n",
10245 "external");
/* Non-addressable variables don't need level adjustment here.  */
10249 if (res && !TREE_ADDRESSABLE (decl))
10251 res = false;
10253 if (dump_enabled_p ())
10255 oacc_privatization_begin_diagnose_var (l_dump_flags, loc, c, decl);
10256 dump_printf (l_dump_flags,
10257 "isn%'t candidate for adjusting OpenACC privatization level: %s\n",
10258 "not addressable");
10262 if (res)
10264 if (dump_enabled_p ())
10266 oacc_privatization_begin_diagnose_var (l_dump_flags, loc, c, decl);
10267 dump_printf (l_dump_flags,
10268 "is candidate for adjusting OpenACC privatization level\n");
10272 if (dump_file && (dump_flags & TDF_DETAILS))
10274 print_generic_decl (dump_file, decl, dump_flags);
10275 fprintf (dump_file, "\n");
10278 return res;
10281 /* Scan CLAUSES for candidates for adjusting OpenACC privatization level in
10282 CTX. */
10284 static void
10285 oacc_privatization_scan_clause_chain (omp_context *ctx, tree clauses)
/* Collect the decls of 'private' clauses that pass the candidate check
   into CTX->oacc_privatization_candidates, asserting no duplicates.  */
10287 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
10288 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE)
10290 tree decl = OMP_CLAUSE_DECL (c);
10292 if (!oacc_privatization_candidate_p (OMP_CLAUSE_LOCATION (c), c, decl))
10293 continue;
10295 gcc_checking_assert (!ctx->oacc_privatization_candidates.contains (decl));
10296 ctx->oacc_privatization_candidates.safe_push (decl);
10300 /* Scan DECLS for candidates for adjusting OpenACC privatization level in
10301 CTX. */
10303 static void
10304 oacc_privatization_scan_decl_chain (omp_context *ctx, tree decls)
/* Same as oacc_privatization_scan_clause_chain, but walks a DECL_CHAIN
   of block-local declarations (C is NULL in the candidate check).  */
10306 for (tree decl = decls; decl; decl = DECL_CHAIN (decl))
10308 if (!oacc_privatization_candidate_p (gimple_location (ctx->stmt), NULL, decl))
10309 continue;
10311 gcc_checking_assert (!ctx->oacc_privatization_candidates.contains (decl));
10312 ctx->oacc_privatization_candidates.safe_push (decl);
10316 /* Callback for walk_gimple_seq. Find #pragma omp scan statement. */
/* walk_gimple_seq callback: locate the first GIMPLE_OMP_SCAN.  On a hit,
   stores its iterator into WI->info and returns non-NULL to stop the
   walk.  Descends into a combined simd GIMPLE_OMP_FOR body but otherwise
   treats statements as handled.  */
10318 static tree
10319 omp_find_scan (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
10320 struct walk_stmt_info *wi)
10322 gimple *stmt = gsi_stmt (*gsi_p);
10324 *handled_ops_p = true;
10325 switch (gimple_code (stmt))
10327 WALK_SUBSTMTS;
10329 case GIMPLE_OMP_FOR:
/* Only look inside a simd loop that is part of a combined construct;
   the scan we want lives in its body.  */
10330 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_SIMD
10331 && gimple_omp_for_combined_into_p (stmt))
10332 *handled_ops_p = false;
10333 break;
10335 case GIMPLE_OMP_SCAN:
10336 *(gimple_stmt_iterator *) (wi->info) = *gsi_p;
10337 return integer_zero_node;
10338 default:
10339 break;
10341 return NULL;
10344 /* Helper function for lower_omp_for, add transformations for a worksharing
10345 loop with scan directives inside of it.
10346 For worksharing loop not combined with simd, transform:
10347 #pragma omp for reduction(inscan,+:r) private(i)
10348 for (i = 0; i < n; i = i + 1)
10351 update (r);
10353 #pragma omp scan inclusive(r)
10355 use (r);
10359 into two worksharing loops + code to merge results:
10361 num_threads = omp_get_num_threads ();
10362 thread_num = omp_get_thread_num ();
10363 if (thread_num == 0) goto <D.2099>; else goto <D.2100>;
10364 <D.2099>:
10365 var2 = r;
10366 goto <D.2101>;
10367 <D.2100>:
10368 // For UDRs this is UDR init, or if ctors are needed, copy from
10369 // var3 that has been constructed to contain the neutral element.
10370 var2 = 0;
10371 <D.2101>:
10372 ivar = 0;
10373 // The _scantemp_ clauses will arrange for rpriva to be initialized to
10374 // a shared array with num_threads elements and rprivb to a local array
10375 // number of elements equal to the number of (contiguous) iterations the
10376 // current thread will perform. controlb and controlp variables are
10377 // temporaries to handle deallocation of rprivb at the end of second
10378 // GOMP_FOR.
10379 #pragma omp for _scantemp_(rpriva) _scantemp_(rprivb) _scantemp_(controlb) \
10380 _scantemp_(controlp) reduction(inscan,+:r) private(i) nowait
10381 for (i = 0; i < n; i = i + 1)
10384 // For UDRs this is UDR init or copy from var3.
10385 r = 0;
10386 // This is the input phase from user code.
10387 update (r);
10390 // For UDRs this is UDR merge.
10391 var2 = var2 + r;
10392 // Rather than handing it over to the user, save to local thread's
10393 // array.
10394 rprivb[ivar] = var2;
10395 // For exclusive scan, the above two statements are swapped.
10396 ivar = ivar + 1;
10399 // And remember the final value from this thread's into the shared
10400 // rpriva array.
10401 rpriva[(sizetype) thread_num] = var2;
10402 // If more than one thread, compute using Work-Efficient prefix sum
10403 // the inclusive parallel scan of the rpriva array.
10404 if (num_threads > 1) goto <D.2102>; else goto <D.2103>;
10405 <D.2102>:
10406 GOMP_barrier ();
10407 down = 0;
10408 k = 1;
10409 num_threadsu = (unsigned int) num_threads;
10410 thread_numup1 = (unsigned int) thread_num + 1;
10411 <D.2108>:
10412 twok = k << 1;
10413 if (twok > num_threadsu) goto <D.2110>; else goto <D.2111>;
10414 <D.2110>:
10415 down = 4294967295;
10416 k = k >> 1;
10417 if (k == num_threadsu) goto <D.2112>; else goto <D.2111>;
10418 <D.2112>:
10419 k = k >> 1;
10420 <D.2111>:
10421 twok = k << 1;
10422 cplx = .MUL_OVERFLOW (thread_nump1, twok);
10423 mul = REALPART_EXPR <cplx>;
10424 ovf = IMAGPART_EXPR <cplx>;
10425 if (ovf == 0) goto <D.2116>; else goto <D.2117>;
10426 <D.2116>:
10427 andv = k & down;
10428 andvm1 = andv + 4294967295;
10429 l = mul + andvm1;
10430 if (l < num_threadsu) goto <D.2120>; else goto <D.2117>;
10431 <D.2120>:
10432 // For UDRs this is UDR merge, performed using var2 variable as temporary,
10433 // i.e. var2 = rpriva[l - k]; UDR merge (var2, rpriva[l]); rpriva[l] = var2;
10434 rpriva[l] = rpriva[l - k] + rpriva[l];
10435 <D.2117>:
10436 if (down == 0) goto <D.2121>; else goto <D.2122>;
10437 <D.2121>:
10438 k = k << 1;
10439 goto <D.2123>;
10440 <D.2122>:
10441 k = k >> 1;
10442 <D.2123>:
10443 GOMP_barrier ();
10444 if (k != 0) goto <D.2108>; else goto <D.2103>;
10445 <D.2103>:
10446 if (thread_num == 0) goto <D.2124>; else goto <D.2125>;
10447 <D.2124>:
10448 // For UDRs this is UDR init or copy from var3.
10449 var2 = 0;
10450 goto <D.2126>;
10451 <D.2125>:
10452 var2 = rpriva[thread_num - 1];
10453 <D.2126>:
10454 ivar = 0;
10455 #pragma omp for _scantemp_(controlb) _scantemp_(controlp) \
10456 reduction(inscan,+:r) private(i)
10457 for (i = 0; i < n; i = i + 1)
10460 // For UDRs, this is r = var2; UDR merge (r, rprivb[ivar]);
10461 r = var2 + rprivb[ivar];
10464 // This is the scan phase from user code.
10465 use (r);
10466 // Plus a bump of the iterator.
10467 ivar = ivar + 1;
10469 } */
/* See the transformation description in the comment above.  BODY_P receives
   the lowered sequence, DLIST the destructor/finalization list; STMT is the
   worksharing loop, FD its extracted data, CTX its lowering context.  */
10471 static void
10472 lower_omp_for_scan (gimple_seq *body_p, gimple_seq *dlist, gomp_for *stmt,
10473 struct omp_for_data *fd, omp_context *ctx)
10475 bool is_for_simd = gimple_omp_for_combined_p (stmt);
10476 gcc_assert (ctx->scan_inclusive || ctx->scan_exclusive);
/* Locate the GIMPLE_OMP_SCAN that separates the input phase from the scan
   phase inside the loop body.  */
10478 gimple_seq body = gimple_omp_body (stmt);
10479 gimple_stmt_iterator input1_gsi = gsi_none ();
10480 struct walk_stmt_info wi;
10481 memset (&wi, 0, sizeof (wi));
10482 wi.val_only = true;
10483 wi.info = (void *) &input1_gsi;
10484 walk_gimple_seq_mod (&body, omp_find_scan, NULL, &wi);
10485 gcc_assert (!gsi_end_p (input1_gsi));
10487 gimple *input_stmt1 = gsi_stmt (input1_gsi);
10488 gimple_stmt_iterator gsi = input1_gsi;
10489 gsi_next (&gsi);
10490 gimple_stmt_iterator scan1_gsi = gsi;
10491 gimple *scan_stmt1 = gsi_stmt (gsi);
10492 gcc_assert (scan_stmt1 && gimple_code (scan_stmt1) == GIMPLE_OMP_SCAN);
/* Detach the phase bodies so the copy below does not duplicate them.  */
10494 gimple_seq input_body = gimple_omp_body (input_stmt1);
10495 gimple_seq scan_body = gimple_omp_body (scan_stmt1);
10496 gimple_omp_set_body (input_stmt1, NULL);
10497 gimple_omp_set_body (scan_stmt1, NULL);
10498 gimple_omp_set_body (stmt, NULL);
/* Duplicate the loop: STMT stays the first (input phase) worksharing loop,
   NEW_STMT becomes the second (scan phase) one.  */
10500 gomp_for *new_stmt = as_a <gomp_for *> (gimple_copy (stmt));
10501 gimple_seq new_body = copy_gimple_seq_and_replace_locals (body);
10502 gimple_omp_set_body (stmt, body);
10503 gimple_omp_set_body (input_stmt1, input_body);
/* Find the GIMPLE_OMP_SCAN marker in the copied body as well.  */
10505 gimple_stmt_iterator input2_gsi = gsi_none ();
10506 memset (&wi, 0, sizeof (wi));
10507 wi.val_only = true;
10508 wi.info = (void *) &input2_gsi;
10509 walk_gimple_seq_mod (&new_body, omp_find_scan, NULL, &wi);
10510 gcc_assert (!gsi_end_p (input2_gsi));
10512 gimple *input_stmt2 = gsi_stmt (input2_gsi);
10513 gsi = input2_gsi;
10514 gsi_next (&gsi);
10515 gimple_stmt_iterator scan2_gsi = gsi;
10516 gimple *scan_stmt2 = gsi_stmt (gsi);
10517 gcc_assert (scan_stmt2 && gimple_code (scan_stmt2) == GIMPLE_OMP_SCAN);
10518 gimple_omp_set_body (scan_stmt2, scan_body);
/* For a combined for-simd, also locate the scan markers inside the inner
   simd bodies of both copies, plus their simd contexts.  */
10520 gimple_stmt_iterator input3_gsi = gsi_none ();
10521 gimple_stmt_iterator scan3_gsi = gsi_none ();
10522 gimple_stmt_iterator input4_gsi = gsi_none ();
10523 gimple_stmt_iterator scan4_gsi = gsi_none ();
10524 gimple *input_stmt3 = NULL, *scan_stmt3 = NULL;
10525 gimple *input_stmt4 = NULL, *scan_stmt4 = NULL;
10526 omp_context *input_simd_ctx = NULL, *scan_simd_ctx = NULL;
10527 if (is_for_simd)
10529 memset (&wi, 0, sizeof (wi));
10530 wi.val_only = true;
10531 wi.info = (void *) &input3_gsi;
10532 walk_gimple_seq_mod (&input_body, omp_find_scan, NULL, &wi);
10533 gcc_assert (!gsi_end_p (input3_gsi));
10535 input_stmt3 = gsi_stmt (input3_gsi);
10536 gsi = input3_gsi;
10537 gsi_next (&gsi);
10538 scan3_gsi = gsi;
10539 scan_stmt3 = gsi_stmt (gsi);
10540 gcc_assert (scan_stmt3 && gimple_code (scan_stmt3) == GIMPLE_OMP_SCAN);
10542 memset (&wi, 0, sizeof (wi));
10543 wi.val_only = true;
10544 wi.info = (void *) &input4_gsi;
10545 walk_gimple_seq_mod (&scan_body, omp_find_scan, NULL, &wi);
10546 gcc_assert (!gsi_end_p (input4_gsi));
10548 input_stmt4 = gsi_stmt (input4_gsi);
10549 gsi = input4_gsi;
10550 gsi_next (&gsi);
10551 scan4_gsi = gsi;
10552 scan_stmt4 = gsi_stmt (gsi);
10553 gcc_assert (scan_stmt4 && gimple_code (scan_stmt4) == GIMPLE_OMP_SCAN);
10555 input_simd_ctx = maybe_lookup_ctx (input_stmt3)->outer;
10556 scan_simd_ctx = maybe_lookup_ctx (input_stmt4)->outer;
/* Emit num_threads = omp_get_num_threads (); thread_num =
   omp_get_thread_num (); at the start of the lowered sequence.  */
10559 tree num_threads = create_tmp_var (integer_type_node);
10560 tree thread_num = create_tmp_var (integer_type_node);
10561 tree nthreads_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS);
10562 tree threadnum_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
10563 gimple *g = gimple_build_call (nthreads_decl, 0);
10564 gimple_call_set_lhs (g, num_threads);
10565 gimple_seq_add_stmt (body_p, g);
10566 g = gimple_build_call (threadnum_decl, 0);
10567 gimple_call_set_lhs (g, thread_num);
10568 gimple_seq_add_stmt (body_p, g);
/* Temporaries used by the prefix-sum bookkeeping (see function comment:
   ivar, k, l) and the clause chains for the two rewritten loops.  */
10570 tree ivar = create_tmp_var (sizetype);
10571 tree new_clauses1 = NULL_TREE, new_clauses2 = NULL_TREE;
10572 tree *cp1 = &new_clauses1, *cp2 = &new_clauses2;
10573 tree k = create_tmp_var (unsigned_type_node);
10574 tree l = create_tmp_var (unsigned_type_node);
/* For each inscan reduction clause, build the statement sequences used to
   stitch the two loops together (see the pseudo-code in the comment above):
   thr01/thrn1 initialize var2 before the first loop, scan1 is the per-
   iteration store into rprivb, mdlist stores into rpriva, thr02/thrn2
   reinitialize var2 before the second loop, input2 combines var2 with
   rprivb, reduc is the prefix-scan merge step, last copies the final
   value out.  */
10576 gimple_seq clist = NULL, mdlist = NULL;
10577 gimple_seq thr01_list = NULL, thrn1_list = NULL;
10578 gimple_seq thr02_list = NULL, thrn2_list = NULL;
10579 gimple_seq scan1_list = NULL, input2_list = NULL;
10580 gimple_seq last_list = NULL, reduc_list = NULL;
10581 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
10582 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
10583 && OMP_CLAUSE_REDUCTION_INSCAN (c))
10585 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
10586 tree var = OMP_CLAUSE_DECL (c);
10587 tree new_var = lookup_decl (var, ctx);
10588 tree var3 = NULL_TREE;
10589 tree new_vard = new_var;
10590 if (omp_is_reference (var))
10591 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
10592 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
10594 var3 = maybe_lookup_decl (new_vard, ctx);
10595 if (var3 == new_vard)
10596 var3 = NULL_TREE;
/* rpriva: shared per-thread array; rprivb: per-thread per-iteration
   array (allocated, hence _SCANTEMP__ALLOC).  */
10599 tree ptype = build_pointer_type (TREE_TYPE (new_var));
10600 tree rpriva = create_tmp_var (ptype);
10601 tree nc = build_omp_clause (clause_loc, OMP_CLAUSE__SCANTEMP_);
10602 OMP_CLAUSE_DECL (nc) = rpriva;
10603 *cp1 = nc;
10604 cp1 = &OMP_CLAUSE_CHAIN (nc);
10606 tree rprivb = create_tmp_var (ptype);
10607 nc = build_omp_clause (clause_loc, OMP_CLAUSE__SCANTEMP_);
10608 OMP_CLAUSE_DECL (nc) = rprivb;
10609 OMP_CLAUSE__SCANTEMP__ALLOC (nc) = 1;
10610 *cp1 = nc;
10611 cp1 = &OMP_CLAUSE_CHAIN (nc);
10613 tree var2 = create_tmp_var_raw (TREE_TYPE (new_var));
10614 if (new_vard != new_var)
10615 TREE_ADDRESSABLE (var2) = 1;
10616 gimple_add_tmp_var (var2);
/* Build the various array references used below:
   rpriva_ref = rpriva[thread_num], rprivam1_ref = rpriva[thread_num - 1],
   rprival_ref = rpriva[l], rprivalmk_ref = rpriva[l - k],
   rprivb_ref = rprivb[ivar].  */
10618 tree x = fold_convert_loc (clause_loc, sizetype, thread_num);
10619 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
10620 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
10621 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
10622 tree rpriva_ref = build_simple_mem_ref_loc (clause_loc, x);
10624 x = fold_build2_loc (clause_loc, PLUS_EXPR, integer_type_node,
10625 thread_num, integer_minus_one_node);
10626 x = fold_convert_loc (clause_loc, sizetype, x);
10627 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
10628 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
10629 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
10630 tree rprivam1_ref = build_simple_mem_ref_loc (clause_loc, x);
10632 x = fold_convert_loc (clause_loc, sizetype, l);
10633 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
10634 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
10635 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
10636 tree rprival_ref = build_simple_mem_ref_loc (clause_loc, x);
10638 x = fold_build2_loc (clause_loc, MINUS_EXPR, unsigned_type_node, l, k);
10639 x = fold_convert_loc (clause_loc, sizetype, x);
10640 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
10641 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
10642 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
10643 tree rprivalmk_ref = build_simple_mem_ref_loc (clause_loc, x);
10645 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, ivar,
10646 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
10647 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rprivb), rprivb, x);
10648 tree rprivb_ref = build_simple_mem_ref_loc (clause_loc, x);
10650 tree var4 = is_for_simd ? new_var : var2;
10651 tree var5 = NULL_TREE, var6 = NULL_TREE;
10652 if (is_for_simd)
10654 var5 = lookup_decl (var, input_simd_ctx);
10655 var6 = lookup_decl (var, scan_simd_ctx);
10656 if (new_vard != new_var)
10658 var5 = build_simple_mem_ref_loc (clause_loc, var5);
10659 var6 = build_simple_mem_ref_loc (clause_loc, var6);
/* User-defined reduction: use the UDR ctor/init/merge hooks.  */
10662 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
10664 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
10665 tree val = var2;
10667 x = lang_hooks.decls.omp_clause_default_ctor
10668 (c, var2, build_outer_var_ref (var, ctx));
10669 if (x)
10670 gimplify_and_add (x, &clist);
10672 x = build_outer_var_ref (var, ctx);
10673 x = lang_hooks.decls.omp_clause_assign_op (c, unshare_expr (var4),
10675 gimplify_and_add (x, &thr01_list);
10677 tree y = (DECL_HAS_VALUE_EXPR_P (new_vard)
10678 ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
10679 if (var3)
10681 x = unshare_expr (var4);
10682 x = lang_hooks.decls.omp_clause_assign_op (c, x, var3);
10683 gimplify_and_add (x, &thrn1_list);
10684 x = unshare_expr (var4);
10685 x = lang_hooks.decls.omp_clause_assign_op (c, x, var3);
10686 gimplify_and_add (x, &thr02_list);
10688 else if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
10690 /* Otherwise, assign to it the identity element.  */
10691 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
10692 tseq = copy_gimple_seq_and_replace_locals (tseq);
10693 if (!is_for_simd)
10695 if (new_vard != new_var)
10696 val = build_fold_addr_expr_loc (clause_loc, val);
10697 SET_DECL_VALUE_EXPR (new_vard, val);
10698 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
10700 SET_DECL_VALUE_EXPR (placeholder, error_mark_node);
10701 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
10702 lower_omp (&tseq, ctx);
10703 gimple_seq_add_seq (&thrn1_list, tseq);
10704 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
10705 lower_omp (&tseq, ctx);
10706 gimple_seq_add_seq (&thr02_list, tseq);
10707 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
10708 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
10709 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
10710 if (y)
10711 SET_DECL_VALUE_EXPR (new_vard, y);
10712 else
10714 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
10715 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
10719 x = unshare_expr (var4);
10720 x = lang_hooks.decls.omp_clause_assign_op (c, x, rprivam1_ref);
10721 gimplify_and_add (x, &thrn2_list);
/* Per-iteration input-phase code: merge into var2 and store into
   rprivb[ivar]; store-before-merge for exclusive scan.  */
10723 if (is_for_simd)
10725 x = unshare_expr (rprivb_ref);
10726 x = lang_hooks.decls.omp_clause_assign_op (c, x, var5);
10727 gimplify_and_add (x, &scan1_list);
10729 else
10731 if (ctx->scan_exclusive)
10733 x = unshare_expr (rprivb_ref);
10734 x = lang_hooks.decls.omp_clause_assign_op (c, x, var2);
10735 gimplify_and_add (x, &scan1_list);
10738 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
10739 tseq = copy_gimple_seq_and_replace_locals (tseq);
10740 SET_DECL_VALUE_EXPR (placeholder, var2);
10741 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
10742 lower_omp (&tseq, ctx);
10743 gimple_seq_add_seq (&scan1_list, tseq);
10745 if (ctx->scan_inclusive)
10747 x = unshare_expr (rprivb_ref);
10748 x = lang_hooks.decls.omp_clause_assign_op (c, x, var2);
10749 gimplify_and_add (x, &scan1_list);
10753 x = unshare_expr (rpriva_ref);
10754 x = lang_hooks.decls.omp_clause_assign_op (c, x,
10755 unshare_expr (var4));
10756 gimplify_and_add (x, &mdlist);
10758 x = unshare_expr (is_for_simd ? var6 : new_var);
10759 x = lang_hooks.decls.omp_clause_assign_op (c, x, var4);
10760 gimplify_and_add (x, &input2_list);
10762 val = rprivb_ref;
10763 if (new_vard != new_var)
10764 val = build_fold_addr_expr_loc (clause_loc, val);
10766 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
10767 tseq = copy_gimple_seq_and_replace_locals (tseq);
10768 SET_DECL_VALUE_EXPR (new_vard, val);
10769 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
10770 if (is_for_simd)
10772 SET_DECL_VALUE_EXPR (placeholder, var6);
10773 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
10775 else
10776 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
10777 lower_omp (&tseq, ctx);
10778 if (y)
10779 SET_DECL_VALUE_EXPR (new_vard, y);
10780 else
10782 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
10783 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
10785 if (!is_for_simd)
10787 SET_DECL_VALUE_EXPR (placeholder, new_var);
10788 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
10789 lower_omp (&tseq, ctx);
10791 gimple_seq_add_seq (&input2_list, tseq);
10793 x = build_outer_var_ref (var, ctx);
10794 x = lang_hooks.decls.omp_clause_assign_op (c, x, rpriva_ref);
10795 gimplify_and_add (x, &last_list);
/* Prefix-scan merge step (UDR merge through var2 as a temporary, see
   comment above: var2 = rpriva[l-k]; merge (var2, rpriva[l]);
   rpriva[l] = var2).  */
10797 x = lang_hooks.decls.omp_clause_assign_op (c, var2, rprivalmk_ref);
10798 gimplify_and_add (x, &reduc_list);
10799 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
10800 tseq = copy_gimple_seq_and_replace_locals (tseq);
10801 val = rprival_ref;
10802 if (new_vard != new_var)
10803 val = build_fold_addr_expr_loc (clause_loc, val);
10804 SET_DECL_VALUE_EXPR (new_vard, val);
10805 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
10806 SET_DECL_VALUE_EXPR (placeholder, var2);
10807 lower_omp (&tseq, ctx);
10808 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
10809 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
10810 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
10811 if (y)
10812 SET_DECL_VALUE_EXPR (new_vard, y);
10813 else
10815 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
10816 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
10818 gimple_seq_add_seq (&reduc_list, tseq);
10819 x = lang_hooks.decls.omp_clause_assign_op (c, rprival_ref, var2);
10820 gimplify_and_add (x, &reduc_list);
10822 x = lang_hooks.decls.omp_clause_dtor (c, var2);
10823 if (x)
10824 gimplify_and_add (x, dlist);
10826 else
/* Plain (non-UDR) reduction: use simple assignments and the clause's
   reduction code (MINUS folded to PLUS).  */
10828 x = build_outer_var_ref (var, ctx);
10829 gimplify_assign (unshare_expr (var4), x, &thr01_list);
10831 x = omp_reduction_init (c, TREE_TYPE (new_var));
10832 gimplify_assign (unshare_expr (var4), unshare_expr (x),
10833 &thrn1_list);
10834 gimplify_assign (unshare_expr (var4), x, &thr02_list);
10836 gimplify_assign (unshare_expr (var4), rprivam1_ref, &thrn2_list);
10838 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
10839 if (code == MINUS_EXPR)
10840 code = PLUS_EXPR;
10842 if (is_for_simd)
10843 gimplify_assign (unshare_expr (rprivb_ref), var5, &scan1_list);
10844 else
10846 if (ctx->scan_exclusive)
10847 gimplify_assign (unshare_expr (rprivb_ref), var2,
10848 &scan1_list);
10849 x = build2 (code, TREE_TYPE (new_var), var2, new_var);
10850 gimplify_assign (var2, x, &scan1_list);
10851 if (ctx->scan_inclusive)
10852 gimplify_assign (unshare_expr (rprivb_ref), var2,
10853 &scan1_list);
10856 gimplify_assign (unshare_expr (rpriva_ref), unshare_expr (var4),
10857 &mdlist);
10859 x = build2 (code, TREE_TYPE (new_var), var4, rprivb_ref);
10860 gimplify_assign (is_for_simd ? var6 : new_var, x, &input2_list);
10862 gimplify_assign (build_outer_var_ref (var, ctx), rpriva_ref,
10863 &last_list);
10865 x = build2 (code, TREE_TYPE (new_var), rprivalmk_ref,
10866 unshare_expr (rprival_ref));
10867 gimplify_assign (rprival_ref, x, &reduc_list);
/* Bump ivar after each iteration of either loop.  */
10871 g = gimple_build_assign (ivar, PLUS_EXPR, ivar, size_one_node);
10872 gimple_seq_add_stmt (&scan1_list, g);
10873 g = gimple_build_assign (ivar, PLUS_EXPR, ivar, size_one_node);
10874 gimple_seq_add_stmt (gimple_omp_body_ptr (is_for_simd
10875 ? scan_stmt4 : scan_stmt2), g);
/* Add the controlb/controlp _scantemp_ control clauses to both loops
   (they coordinate deallocation of rprivb, see function comment).  */
10877 tree controlb = create_tmp_var (boolean_type_node);
10878 tree controlp = create_tmp_var (ptr_type_node);
10879 tree nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
10880 OMP_CLAUSE_DECL (nc) = controlb;
10881 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
10882 *cp1 = nc;
10883 cp1 = &OMP_CLAUSE_CHAIN (nc);
10884 nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
10885 OMP_CLAUSE_DECL (nc) = controlp;
10886 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
10887 *cp1 = nc;
10888 cp1 = &OMP_CLAUSE_CHAIN (nc);
10889 nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
10890 OMP_CLAUSE_DECL (nc) = controlb;
10891 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
10892 *cp2 = nc;
10893 cp2 = &OMP_CLAUSE_CHAIN (nc);
10894 nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
10895 OMP_CLAUSE_DECL (nc) = controlp;
10896 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
10897 *cp2 = nc;
10898 cp2 = &OMP_CLAUSE_CHAIN (nc);
10900 *cp1 = gimple_omp_for_clauses (stmt);
10901 gimple_omp_for_set_clauses (stmt, new_clauses1);
10902 *cp2 = gimple_omp_for_clauses (new_stmt);
10903 gimple_omp_for_set_clauses (new_stmt, new_clauses2);
/* Splice the rewritten input/scan phase bodies back in place of the
   GIMPLE_OMP_SCAN markers, in both loop copies.  */
10905 if (is_for_simd)
10907 gimple_seq_add_seq (gimple_omp_body_ptr (scan_stmt3), scan1_list);
10908 gimple_seq_add_seq (gimple_omp_body_ptr (input_stmt4), input2_list);
10910 gsi_insert_seq_after (&input3_gsi, gimple_omp_body (input_stmt3),
10911 GSI_SAME_STMT);
10912 gsi_remove (&input3_gsi, true);
10913 gsi_insert_seq_after (&scan3_gsi, gimple_omp_body (scan_stmt3),
10914 GSI_SAME_STMT);
10915 gsi_remove (&scan3_gsi, true);
10916 gsi_insert_seq_after (&input4_gsi, gimple_omp_body (input_stmt4),
10917 GSI_SAME_STMT);
10918 gsi_remove (&input4_gsi, true);
10919 gsi_insert_seq_after (&scan4_gsi, gimple_omp_body (scan_stmt4),
10920 GSI_SAME_STMT);
10921 gsi_remove (&scan4_gsi, true);
10923 else
10925 gimple_omp_set_body (scan_stmt1, scan1_list);
10926 gimple_omp_set_body (input_stmt2, input2_list);
10929 gsi_insert_seq_after (&input1_gsi, gimple_omp_body (input_stmt1),
10930 GSI_SAME_STMT);
10931 gsi_remove (&input1_gsi, true);
10932 gsi_insert_seq_after (&scan1_gsi, gimple_omp_body (scan_stmt1),
10933 GSI_SAME_STMT);
10934 gsi_remove (&scan1_gsi, true);
10935 gsi_insert_seq_after (&input2_gsi, gimple_omp_body (input_stmt2),
10936 GSI_SAME_STMT);
10937 gsi_remove (&input2_gsi, true);
10938 gsi_insert_seq_after (&scan2_gsi, gimple_omp_body (scan_stmt2),
10939 GSI_SAME_STMT);
10940 gsi_remove (&scan2_gsi, true);
10942 gimple_seq_add_seq (body_p, clist);
/* Emit: if (thread_num == 0) { thr01_list } else { thrn1_list } —
   the initialization of var2 before the first loop.  */
10944 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
10945 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
10946 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
10947 g = gimple_build_cond (EQ_EXPR, thread_num, integer_zero_node, lab1, lab2);
10948 gimple_seq_add_stmt (body_p, g);
10949 g = gimple_build_label (lab1);
10950 gimple_seq_add_stmt (body_p, g);
10951 gimple_seq_add_seq (body_p, thr01_list);
10952 g = gimple_build_goto (lab3);
10953 gimple_seq_add_stmt (body_p, g);
10954 g = gimple_build_label (lab2);
10955 gimple_seq_add_stmt (body_p, g);
10956 gimple_seq_add_seq (body_p, thrn1_list);
10957 g = gimple_build_label (lab3);
10958 gimple_seq_add_stmt (body_p, g);
10960 g = gimple_build_assign (ivar, size_zero_node);
10961 gimple_seq_add_stmt (body_p, g);
/* First worksharing loop (input phase), then store this thread's total
   into rpriva (mdlist).  */
10963 gimple_seq_add_stmt (body_p, stmt);
10964 gimple_seq_add_seq (body_p, body);
10965 gimple_seq_add_stmt (body_p, gimple_build_omp_continue (fd->loop.v,
10966 fd->loop.v));
10968 g = gimple_build_omp_return (true);
10969 gimple_seq_add_stmt (body_p, g);
10970 gimple_seq_add_seq (body_p, mdlist);
/* If more than one thread, perform the work-efficient parallel prefix
   scan over rpriva[] (the <D.2102> block in the function comment).  */
10972 lab1 = create_artificial_label (UNKNOWN_LOCATION);
10973 lab2 = create_artificial_label (UNKNOWN_LOCATION);
10974 g = gimple_build_cond (GT_EXPR, num_threads, integer_one_node, lab1, lab2);
10975 gimple_seq_add_stmt (body_p, g);
10976 g = gimple_build_label (lab1);
10977 gimple_seq_add_stmt (body_p, g);
10979 g = omp_build_barrier (NULL);
10980 gimple_seq_add_stmt (body_p, g);
10982 tree down = create_tmp_var (unsigned_type_node);
10983 g = gimple_build_assign (down, build_zero_cst (unsigned_type_node));
10984 gimple_seq_add_stmt (body_p, g);
10986 g = gimple_build_assign (k, build_one_cst (unsigned_type_node));
10987 gimple_seq_add_stmt (body_p, g);
10989 tree num_threadsu = create_tmp_var (unsigned_type_node);
10990 g = gimple_build_assign (num_threadsu, NOP_EXPR, num_threads);
10991 gimple_seq_add_stmt (body_p, g);
10993 tree thread_numu = create_tmp_var (unsigned_type_node);
10994 g = gimple_build_assign (thread_numu, NOP_EXPR, thread_num);
10995 gimple_seq_add_stmt (body_p, g);
10997 tree thread_nump1 = create_tmp_var (unsigned_type_node);
10998 g = gimple_build_assign (thread_nump1, PLUS_EXPR, thread_numu,
10999 build_int_cst (unsigned_type_node, 1));
11000 gimple_seq_add_stmt (body_p, g);
/* Top of the scan loop (<D.2108> in the function comment).  */
11002 lab3 = create_artificial_label (UNKNOWN_LOCATION);
11003 g = gimple_build_label (lab3);
11004 gimple_seq_add_stmt (body_p, g);
11006 tree twok = create_tmp_var (unsigned_type_node);
11007 g = gimple_build_assign (twok, LSHIFT_EXPR, k, integer_one_node);
11008 gimple_seq_add_stmt (body_p, g);
/* When 2*k overflows num_threads, switch to the down-sweep phase.  */
11010 tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
11011 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
11012 tree lab6 = create_artificial_label (UNKNOWN_LOCATION);
11013 g = gimple_build_cond (GT_EXPR, twok, num_threadsu, lab4, lab5);
11014 gimple_seq_add_stmt (body_p, g);
11015 g = gimple_build_label (lab4);
11016 gimple_seq_add_stmt (body_p, g);
11017 g = gimple_build_assign (down, build_all_ones_cst (unsigned_type_node));
11018 gimple_seq_add_stmt (body_p, g);
11019 g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
11020 gimple_seq_add_stmt (body_p, g);
11022 g = gimple_build_cond (EQ_EXPR, k, num_threadsu, lab6, lab5);
11023 gimple_seq_add_stmt (body_p, g);
11024 g = gimple_build_label (lab6);
11025 gimple_seq_add_stmt (body_p, g);
11027 g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
11028 gimple_seq_add_stmt (body_p, g);
11030 g = gimple_build_label (lab5);
11031 gimple_seq_add_stmt (body_p, g);
11033 g = gimple_build_assign (twok, LSHIFT_EXPR, k, integer_one_node);
11034 gimple_seq_add_stmt (body_p, g);
/* l = (thread_num + 1) * 2k + (k & down) - 1, guarded against unsigned
   multiplication overflow via .MUL_OVERFLOW.  */
11036 tree cplx = create_tmp_var (build_complex_type (unsigned_type_node, false));
11037 g = gimple_build_call_internal (IFN_MUL_OVERFLOW, 2, thread_nump1, twok);
11038 gimple_call_set_lhs (g, cplx);
11039 gimple_seq_add_stmt (body_p, g);
11040 tree mul = create_tmp_var (unsigned_type_node);
11041 g = gimple_build_assign (mul, REALPART_EXPR,
11042 build1 (REALPART_EXPR, unsigned_type_node, cplx));
11043 gimple_seq_add_stmt (body_p, g);
11044 tree ovf = create_tmp_var (unsigned_type_node);
11045 g = gimple_build_assign (ovf, IMAGPART_EXPR,
11046 build1 (IMAGPART_EXPR, unsigned_type_node, cplx));
11047 gimple_seq_add_stmt (body_p, g);
11049 tree lab7 = create_artificial_label (UNKNOWN_LOCATION);
11050 tree lab8 = create_artificial_label (UNKNOWN_LOCATION);
11051 g = gimple_build_cond (EQ_EXPR, ovf, build_zero_cst (unsigned_type_node),
11052 lab7, lab8);
11053 gimple_seq_add_stmt (body_p, g);
11054 g = gimple_build_label (lab7);
11055 gimple_seq_add_stmt (body_p, g);
11057 tree andv = create_tmp_var (unsigned_type_node);
11058 g = gimple_build_assign (andv, BIT_AND_EXPR, k, down);
11059 gimple_seq_add_stmt (body_p, g);
11060 tree andvm1 = create_tmp_var (unsigned_type_node);
11061 g = gimple_build_assign (andvm1, PLUS_EXPR, andv,
11062 build_minus_one_cst (unsigned_type_node));
11063 gimple_seq_add_stmt (body_p, g);
11065 g = gimple_build_assign (l, PLUS_EXPR, mul, andvm1);
11066 gimple_seq_add_stmt (body_p, g);
/* If l is in range, perform the merge step (reduc_list) for this pair.  */
11068 tree lab9 = create_artificial_label (UNKNOWN_LOCATION);
11069 g = gimple_build_cond (LT_EXPR, l, num_threadsu, lab9, lab8);
11070 gimple_seq_add_stmt (body_p, g);
11071 g = gimple_build_label (lab9);
11072 gimple_seq_add_stmt (body_p, g);
11073 gimple_seq_add_seq (body_p, reduc_list);
11074 g = gimple_build_label (lab8);
11075 gimple_seq_add_stmt (body_p, g);
/* Double k on the up-sweep, halve it on the down-sweep; loop while
   k != 0, with a barrier per round.  */
11077 tree lab10 = create_artificial_label (UNKNOWN_LOCATION);
11078 tree lab11 = create_artificial_label (UNKNOWN_LOCATION);
11079 tree lab12 = create_artificial_label (UNKNOWN_LOCATION);
11080 g = gimple_build_cond (EQ_EXPR, down, build_zero_cst (unsigned_type_node),
11081 lab10, lab11);
11082 gimple_seq_add_stmt (body_p, g);
11083 g = gimple_build_label (lab10);
11084 gimple_seq_add_stmt (body_p, g);
11085 g = gimple_build_assign (k, LSHIFT_EXPR, k, integer_one_node);
11086 gimple_seq_add_stmt (body_p, g);
11087 g = gimple_build_goto (lab12);
11088 gimple_seq_add_stmt (body_p, g);
11089 g = gimple_build_label (lab11);
11090 gimple_seq_add_stmt (body_p, g);
11091 g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
11092 gimple_seq_add_stmt (body_p, g);
11093 g = gimple_build_label (lab12);
11094 gimple_seq_add_stmt (body_p, g);
11096 g = omp_build_barrier (NULL);
11097 gimple_seq_add_stmt (body_p, g);
11099 g = gimple_build_cond (NE_EXPR, k, build_zero_cst (unsigned_type_node),
11100 lab3, lab2);
11101 gimple_seq_add_stmt (body_p, g);
11103 g = gimple_build_label (lab2);
11104 gimple_seq_add_stmt (body_p, g);
/* Re-initialize var2 before the second loop: thread 0 gets the identity
   (thr02_list), others read rpriva[thread_num - 1] (thrn2_list).  */
11106 lab1 = create_artificial_label (UNKNOWN_LOCATION);
11107 lab2 = create_artificial_label (UNKNOWN_LOCATION);
11108 lab3 = create_artificial_label (UNKNOWN_LOCATION);
11109 g = gimple_build_cond (EQ_EXPR, thread_num, integer_zero_node, lab1, lab2);
11110 gimple_seq_add_stmt (body_p, g);
11111 g = gimple_build_label (lab1);
11112 gimple_seq_add_stmt (body_p, g);
11113 gimple_seq_add_seq (body_p, thr02_list);
11114 g = gimple_build_goto (lab3);
11115 gimple_seq_add_stmt (body_p, g);
11116 g = gimple_build_label (lab2);
11117 gimple_seq_add_stmt (body_p, g);
11118 gimple_seq_add_seq (body_p, thrn2_list);
11119 g = gimple_build_label (lab3);
11120 gimple_seq_add_stmt (body_p, g);
/* Second worksharing loop (scan phase).  */
11122 g = gimple_build_assign (ivar, size_zero_node);
11123 gimple_seq_add_stmt (body_p, g);
11124 gimple_seq_add_stmt (body_p, new_stmt);
11125 gimple_seq_add_seq (body_p, new_body);
/* Prepend to *DLIST: the last thread copies the overall reduction result
   back to the original variable (last_list).  */
11127 gimple_seq new_dlist = NULL;
11128 lab1 = create_artificial_label (UNKNOWN_LOCATION);
11129 lab2 = create_artificial_label (UNKNOWN_LOCATION);
11130 tree num_threadsm1 = create_tmp_var (integer_type_node);
11131 g = gimple_build_assign (num_threadsm1, PLUS_EXPR, num_threads,
11132 integer_minus_one_node);
11133 gimple_seq_add_stmt (&new_dlist, g);
11134 g = gimple_build_cond (EQ_EXPR, thread_num, num_threadsm1, lab1, lab2);
11135 gimple_seq_add_stmt (&new_dlist, g);
11136 g = gimple_build_label (lab1);
11137 gimple_seq_add_stmt (&new_dlist, g);
11138 gimple_seq_add_seq (&new_dlist, last_list);
11139 g = gimple_build_label (lab2);
11140 gimple_seq_add_stmt (&new_dlist, g);
11141 gimple_seq_add_seq (&new_dlist, *dlist);
11142 *dlist = new_dlist;
11145 /* Build an internal UNIQUE function with type IFN_UNIQUE_OACC_PRIVATE listing
11146 the addresses of variables to be made private at the surrounding
11147 parallelism level. Such functions appear in the gimple code stream in two
11148 forms, e.g. for a partitioned loop:
11150 .data_dep.6 = .UNIQUE (OACC_HEAD_MARK, .data_dep.6, 1, 68);
11151 .data_dep.6 = .UNIQUE (OACC_PRIVATE, .data_dep.6, -1, &w);
11152 .data_dep.6 = .UNIQUE (OACC_FORK, .data_dep.6, -1);
11153 .data_dep.6 = .UNIQUE (OACC_HEAD_MARK, .data_dep.6);
11155 or alternatively, OACC_PRIVATE can appear at the top level of a parallel,
11156 not as part of a HEAD_MARK sequence:
11158 .UNIQUE (OACC_PRIVATE, 0, 0, &w);
11160 For such stand-alone appearances, the 3rd argument is always 0, denoting
11161 gang partitioning. */
11163 static gcall *
11164 lower_oacc_private_marker (omp_context *ctx)
11166 if (ctx->oacc_privatization_candidates.length () == 0)
11167 return NULL;
11169 auto_vec<tree, 5> args;
11171 args.quick_push (build_int_cst (integer_type_node, IFN_UNIQUE_OACC_PRIVATE));
11172 args.quick_push (integer_zero_node);
11173 args.quick_push (integer_minus_one_node);
11175 int i;
11176 tree decl;
11177 FOR_EACH_VEC_ELT (ctx->oacc_privatization_candidates, i, decl)
11179 for (omp_context *thisctx = ctx; thisctx; thisctx = thisctx->outer)
11181 tree inner_decl = maybe_lookup_decl (decl, thisctx);
11182 if (inner_decl)
11184 decl = inner_decl;
11185 break;
11188 gcc_checking_assert (decl);
11190 tree addr = build_fold_addr_expr (decl);
11191 args.safe_push (addr);
11194 return gimple_build_call_internal_vec (IFN_UNIQUE, args);
11197 /* Lower code for an OMP loop directive. */
11199 static void
11200 lower_omp_for (gimple_stmt_iterator *gsi_p, omp_context *ctx)
11202 tree *rhs_p, block;
11203 struct omp_for_data fd, *fdp = NULL;
11204 gomp_for *stmt = as_a <gomp_for *> (gsi_stmt (*gsi_p));
11205 gbind *new_stmt;
11206 gimple_seq omp_for_body, body, dlist, tred_ilist = NULL, tred_dlist = NULL;
11207 gimple_seq cnt_list = NULL, clist = NULL;
11208 gimple_seq oacc_head = NULL, oacc_tail = NULL;
11209 size_t i;
11211 push_gimplify_context ();
11213 if (is_gimple_omp_oacc (ctx->stmt))
11214 oacc_privatization_scan_clause_chain (ctx, gimple_omp_for_clauses (stmt));
11216 lower_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
11218 block = make_node (BLOCK);
11219 new_stmt = gimple_build_bind (NULL, NULL, block);
11220 /* Replace at gsi right away, so that 'stmt' is no member
11221 of a sequence anymore as we're going to add to a different
11222 one below. */
11223 gsi_replace (gsi_p, new_stmt, true);
11225 /* Move declaration of temporaries in the loop body before we make
11226 it go away. */
11227 omp_for_body = gimple_omp_body (stmt);
11228 if (!gimple_seq_empty_p (omp_for_body)
11229 && gimple_code (gimple_seq_first_stmt (omp_for_body)) == GIMPLE_BIND)
11231 gbind *inner_bind
11232 = as_a <gbind *> (gimple_seq_first_stmt (omp_for_body));
11233 tree vars = gimple_bind_vars (inner_bind);
11234 if (is_gimple_omp_oacc (ctx->stmt))
11235 oacc_privatization_scan_decl_chain (ctx, vars);
11236 gimple_bind_append_vars (new_stmt, vars);
11237 /* bind_vars/BLOCK_VARS are being moved to new_stmt/block, don't
11238 keep them on the inner_bind and it's block. */
11239 gimple_bind_set_vars (inner_bind, NULL_TREE);
11240 if (gimple_bind_block (inner_bind))
11241 BLOCK_VARS (gimple_bind_block (inner_bind)) = NULL_TREE;
11244 if (gimple_omp_for_combined_into_p (stmt))
11246 omp_extract_for_data (stmt, &fd, NULL);
11247 fdp = &fd;
11249 /* We need two temporaries with fd.loop.v type (istart/iend)
11250 and then (fd.collapse - 1) temporaries with the same
11251 type for count2 ... countN-1 vars if not constant. */
11252 size_t count = 2;
11253 tree type = fd.iter_type;
11254 if (fd.collapse > 1
11255 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
11256 count += fd.collapse - 1;
11257 size_t count2 = 0;
11258 tree type2 = NULL_TREE;
11259 bool taskreg_for
11260 = (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR
11261 || gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP);
11262 tree outerc = NULL, *pc = gimple_omp_for_clauses_ptr (stmt);
11263 tree simtc = NULL;
11264 tree clauses = *pc;
11265 if (fd.collapse > 1
11266 && fd.non_rect
11267 && fd.last_nonrect == fd.first_nonrect + 1
11268 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
11269 if (tree v = gimple_omp_for_index (stmt, fd.last_nonrect))
11270 if (!TYPE_UNSIGNED (TREE_TYPE (v)))
11272 v = gimple_omp_for_index (stmt, fd.first_nonrect);
11273 type2 = TREE_TYPE (v);
11274 count++;
11275 count2 = 3;
11277 if (taskreg_for)
11278 outerc
11279 = omp_find_clause (gimple_omp_taskreg_clauses (ctx->outer->stmt),
11280 OMP_CLAUSE__LOOPTEMP_);
11281 if (ctx->simt_stmt)
11282 simtc = omp_find_clause (gimple_omp_for_clauses (ctx->simt_stmt),
11283 OMP_CLAUSE__LOOPTEMP_);
11284 for (i = 0; i < count + count2; i++)
11286 tree temp;
11287 if (taskreg_for)
11289 gcc_assert (outerc);
11290 temp = lookup_decl (OMP_CLAUSE_DECL (outerc), ctx->outer);
11291 outerc = omp_find_clause (OMP_CLAUSE_CHAIN (outerc),
11292 OMP_CLAUSE__LOOPTEMP_);
11294 else
11296 /* If there are 2 adjacent SIMD stmts, one with _simt_
11297 clause, another without, make sure they have the same
11298 decls in _looptemp_ clauses, because the outer stmt
11299 they are combined into will look up just one inner_stmt. */
11300 if (ctx->simt_stmt)
11301 temp = OMP_CLAUSE_DECL (simtc);
11302 else
11303 temp = create_tmp_var (i >= count ? type2 : type);
11304 insert_decl_map (&ctx->outer->cb, temp, temp);
11306 *pc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
11307 OMP_CLAUSE_DECL (*pc) = temp;
11308 pc = &OMP_CLAUSE_CHAIN (*pc);
11309 if (ctx->simt_stmt)
11310 simtc = omp_find_clause (OMP_CLAUSE_CHAIN (simtc),
11311 OMP_CLAUSE__LOOPTEMP_);
11313 *pc = clauses;
11316 /* The pre-body and input clauses go before the lowered GIMPLE_OMP_FOR. */
11317 dlist = NULL;
11318 body = NULL;
11319 tree rclauses
11320 = omp_task_reductions_find_first (gimple_omp_for_clauses (stmt), OMP_FOR,
11321 OMP_CLAUSE_REDUCTION);
11322 tree rtmp = NULL_TREE;
11323 if (rclauses)
11325 tree type = build_pointer_type (pointer_sized_int_node);
11326 tree temp = create_tmp_var (type);
11327 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
11328 OMP_CLAUSE_DECL (c) = temp;
11329 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (stmt);
11330 gimple_omp_for_set_clauses (stmt, c);
11331 lower_omp_task_reductions (ctx, OMP_FOR,
11332 gimple_omp_for_clauses (stmt),
11333 &tred_ilist, &tred_dlist);
11334 rclauses = c;
11335 rtmp = make_ssa_name (type);
11336 gimple_seq_add_stmt (&body, gimple_build_assign (rtmp, temp));
11339 lower_lastprivate_conditional_clauses (gimple_omp_for_clauses_ptr (stmt),
11340 ctx);
11342 lower_rec_input_clauses (gimple_omp_for_clauses (stmt), &body, &dlist, ctx,
11343 fdp);
11344 gimple_seq_add_seq (rclauses ? &tred_ilist : &body,
11345 gimple_omp_for_pre_body (stmt));
11347 lower_omp (gimple_omp_body_ptr (stmt), ctx);
11349 gcall *private_marker = NULL;
11350 if (is_gimple_omp_oacc (ctx->stmt)
11351 && !gimple_seq_empty_p (omp_for_body))
11352 private_marker = lower_oacc_private_marker (ctx);
11354 /* Lower the header expressions. At this point, we can assume that
11355 the header is of the form:
11357 #pragma omp for (V = VAL1; V {<|>|<=|>=} VAL2; V = V [+-] VAL3)
11359 We just need to make sure that VAL1, VAL2 and VAL3 are lowered
11360 using the .omp_data_s mapping, if needed. */
11361 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
11363 rhs_p = gimple_omp_for_initial_ptr (stmt, i);
11364 if (TREE_CODE (*rhs_p) == TREE_VEC)
11366 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 1)))
11367 TREE_VEC_ELT (*rhs_p, 1)
11368 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 1), &cnt_list);
11369 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 2)))
11370 TREE_VEC_ELT (*rhs_p, 2)
11371 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 2), &cnt_list);
11373 else if (!is_gimple_min_invariant (*rhs_p))
11374 *rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
11375 else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
11376 recompute_tree_invariant_for_addr_expr (*rhs_p);
11378 rhs_p = gimple_omp_for_final_ptr (stmt, i);
11379 if (TREE_CODE (*rhs_p) == TREE_VEC)
11381 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 1)))
11382 TREE_VEC_ELT (*rhs_p, 1)
11383 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 1), &cnt_list);
11384 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 2)))
11385 TREE_VEC_ELT (*rhs_p, 2)
11386 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 2), &cnt_list);
11388 else if (!is_gimple_min_invariant (*rhs_p))
11389 *rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
11390 else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
11391 recompute_tree_invariant_for_addr_expr (*rhs_p);
11393 rhs_p = &TREE_OPERAND (gimple_omp_for_incr (stmt, i), 1);
11394 if (!is_gimple_min_invariant (*rhs_p))
11395 *rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
11397 if (rclauses)
11398 gimple_seq_add_seq (&tred_ilist, cnt_list);
11399 else
11400 gimple_seq_add_seq (&body, cnt_list);
11402 /* Once lowered, extract the bounds and clauses. */
11403 omp_extract_for_data (stmt, &fd, NULL);
11405 if (is_gimple_omp_oacc (ctx->stmt)
11406 && !ctx_in_oacc_kernels_region (ctx))
11407 lower_oacc_head_tail (gimple_location (stmt),
11408 gimple_omp_for_clauses (stmt), private_marker,
11409 &oacc_head, &oacc_tail, ctx);
11411 /* Add OpenACC partitioning and reduction markers just before the loop. */
11412 if (oacc_head)
11413 gimple_seq_add_seq (&body, oacc_head);
11415 lower_omp_for_lastprivate (&fd, &body, &dlist, &clist, ctx);
11417 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
11418 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
11419 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
11420 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
11422 OMP_CLAUSE_DECL (c) = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
11423 if (DECL_P (OMP_CLAUSE_LINEAR_STEP (c)))
11424 OMP_CLAUSE_LINEAR_STEP (c)
11425 = maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_LINEAR_STEP (c),
11426 ctx);
11429 if ((ctx->scan_inclusive || ctx->scan_exclusive)
11430 && gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
11431 lower_omp_for_scan (&body, &dlist, stmt, &fd, ctx);
11432 else
11434 gimple_seq_add_stmt (&body, stmt);
11435 gimple_seq_add_seq (&body, gimple_omp_body (stmt));
11438 gimple_seq_add_stmt (&body, gimple_build_omp_continue (fd.loop.v,
11439 fd.loop.v));
11441 /* After the loop, add exit clauses. */
11442 lower_reduction_clauses (gimple_omp_for_clauses (stmt), &body, &clist, ctx);
11444 if (clist)
11446 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START);
11447 gcall *g = gimple_build_call (fndecl, 0);
11448 gimple_seq_add_stmt (&body, g);
11449 gimple_seq_add_seq (&body, clist);
11450 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END);
11451 g = gimple_build_call (fndecl, 0);
11452 gimple_seq_add_stmt (&body, g);
11455 if (ctx->cancellable)
11456 gimple_seq_add_stmt (&body, gimple_build_label (ctx->cancel_label));
11458 gimple_seq_add_seq (&body, dlist);
11460 if (rclauses)
11462 gimple_seq_add_seq (&tred_ilist, body);
11463 body = tred_ilist;
11466 body = maybe_catch_exception (body);
11468 /* Region exit marker goes at the end of the loop body. */
11469 gimple *g = gimple_build_omp_return (fd.have_nowait);
11470 gimple_seq_add_stmt (&body, g);
11472 gimple_seq_add_seq (&body, tred_dlist);
11474 maybe_add_implicit_barrier_cancel (ctx, g, &body);
11476 if (rclauses)
11477 OMP_CLAUSE_DECL (rclauses) = rtmp;
11479 /* Add OpenACC joining and reduction markers just after the loop. */
11480 if (oacc_tail)
11481 gimple_seq_add_seq (&body, oacc_tail);
11483 pop_gimplify_context (new_stmt);
11485 gimple_bind_append_vars (new_stmt, ctx->block_vars);
11486 maybe_remove_omp_member_access_dummy_vars (new_stmt);
11487 BLOCK_VARS (block) = gimple_bind_vars (new_stmt);
11488 if (BLOCK_VARS (block))
11489 TREE_USED (block) = 1;
11491 gimple_bind_set_body (new_stmt, body);
11492 gimple_omp_set_body (stmt, NULL);
11493 gimple_omp_for_set_pre_body (stmt, NULL);
11496 /* Callback for walk_stmts. Check if the current statement only contains
11497 GIMPLE_OMP_FOR or GIMPLE_OMP_SECTIONS. */
11499 static tree
11500 check_combined_parallel (gimple_stmt_iterator *gsi_p,
11501 bool *handled_ops_p,
11502 struct walk_stmt_info *wi)
11504 int *info = (int *) wi->info;
11505 gimple *stmt = gsi_stmt (*gsi_p);
11507 *handled_ops_p = true;
11508 switch (gimple_code (stmt))
11510 WALK_SUBSTMTS;
11512 case GIMPLE_DEBUG:
11513 break;
11514 case GIMPLE_OMP_FOR:
11515 case GIMPLE_OMP_SECTIONS:
11516 *info = *info == 0 ? 1 : -1;
11517 break;
11518 default:
11519 *info = -1;
11520 break;
11522 return NULL;
11525 struct omp_taskcopy_context
11527 /* This field must be at the beginning, as we do "inheritance": Some
11528 callback functions for tree-inline.c (e.g., omp_copy_decl)
11529 receive a copy_body_data pointer that is up-casted to an
11530 omp_context pointer. */
11531 copy_body_data cb;
11532 omp_context *ctx;
11535 static tree
11536 task_copyfn_copy_decl (tree var, copy_body_data *cb)
11538 struct omp_taskcopy_context *tcctx = (struct omp_taskcopy_context *) cb;
11540 if (splay_tree_lookup (tcctx->ctx->sfield_map, (splay_tree_key) var))
11541 return create_tmp_var (TREE_TYPE (var));
11543 return var;
11546 static tree
11547 task_copyfn_remap_type (struct omp_taskcopy_context *tcctx, tree orig_type)
11549 tree name, new_fields = NULL, type, f;
11551 type = lang_hooks.types.make_type (RECORD_TYPE);
11552 name = DECL_NAME (TYPE_NAME (orig_type));
11553 name = build_decl (gimple_location (tcctx->ctx->stmt),
11554 TYPE_DECL, name, type);
11555 TYPE_NAME (type) = name;
11557 for (f = TYPE_FIELDS (orig_type); f ; f = TREE_CHAIN (f))
11559 tree new_f = copy_node (f);
11560 DECL_CONTEXT (new_f) = type;
11561 TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &tcctx->cb);
11562 TREE_CHAIN (new_f) = new_fields;
11563 walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &tcctx->cb, NULL);
11564 walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r, &tcctx->cb, NULL);
11565 walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
11566 &tcctx->cb, NULL);
11567 new_fields = new_f;
11568 tcctx->cb.decl_map->put (f, new_f);
11570 TYPE_FIELDS (type) = nreverse (new_fields);
11571 layout_type (type);
11572 return type;
11575 /* Create task copyfn. */
11577 static void
11578 create_task_copyfn (gomp_task *task_stmt, omp_context *ctx)
11580 struct function *child_cfun;
11581 tree child_fn, t, c, src, dst, f, sf, arg, sarg, decl;
11582 tree record_type, srecord_type, bind, list;
11583 bool record_needs_remap = false, srecord_needs_remap = false;
11584 splay_tree_node n;
11585 struct omp_taskcopy_context tcctx;
11586 location_t loc = gimple_location (task_stmt);
11587 size_t looptempno = 0;
11589 child_fn = gimple_omp_task_copy_fn (task_stmt);
11590 child_cfun = DECL_STRUCT_FUNCTION (child_fn);
11591 gcc_assert (child_cfun->cfg == NULL);
11592 DECL_SAVED_TREE (child_fn) = alloc_stmt_list ();
11594 /* Reset DECL_CONTEXT on function arguments. */
11595 for (t = DECL_ARGUMENTS (child_fn); t; t = DECL_CHAIN (t))
11596 DECL_CONTEXT (t) = child_fn;
11598 /* Populate the function. */
11599 push_gimplify_context ();
11600 push_cfun (child_cfun);
11602 bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
11603 TREE_SIDE_EFFECTS (bind) = 1;
11604 list = NULL;
11605 DECL_SAVED_TREE (child_fn) = bind;
11606 DECL_SOURCE_LOCATION (child_fn) = gimple_location (task_stmt);
11608 /* Remap src and dst argument types if needed. */
11609 record_type = ctx->record_type;
11610 srecord_type = ctx->srecord_type;
11611 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
11612 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
11614 record_needs_remap = true;
11615 break;
11617 for (f = TYPE_FIELDS (srecord_type); f ; f = DECL_CHAIN (f))
11618 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
11620 srecord_needs_remap = true;
11621 break;
11624 if (record_needs_remap || srecord_needs_remap)
11626 memset (&tcctx, '\0', sizeof (tcctx));
11627 tcctx.cb.src_fn = ctx->cb.src_fn;
11628 tcctx.cb.dst_fn = child_fn;
11629 tcctx.cb.src_node = cgraph_node::get (tcctx.cb.src_fn);
11630 gcc_checking_assert (tcctx.cb.src_node);
11631 tcctx.cb.dst_node = tcctx.cb.src_node;
11632 tcctx.cb.src_cfun = ctx->cb.src_cfun;
11633 tcctx.cb.copy_decl = task_copyfn_copy_decl;
11634 tcctx.cb.eh_lp_nr = 0;
11635 tcctx.cb.transform_call_graph_edges = CB_CGE_MOVE;
11636 tcctx.cb.decl_map = new hash_map<tree, tree>;
11637 tcctx.ctx = ctx;
11639 if (record_needs_remap)
11640 record_type = task_copyfn_remap_type (&tcctx, record_type);
11641 if (srecord_needs_remap)
11642 srecord_type = task_copyfn_remap_type (&tcctx, srecord_type);
11644 else
11645 tcctx.cb.decl_map = NULL;
11647 arg = DECL_ARGUMENTS (child_fn);
11648 TREE_TYPE (arg) = build_pointer_type (record_type);
11649 sarg = DECL_CHAIN (arg);
11650 TREE_TYPE (sarg) = build_pointer_type (srecord_type);
11652 /* First pass: initialize temporaries used in record_type and srecord_type
11653 sizes and field offsets. */
11654 if (tcctx.cb.decl_map)
11655 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
11656 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
11658 tree *p;
11660 decl = OMP_CLAUSE_DECL (c);
11661 p = tcctx.cb.decl_map->get (decl);
11662 if (p == NULL)
11663 continue;
11664 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
11665 sf = (tree) n->value;
11666 sf = *tcctx.cb.decl_map->get (sf);
11667 src = build_simple_mem_ref_loc (loc, sarg);
11668 src = omp_build_component_ref (src, sf);
11669 t = build2 (MODIFY_EXPR, TREE_TYPE (*p), *p, src);
11670 append_to_statement_list (t, &list);
11673 /* Second pass: copy shared var pointers and copy construct non-VLA
11674 firstprivate vars. */
11675 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
11676 switch (OMP_CLAUSE_CODE (c))
11678 splay_tree_key key;
11679 case OMP_CLAUSE_SHARED:
11680 decl = OMP_CLAUSE_DECL (c);
11681 key = (splay_tree_key) decl;
11682 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
11683 key = (splay_tree_key) &DECL_UID (decl);
11684 n = splay_tree_lookup (ctx->field_map, key);
11685 if (n == NULL)
11686 break;
11687 f = (tree) n->value;
11688 if (tcctx.cb.decl_map)
11689 f = *tcctx.cb.decl_map->get (f);
11690 n = splay_tree_lookup (ctx->sfield_map, key);
11691 sf = (tree) n->value;
11692 if (tcctx.cb.decl_map)
11693 sf = *tcctx.cb.decl_map->get (sf);
11694 src = build_simple_mem_ref_loc (loc, sarg);
11695 src = omp_build_component_ref (src, sf);
11696 dst = build_simple_mem_ref_loc (loc, arg);
11697 dst = omp_build_component_ref (dst, f);
11698 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
11699 append_to_statement_list (t, &list);
11700 break;
11701 case OMP_CLAUSE_REDUCTION:
11702 case OMP_CLAUSE_IN_REDUCTION:
11703 decl = OMP_CLAUSE_DECL (c);
11704 if (TREE_CODE (decl) == MEM_REF)
11706 decl = TREE_OPERAND (decl, 0);
11707 if (TREE_CODE (decl) == POINTER_PLUS_EXPR)
11708 decl = TREE_OPERAND (decl, 0);
11709 if (TREE_CODE (decl) == INDIRECT_REF
11710 || TREE_CODE (decl) == ADDR_EXPR)
11711 decl = TREE_OPERAND (decl, 0);
11713 key = (splay_tree_key) decl;
11714 n = splay_tree_lookup (ctx->field_map, key);
11715 if (n == NULL)
11716 break;
11717 f = (tree) n->value;
11718 if (tcctx.cb.decl_map)
11719 f = *tcctx.cb.decl_map->get (f);
11720 n = splay_tree_lookup (ctx->sfield_map, key);
11721 sf = (tree) n->value;
11722 if (tcctx.cb.decl_map)
11723 sf = *tcctx.cb.decl_map->get (sf);
11724 src = build_simple_mem_ref_loc (loc, sarg);
11725 src = omp_build_component_ref (src, sf);
11726 if (decl != OMP_CLAUSE_DECL (c)
11727 && TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
11728 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
11729 src = build_simple_mem_ref_loc (loc, src);
11730 dst = build_simple_mem_ref_loc (loc, arg);
11731 dst = omp_build_component_ref (dst, f);
11732 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
11733 append_to_statement_list (t, &list);
11734 break;
11735 case OMP_CLAUSE__LOOPTEMP_:
11736 /* Fields for first two _looptemp_ clauses are initialized by
11737 GOMP_taskloop*, the rest are handled like firstprivate. */
11738 if (looptempno < 2)
11740 looptempno++;
11741 break;
11743 /* FALLTHRU */
11744 case OMP_CLAUSE__REDUCTEMP_:
11745 case OMP_CLAUSE_FIRSTPRIVATE:
11746 decl = OMP_CLAUSE_DECL (c);
11747 if (is_variable_sized (decl))
11748 break;
11749 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
11750 if (n == NULL)
11751 break;
11752 f = (tree) n->value;
11753 if (tcctx.cb.decl_map)
11754 f = *tcctx.cb.decl_map->get (f);
11755 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
11756 if (n != NULL)
11758 sf = (tree) n->value;
11759 if (tcctx.cb.decl_map)
11760 sf = *tcctx.cb.decl_map->get (sf);
11761 src = build_simple_mem_ref_loc (loc, sarg);
11762 src = omp_build_component_ref (src, sf);
11763 if (use_pointer_for_field (decl, NULL) || omp_is_reference (decl))
11764 src = build_simple_mem_ref_loc (loc, src);
11766 else
11767 src = decl;
11768 dst = build_simple_mem_ref_loc (loc, arg);
11769 dst = omp_build_component_ref (dst, f);
11770 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
11771 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
11772 else
11774 if (ctx->allocate_map)
11775 if (tree *allocatorp = ctx->allocate_map->get (decl))
11777 tree allocator = *allocatorp;
11778 if (TREE_CODE (allocator) != INTEGER_CST)
11780 n = splay_tree_lookup (ctx->sfield_map,
11781 (splay_tree_key) allocator);
11782 allocator = (tree) n->value;
11783 if (tcctx.cb.decl_map)
11784 allocator = *tcctx.cb.decl_map->get (allocator);
11785 tree a = build_simple_mem_ref_loc (loc, sarg);
11786 allocator = omp_build_component_ref (a, allocator);
11788 allocator = fold_convert (pointer_sized_int_node, allocator);
11789 tree a = builtin_decl_explicit (BUILT_IN_GOMP_ALLOC);
11790 tree align = build_int_cst (size_type_node,
11791 DECL_ALIGN_UNIT (decl));
11792 tree sz = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (dst)));
11793 tree ptr = build_call_expr_loc (loc, a, 3, align, sz,
11794 allocator);
11795 ptr = fold_convert (TREE_TYPE (dst), ptr);
11796 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, ptr);
11797 append_to_statement_list (t, &list);
11798 dst = build_simple_mem_ref_loc (loc, dst);
11800 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
11802 append_to_statement_list (t, &list);
11803 break;
11804 case OMP_CLAUSE_PRIVATE:
11805 if (! OMP_CLAUSE_PRIVATE_OUTER_REF (c))
11806 break;
11807 decl = OMP_CLAUSE_DECL (c);
11808 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
11809 f = (tree) n->value;
11810 if (tcctx.cb.decl_map)
11811 f = *tcctx.cb.decl_map->get (f);
11812 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
11813 if (n != NULL)
11815 sf = (tree) n->value;
11816 if (tcctx.cb.decl_map)
11817 sf = *tcctx.cb.decl_map->get (sf);
11818 src = build_simple_mem_ref_loc (loc, sarg);
11819 src = omp_build_component_ref (src, sf);
11820 if (use_pointer_for_field (decl, NULL))
11821 src = build_simple_mem_ref_loc (loc, src);
11823 else
11824 src = decl;
11825 dst = build_simple_mem_ref_loc (loc, arg);
11826 dst = omp_build_component_ref (dst, f);
11827 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
11828 append_to_statement_list (t, &list);
11829 break;
11830 default:
11831 break;
11834 /* Last pass: handle VLA firstprivates. */
11835 if (tcctx.cb.decl_map)
11836 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
11837 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
11839 tree ind, ptr, df;
11841 decl = OMP_CLAUSE_DECL (c);
11842 if (!is_variable_sized (decl))
11843 continue;
11844 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
11845 if (n == NULL)
11846 continue;
11847 f = (tree) n->value;
11848 f = *tcctx.cb.decl_map->get (f);
11849 gcc_assert (DECL_HAS_VALUE_EXPR_P (decl));
11850 ind = DECL_VALUE_EXPR (decl);
11851 gcc_assert (TREE_CODE (ind) == INDIRECT_REF);
11852 gcc_assert (DECL_P (TREE_OPERAND (ind, 0)));
11853 n = splay_tree_lookup (ctx->sfield_map,
11854 (splay_tree_key) TREE_OPERAND (ind, 0));
11855 sf = (tree) n->value;
11856 sf = *tcctx.cb.decl_map->get (sf);
11857 src = build_simple_mem_ref_loc (loc, sarg);
11858 src = omp_build_component_ref (src, sf);
11859 src = build_simple_mem_ref_loc (loc, src);
11860 dst = build_simple_mem_ref_loc (loc, arg);
11861 dst = omp_build_component_ref (dst, f);
11862 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
11863 append_to_statement_list (t, &list);
11864 n = splay_tree_lookup (ctx->field_map,
11865 (splay_tree_key) TREE_OPERAND (ind, 0));
11866 df = (tree) n->value;
11867 df = *tcctx.cb.decl_map->get (df);
11868 ptr = build_simple_mem_ref_loc (loc, arg);
11869 ptr = omp_build_component_ref (ptr, df);
11870 t = build2 (MODIFY_EXPR, TREE_TYPE (ptr), ptr,
11871 build_fold_addr_expr_loc (loc, dst));
11872 append_to_statement_list (t, &list);
11875 t = build1 (RETURN_EXPR, void_type_node, NULL);
11876 append_to_statement_list (t, &list);
11878 if (tcctx.cb.decl_map)
11879 delete tcctx.cb.decl_map;
11880 pop_gimplify_context (NULL);
11881 BIND_EXPR_BODY (bind) = list;
11882 pop_cfun ();
11885 static void
11886 lower_depend_clauses (tree *pclauses, gimple_seq *iseq, gimple_seq *oseq)
11888 tree c, clauses;
11889 gimple *g;
11890 size_t cnt[4] = { 0, 0, 0, 0 }, idx = 2, i;
11892 clauses = omp_find_clause (*pclauses, OMP_CLAUSE_DEPEND);
11893 gcc_assert (clauses);
11894 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
11895 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
11896 switch (OMP_CLAUSE_DEPEND_KIND (c))
11898 case OMP_CLAUSE_DEPEND_LAST:
11899 /* Lowering already done at gimplification. */
11900 return;
11901 case OMP_CLAUSE_DEPEND_IN:
11902 cnt[2]++;
11903 break;
11904 case OMP_CLAUSE_DEPEND_OUT:
11905 case OMP_CLAUSE_DEPEND_INOUT:
11906 cnt[0]++;
11907 break;
11908 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
11909 cnt[1]++;
11910 break;
11911 case OMP_CLAUSE_DEPEND_DEPOBJ:
11912 cnt[3]++;
11913 break;
11914 case OMP_CLAUSE_DEPEND_SOURCE:
11915 case OMP_CLAUSE_DEPEND_SINK:
11916 /* FALLTHRU */
11917 default:
11918 gcc_unreachable ();
11920 if (cnt[1] || cnt[3])
11921 idx = 5;
11922 size_t total = cnt[0] + cnt[1] + cnt[2] + cnt[3];
11923 tree type = build_array_type_nelts (ptr_type_node, total + idx);
11924 tree array = create_tmp_var (type);
11925 TREE_ADDRESSABLE (array) = 1;
11926 tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
11927 NULL_TREE);
11928 if (idx == 5)
11930 g = gimple_build_assign (r, build_int_cst (ptr_type_node, 0));
11931 gimple_seq_add_stmt (iseq, g);
11932 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
11933 NULL_TREE);
11935 g = gimple_build_assign (r, build_int_cst (ptr_type_node, total));
11936 gimple_seq_add_stmt (iseq, g);
11937 for (i = 0; i < (idx == 5 ? 3 : 1); i++)
11939 r = build4 (ARRAY_REF, ptr_type_node, array,
11940 size_int (i + 1 + (idx == 5)), NULL_TREE, NULL_TREE);
11941 g = gimple_build_assign (r, build_int_cst (ptr_type_node, cnt[i]));
11942 gimple_seq_add_stmt (iseq, g);
11944 for (i = 0; i < 4; i++)
11946 if (cnt[i] == 0)
11947 continue;
11948 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
11949 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
11950 continue;
11951 else
11953 switch (OMP_CLAUSE_DEPEND_KIND (c))
11955 case OMP_CLAUSE_DEPEND_IN:
11956 if (i != 2)
11957 continue;
11958 break;
11959 case OMP_CLAUSE_DEPEND_OUT:
11960 case OMP_CLAUSE_DEPEND_INOUT:
11961 if (i != 0)
11962 continue;
11963 break;
11964 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
11965 if (i != 1)
11966 continue;
11967 break;
11968 case OMP_CLAUSE_DEPEND_DEPOBJ:
11969 if (i != 3)
11970 continue;
11971 break;
11972 default:
11973 gcc_unreachable ();
11975 tree t = OMP_CLAUSE_DECL (c);
11976 t = fold_convert (ptr_type_node, t);
11977 gimplify_expr (&t, iseq, NULL, is_gimple_val, fb_rvalue);
11978 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (idx++),
11979 NULL_TREE, NULL_TREE);
11980 g = gimple_build_assign (r, t);
11981 gimple_seq_add_stmt (iseq, g);
11984 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
11985 OMP_CLAUSE_DEPEND_KIND (c) = OMP_CLAUSE_DEPEND_LAST;
11986 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
11987 OMP_CLAUSE_CHAIN (c) = *pclauses;
11988 *pclauses = c;
11989 tree clobber = build_clobber (type);
11990 g = gimple_build_assign (array, clobber);
11991 gimple_seq_add_stmt (oseq, g);
11994 /* Lower the OpenMP parallel or task directive in the current statement
11995 in GSI_P. CTX holds context information for the directive. */
11997 static void
11998 lower_omp_taskreg (gimple_stmt_iterator *gsi_p, omp_context *ctx)
12000 tree clauses;
12001 tree child_fn, t;
12002 gimple *stmt = gsi_stmt (*gsi_p);
12003 gbind *par_bind, *bind, *dep_bind = NULL;
12004 gimple_seq par_body;
12005 location_t loc = gimple_location (stmt);
12007 clauses = gimple_omp_taskreg_clauses (stmt);
12008 if (gimple_code (stmt) == GIMPLE_OMP_TASK
12009 && gimple_omp_task_taskwait_p (stmt))
12011 par_bind = NULL;
12012 par_body = NULL;
12014 else
12016 par_bind
12017 = as_a <gbind *> (gimple_seq_first_stmt (gimple_omp_body (stmt)));
12018 par_body = gimple_bind_body (par_bind);
12020 child_fn = ctx->cb.dst_fn;
12021 if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL
12022 && !gimple_omp_parallel_combined_p (stmt))
12024 struct walk_stmt_info wi;
12025 int ws_num = 0;
12027 memset (&wi, 0, sizeof (wi));
12028 wi.info = &ws_num;
12029 wi.val_only = true;
12030 walk_gimple_seq (par_body, check_combined_parallel, NULL, &wi);
12031 if (ws_num == 1)
12032 gimple_omp_parallel_set_combined_p (stmt, true);
12034 gimple_seq dep_ilist = NULL;
12035 gimple_seq dep_olist = NULL;
12036 if (gimple_code (stmt) == GIMPLE_OMP_TASK
12037 && omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
12039 push_gimplify_context ();
12040 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
12041 lower_depend_clauses (gimple_omp_task_clauses_ptr (stmt),
12042 &dep_ilist, &dep_olist);
12045 if (gimple_code (stmt) == GIMPLE_OMP_TASK
12046 && gimple_omp_task_taskwait_p (stmt))
12048 if (dep_bind)
12050 gsi_replace (gsi_p, dep_bind, true);
12051 gimple_bind_add_seq (dep_bind, dep_ilist);
12052 gimple_bind_add_stmt (dep_bind, stmt);
12053 gimple_bind_add_seq (dep_bind, dep_olist);
12054 pop_gimplify_context (dep_bind);
12056 return;
12059 if (ctx->srecord_type)
12060 create_task_copyfn (as_a <gomp_task *> (stmt), ctx);
12062 gimple_seq tskred_ilist = NULL;
12063 gimple_seq tskred_olist = NULL;
12064 if ((is_task_ctx (ctx)
12065 && gimple_omp_task_taskloop_p (ctx->stmt)
12066 && omp_find_clause (gimple_omp_task_clauses (ctx->stmt),
12067 OMP_CLAUSE_REDUCTION))
12068 || (is_parallel_ctx (ctx)
12069 && omp_find_clause (gimple_omp_parallel_clauses (stmt),
12070 OMP_CLAUSE__REDUCTEMP_)))
12072 if (dep_bind == NULL)
12074 push_gimplify_context ();
12075 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
12077 lower_omp_task_reductions (ctx, is_task_ctx (ctx) ? OMP_TASKLOOP
12078 : OMP_PARALLEL,
12079 gimple_omp_taskreg_clauses (ctx->stmt),
12080 &tskred_ilist, &tskred_olist);
12083 push_gimplify_context ();
12085 gimple_seq par_olist = NULL;
12086 gimple_seq par_ilist = NULL;
12087 gimple_seq par_rlist = NULL;
12088 lower_rec_input_clauses (clauses, &par_ilist, &par_olist, ctx, NULL);
12089 lower_omp (&par_body, ctx);
12090 if (gimple_code (stmt) != GIMPLE_OMP_TASK)
12091 lower_reduction_clauses (clauses, &par_rlist, NULL, ctx);
12093 /* Declare all the variables created by mapping and the variables
12094 declared in the scope of the parallel body. */
12095 record_vars_into (ctx->block_vars, child_fn);
12096 maybe_remove_omp_member_access_dummy_vars (par_bind);
12097 record_vars_into (gimple_bind_vars (par_bind), child_fn);
12099 if (ctx->record_type)
12101 ctx->sender_decl
12102 = create_tmp_var (ctx->srecord_type ? ctx->srecord_type
12103 : ctx->record_type, ".omp_data_o");
12104 DECL_NAMELESS (ctx->sender_decl) = 1;
12105 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
12106 gimple_omp_taskreg_set_data_arg (stmt, ctx->sender_decl);
12109 gimple_seq olist = NULL;
12110 gimple_seq ilist = NULL;
12111 lower_send_clauses (clauses, &ilist, &olist, ctx);
12112 lower_send_shared_vars (&ilist, &olist, ctx);
12114 if (ctx->record_type)
12116 tree clobber = build_clobber (TREE_TYPE (ctx->sender_decl));
12117 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
12118 clobber));
12121 /* Once all the expansions are done, sequence all the different
12122 fragments inside gimple_omp_body. */
12124 gimple_seq new_body = NULL;
12126 if (ctx->record_type)
12128 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
12129 /* fixup_child_record_type might have changed receiver_decl's type. */
12130 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
12131 gimple_seq_add_stmt (&new_body,
12132 gimple_build_assign (ctx->receiver_decl, t));
12135 gimple_seq_add_seq (&new_body, par_ilist);
12136 gimple_seq_add_seq (&new_body, par_body);
12137 gimple_seq_add_seq (&new_body, par_rlist);
12138 if (ctx->cancellable)
12139 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
12140 gimple_seq_add_seq (&new_body, par_olist);
12141 new_body = maybe_catch_exception (new_body);
12142 if (gimple_code (stmt) == GIMPLE_OMP_TASK)
12143 gimple_seq_add_stmt (&new_body,
12144 gimple_build_omp_continue (integer_zero_node,
12145 integer_zero_node));
12146 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
12147 gimple_omp_set_body (stmt, new_body);
12149 if (dep_bind && gimple_bind_block (par_bind) == NULL_TREE)
12150 bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
12151 else
12152 bind = gimple_build_bind (NULL, NULL, gimple_bind_block (par_bind));
12153 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
12154 gimple_bind_add_seq (bind, ilist);
12155 gimple_bind_add_stmt (bind, stmt);
12156 gimple_bind_add_seq (bind, olist);
12158 pop_gimplify_context (NULL);
12160 if (dep_bind)
12162 gimple_bind_add_seq (dep_bind, dep_ilist);
12163 gimple_bind_add_seq (dep_bind, tskred_ilist);
12164 gimple_bind_add_stmt (dep_bind, bind);
12165 gimple_bind_add_seq (dep_bind, tskred_olist);
12166 gimple_bind_add_seq (dep_bind, dep_olist);
12167 pop_gimplify_context (dep_bind);
12171 /* Lower the GIMPLE_OMP_TARGET in the current statement
12172 in GSI_P. CTX holds context information for the directive. */
12174 static void
12175 lower_omp_target (gimple_stmt_iterator *gsi_p, omp_context *ctx)
12177 tree clauses;
12178 tree child_fn, t, c;
12179 gomp_target *stmt = as_a <gomp_target *> (gsi_stmt (*gsi_p));
12180 gbind *tgt_bind, *bind, *dep_bind = NULL;
12181 gimple_seq tgt_body, olist, ilist, fplist, new_body;
12182 location_t loc = gimple_location (stmt);
12183 bool offloaded, data_region;
12184 unsigned int map_cnt = 0;
12186 offloaded = is_gimple_omp_offloaded (stmt);
12187 switch (gimple_omp_target_kind (stmt))
12189 case GF_OMP_TARGET_KIND_REGION:
12190 case GF_OMP_TARGET_KIND_UPDATE:
12191 case GF_OMP_TARGET_KIND_ENTER_DATA:
12192 case GF_OMP_TARGET_KIND_EXIT_DATA:
12193 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
12194 case GF_OMP_TARGET_KIND_OACC_KERNELS:
12195 case GF_OMP_TARGET_KIND_OACC_SERIAL:
12196 case GF_OMP_TARGET_KIND_OACC_UPDATE:
12197 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
12198 case GF_OMP_TARGET_KIND_OACC_DECLARE:
12199 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED:
12200 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE:
12201 data_region = false;
12202 break;
12203 case GF_OMP_TARGET_KIND_DATA:
12204 case GF_OMP_TARGET_KIND_OACC_DATA:
12205 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
12206 case GF_OMP_TARGET_KIND_OACC_DATA_KERNELS:
12207 data_region = true;
12208 break;
12209 default:
12210 gcc_unreachable ();
12213 clauses = gimple_omp_target_clauses (stmt);
12215 gimple_seq dep_ilist = NULL;
12216 gimple_seq dep_olist = NULL;
12217 if (omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
12219 push_gimplify_context ();
12220 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
12221 lower_depend_clauses (gimple_omp_target_clauses_ptr (stmt),
12222 &dep_ilist, &dep_olist);
12225 tgt_bind = NULL;
12226 tgt_body = NULL;
12227 if (offloaded)
12229 tgt_bind = gimple_seq_first_stmt_as_a_bind (gimple_omp_body (stmt));
12230 tgt_body = gimple_bind_body (tgt_bind);
12232 else if (data_region)
12233 tgt_body = gimple_omp_body (stmt);
12234 child_fn = ctx->cb.dst_fn;
12236 push_gimplify_context ();
12237 fplist = NULL;
12239 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
12240 switch (OMP_CLAUSE_CODE (c))
12242 tree var, x;
12244 default:
12245 break;
12246 case OMP_CLAUSE_MAP:
12247 #if CHECKING_P
12248 /* First check what we're prepared to handle in the following. */
12249 switch (OMP_CLAUSE_MAP_KIND (c))
12251 case GOMP_MAP_ALLOC:
12252 case GOMP_MAP_TO:
12253 case GOMP_MAP_FROM:
12254 case GOMP_MAP_TOFROM:
12255 case GOMP_MAP_POINTER:
12256 case GOMP_MAP_TO_PSET:
12257 case GOMP_MAP_DELETE:
12258 case GOMP_MAP_RELEASE:
12259 case GOMP_MAP_ALWAYS_TO:
12260 case GOMP_MAP_ALWAYS_FROM:
12261 case GOMP_MAP_ALWAYS_TOFROM:
12262 case GOMP_MAP_FIRSTPRIVATE_POINTER:
12263 case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
12264 case GOMP_MAP_STRUCT:
12265 case GOMP_MAP_ALWAYS_POINTER:
12266 case GOMP_MAP_ATTACH:
12267 case GOMP_MAP_DETACH:
12268 break;
12269 case GOMP_MAP_IF_PRESENT:
12270 case GOMP_MAP_FORCE_ALLOC:
12271 case GOMP_MAP_FORCE_TO:
12272 case GOMP_MAP_FORCE_FROM:
12273 case GOMP_MAP_FORCE_TOFROM:
12274 case GOMP_MAP_FORCE_PRESENT:
12275 case GOMP_MAP_FORCE_DEVICEPTR:
12276 case GOMP_MAP_DEVICE_RESIDENT:
12277 case GOMP_MAP_LINK:
12278 case GOMP_MAP_FORCE_DETACH:
12279 gcc_assert (is_gimple_omp_oacc (stmt));
12280 break;
12281 default:
12282 gcc_unreachable ();
12284 #endif
12285 /* FALLTHRU */
12286 case OMP_CLAUSE_TO:
12287 case OMP_CLAUSE_FROM:
12288 oacc_firstprivate:
12289 var = OMP_CLAUSE_DECL (c);
12290 if (!DECL_P (var))
12292 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP
12293 || (!OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
12294 && (OMP_CLAUSE_MAP_KIND (c)
12295 != GOMP_MAP_FIRSTPRIVATE_POINTER)))
12296 map_cnt++;
12297 continue;
12300 if (DECL_SIZE (var)
12301 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
12303 tree var2 = DECL_VALUE_EXPR (var);
12304 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
12305 var2 = TREE_OPERAND (var2, 0);
12306 gcc_assert (DECL_P (var2));
12307 var = var2;
12310 if (offloaded
12311 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12312 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
12313 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
12315 if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
12317 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx))
12318 && varpool_node::get_create (var)->offloadable)
12319 continue;
12321 tree type = build_pointer_type (TREE_TYPE (var));
12322 tree new_var = lookup_decl (var, ctx);
12323 x = create_tmp_var_raw (type, get_name (new_var));
12324 gimple_add_tmp_var (x);
12325 x = build_simple_mem_ref (x);
12326 SET_DECL_VALUE_EXPR (new_var, x);
12327 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12329 continue;
12332 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12333 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
12334 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)
12335 && is_omp_target (stmt))
12337 gcc_assert (maybe_lookup_field (c, ctx));
12338 map_cnt++;
12339 continue;
12342 if (!maybe_lookup_field (var, ctx))
12343 continue;
12345 /* Don't remap compute constructs' reduction variables, because the
12346 intermediate result must be local to each gang. */
12347 if (offloaded && !(OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12348 && OMP_CLAUSE_MAP_IN_REDUCTION (c)))
12350 x = build_receiver_ref (var, true, ctx);
12351 tree new_var = lookup_decl (var, ctx);
12353 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12354 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
12355 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
12356 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
12357 x = build_simple_mem_ref (x);
12358 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
12360 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
12361 if (omp_is_reference (new_var)
12362 && (TREE_CODE (TREE_TYPE (new_var)) != POINTER_TYPE
12363 || DECL_BY_REFERENCE (var)))
12365 /* Create a local object to hold the instance
12366 value. */
12367 tree type = TREE_TYPE (TREE_TYPE (new_var));
12368 const char *id = IDENTIFIER_POINTER (DECL_NAME (new_var));
12369 tree inst = create_tmp_var (type, id);
12370 gimplify_assign (inst, fold_indirect_ref (x), &fplist);
12371 x = build_fold_addr_expr (inst);
12373 gimplify_assign (new_var, x, &fplist);
12375 else if (DECL_P (new_var))
12377 SET_DECL_VALUE_EXPR (new_var, x);
12378 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12380 else
12381 gcc_unreachable ();
12383 map_cnt++;
12384 break;
12386 case OMP_CLAUSE_FIRSTPRIVATE:
12387 gcc_checking_assert (offloaded);
12388 if (is_gimple_omp_oacc (ctx->stmt))
12390 /* No 'firstprivate' clauses on OpenACC 'kernels'. */
12391 gcc_checking_assert (!is_oacc_kernels (ctx));
12392 /* Likewise, on OpenACC 'kernels' decomposed parts. */
12393 gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx));
12395 goto oacc_firstprivate;
12397 map_cnt++;
12398 var = OMP_CLAUSE_DECL (c);
12399 if (!omp_is_reference (var)
12400 && !is_gimple_reg_type (TREE_TYPE (var)))
12402 tree new_var = lookup_decl (var, ctx);
12403 if (is_variable_sized (var))
12405 tree pvar = DECL_VALUE_EXPR (var);
12406 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
12407 pvar = TREE_OPERAND (pvar, 0);
12408 gcc_assert (DECL_P (pvar));
12409 tree new_pvar = lookup_decl (pvar, ctx);
12410 x = build_fold_indirect_ref (new_pvar);
12411 TREE_THIS_NOTRAP (x) = 1;
12413 else
12414 x = build_receiver_ref (var, true, ctx);
12415 SET_DECL_VALUE_EXPR (new_var, x);
12416 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12418 break;
12420 case OMP_CLAUSE_PRIVATE:
12421 gcc_checking_assert (offloaded);
12422 if (is_gimple_omp_oacc (ctx->stmt))
12424 /* No 'private' clauses on OpenACC 'kernels'. */
12425 gcc_checking_assert (!is_oacc_kernels (ctx));
12426 /* Likewise, on OpenACC 'kernels' decomposed parts. */
12427 gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx));
12429 break;
12431 var = OMP_CLAUSE_DECL (c);
12432 if (is_variable_sized (var))
12434 tree new_var = lookup_decl (var, ctx);
12435 tree pvar = DECL_VALUE_EXPR (var);
12436 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
12437 pvar = TREE_OPERAND (pvar, 0);
12438 gcc_assert (DECL_P (pvar));
12439 tree new_pvar = lookup_decl (pvar, ctx);
12440 x = build_fold_indirect_ref (new_pvar);
12441 TREE_THIS_NOTRAP (x) = 1;
12442 SET_DECL_VALUE_EXPR (new_var, x);
12443 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12445 break;
12447 case OMP_CLAUSE_USE_DEVICE_PTR:
12448 case OMP_CLAUSE_USE_DEVICE_ADDR:
12449 case OMP_CLAUSE_IS_DEVICE_PTR:
12450 var = OMP_CLAUSE_DECL (c);
12451 map_cnt++;
12452 if (is_variable_sized (var))
12454 tree new_var = lookup_decl (var, ctx);
12455 tree pvar = DECL_VALUE_EXPR (var);
12456 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
12457 pvar = TREE_OPERAND (pvar, 0);
12458 gcc_assert (DECL_P (pvar));
12459 tree new_pvar = lookup_decl (pvar, ctx);
12460 x = build_fold_indirect_ref (new_pvar);
12461 TREE_THIS_NOTRAP (x) = 1;
12462 SET_DECL_VALUE_EXPR (new_var, x);
12463 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12465 else if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
12466 && !omp_is_reference (var)
12467 && !omp_is_allocatable_or_ptr (var)
12468 && !lang_hooks.decls.omp_array_data (var, true))
12469 || TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
12471 tree new_var = lookup_decl (var, ctx);
12472 tree type = build_pointer_type (TREE_TYPE (var));
12473 x = create_tmp_var_raw (type, get_name (new_var));
12474 gimple_add_tmp_var (x);
12475 x = build_simple_mem_ref (x);
12476 SET_DECL_VALUE_EXPR (new_var, x);
12477 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12479 else
12481 tree new_var = lookup_decl (var, ctx);
12482 x = create_tmp_var_raw (TREE_TYPE (new_var), get_name (new_var));
12483 gimple_add_tmp_var (x);
12484 SET_DECL_VALUE_EXPR (new_var, x);
12485 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12487 break;
12490 if (offloaded)
12492 target_nesting_level++;
12493 lower_omp (&tgt_body, ctx);
12494 target_nesting_level--;
12496 else if (data_region)
12497 lower_omp (&tgt_body, ctx);
12499 if (offloaded)
12501 /* Declare all the variables created by mapping and the variables
12502 declared in the scope of the target body. */
12503 record_vars_into (ctx->block_vars, child_fn);
12504 maybe_remove_omp_member_access_dummy_vars (tgt_bind);
12505 record_vars_into (gimple_bind_vars (tgt_bind), child_fn);
12508 olist = NULL;
12509 ilist = NULL;
12510 if (ctx->record_type)
12512 ctx->sender_decl
12513 = create_tmp_var (ctx->record_type, ".omp_data_arr");
12514 DECL_NAMELESS (ctx->sender_decl) = 1;
12515 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
12516 t = make_tree_vec (3);
12517 TREE_VEC_ELT (t, 0) = ctx->sender_decl;
12518 TREE_VEC_ELT (t, 1)
12519 = create_tmp_var (build_array_type_nelts (size_type_node, map_cnt),
12520 ".omp_data_sizes");
12521 DECL_NAMELESS (TREE_VEC_ELT (t, 1)) = 1;
12522 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 1)) = 1;
12523 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 1;
12524 tree tkind_type = short_unsigned_type_node;
12525 int talign_shift = 8;
12526 TREE_VEC_ELT (t, 2)
12527 = create_tmp_var (build_array_type_nelts (tkind_type, map_cnt),
12528 ".omp_data_kinds");
12529 DECL_NAMELESS (TREE_VEC_ELT (t, 2)) = 1;
12530 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 2)) = 1;
12531 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 1;
12532 gimple_omp_target_set_data_arg (stmt, t);
12534 vec<constructor_elt, va_gc> *vsize;
12535 vec<constructor_elt, va_gc> *vkind;
12536 vec_alloc (vsize, map_cnt);
12537 vec_alloc (vkind, map_cnt);
12538 unsigned int map_idx = 0;
12540 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
12541 switch (OMP_CLAUSE_CODE (c))
12543 tree ovar, nc, s, purpose, var, x, type;
12544 unsigned int talign;
12546 default:
12547 break;
12549 case OMP_CLAUSE_MAP:
12550 case OMP_CLAUSE_TO:
12551 case OMP_CLAUSE_FROM:
12552 oacc_firstprivate_map:
12553 nc = c;
12554 ovar = OMP_CLAUSE_DECL (c);
12555 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12556 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
12557 || (OMP_CLAUSE_MAP_KIND (c)
12558 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
12559 break;
12560 if (!DECL_P (ovar))
12562 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12563 && OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
12565 gcc_checking_assert (OMP_CLAUSE_DECL (OMP_CLAUSE_CHAIN (c))
12566 == get_base_address (ovar));
12567 nc = OMP_CLAUSE_CHAIN (c);
12568 ovar = OMP_CLAUSE_DECL (nc);
12570 else
12572 tree x = build_sender_ref (ovar, ctx);
12573 tree v
12574 = build_fold_addr_expr_with_type (ovar, ptr_type_node);
12575 gimplify_assign (x, v, &ilist);
12576 nc = NULL_TREE;
12579 else
12581 if (DECL_SIZE (ovar)
12582 && TREE_CODE (DECL_SIZE (ovar)) != INTEGER_CST)
12584 tree ovar2 = DECL_VALUE_EXPR (ovar);
12585 gcc_assert (TREE_CODE (ovar2) == INDIRECT_REF);
12586 ovar2 = TREE_OPERAND (ovar2, 0);
12587 gcc_assert (DECL_P (ovar2));
12588 ovar = ovar2;
12590 if (!maybe_lookup_field (ovar, ctx)
12591 && !(OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12592 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
12593 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)))
12594 continue;
12597 talign = TYPE_ALIGN_UNIT (TREE_TYPE (ovar));
12598 if (DECL_P (ovar) && DECL_ALIGN_UNIT (ovar) > talign)
12599 talign = DECL_ALIGN_UNIT (ovar);
12601 if (nc
12602 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12603 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
12604 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)
12605 && is_omp_target (stmt))
12607 var = lookup_decl_in_outer_ctx (ovar, ctx);
12608 x = build_sender_ref (c, ctx);
12609 gimplify_assign (x, build_fold_addr_expr (var), &ilist);
12611 else if (nc)
12613 var = lookup_decl_in_outer_ctx (ovar, ctx);
12614 x = build_sender_ref (ovar, ctx);
12616 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12617 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
12618 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
12619 && TREE_CODE (TREE_TYPE (ovar)) == ARRAY_TYPE)
12621 gcc_assert (offloaded);
12622 tree avar
12623 = create_tmp_var (TREE_TYPE (TREE_TYPE (x)));
12624 mark_addressable (avar);
12625 gimplify_assign (avar, build_fold_addr_expr (var), &ilist);
12626 talign = DECL_ALIGN_UNIT (avar);
12627 avar = build_fold_addr_expr (avar);
12628 gimplify_assign (x, avar, &ilist);
12630 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
12632 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
12633 if (!omp_is_reference (var))
12635 if (is_gimple_reg (var)
12636 && OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
12637 TREE_NO_WARNING (var) = 1;
12638 var = build_fold_addr_expr (var);
12640 else
12641 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
12642 gimplify_assign (x, var, &ilist);
12644 else if (is_gimple_reg (var))
12646 gcc_assert (offloaded);
12647 tree avar = create_tmp_var (TREE_TYPE (var));
12648 mark_addressable (avar);
12649 enum gomp_map_kind map_kind = OMP_CLAUSE_MAP_KIND (c);
12650 if (GOMP_MAP_COPY_TO_P (map_kind)
12651 || map_kind == GOMP_MAP_POINTER
12652 || map_kind == GOMP_MAP_TO_PSET
12653 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
12655 /* If we need to initialize a temporary
12656 with VAR because it is not addressable, and
12657 the variable hasn't been initialized yet, then
12658 we'll get a warning for the store to avar.
12659 Don't warn in that case, the mapping might
12660 be implicit. */
12661 TREE_NO_WARNING (var) = 1;
12662 gimplify_assign (avar, var, &ilist);
12664 avar = build_fold_addr_expr (avar);
12665 gimplify_assign (x, avar, &ilist);
12666 if ((GOMP_MAP_COPY_FROM_P (map_kind)
12667 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
12668 && !TYPE_READONLY (TREE_TYPE (var)))
12670 x = unshare_expr (x);
12671 x = build_simple_mem_ref (x);
12672 gimplify_assign (var, x, &olist);
12675 else
12677 /* While MAP is handled explicitly by the FE,
12678 for 'target update', only the identified is passed. */
12679 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FROM
12680 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TO)
12681 && (omp_is_allocatable_or_ptr (var)
12682 && omp_check_optional_argument (var, false)))
12683 var = build_fold_indirect_ref (var);
12684 else if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FROM
12685 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_TO)
12686 || (!omp_is_allocatable_or_ptr (var)
12687 && !omp_check_optional_argument (var, false)))
12688 var = build_fold_addr_expr (var);
12689 gimplify_assign (x, var, &ilist);
12692 s = NULL_TREE;
12693 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
12695 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
12696 s = TREE_TYPE (ovar);
12697 if (TREE_CODE (s) == REFERENCE_TYPE
12698 || omp_check_optional_argument (ovar, false))
12699 s = TREE_TYPE (s);
12700 s = TYPE_SIZE_UNIT (s);
12702 else
12703 s = OMP_CLAUSE_SIZE (c);
12704 if (s == NULL_TREE)
12705 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
12706 s = fold_convert (size_type_node, s);
12707 purpose = size_int (map_idx++);
12708 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
12709 if (TREE_CODE (s) != INTEGER_CST)
12710 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
12712 unsigned HOST_WIDE_INT tkind, tkind_zero;
12713 switch (OMP_CLAUSE_CODE (c))
12715 case OMP_CLAUSE_MAP:
12716 tkind = OMP_CLAUSE_MAP_KIND (c);
12717 tkind_zero = tkind;
12718 if (OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c))
12719 switch (tkind)
12721 case GOMP_MAP_ALLOC:
12722 case GOMP_MAP_IF_PRESENT:
12723 case GOMP_MAP_TO:
12724 case GOMP_MAP_FROM:
12725 case GOMP_MAP_TOFROM:
12726 case GOMP_MAP_ALWAYS_TO:
12727 case GOMP_MAP_ALWAYS_FROM:
12728 case GOMP_MAP_ALWAYS_TOFROM:
12729 case GOMP_MAP_RELEASE:
12730 case GOMP_MAP_FORCE_TO:
12731 case GOMP_MAP_FORCE_FROM:
12732 case GOMP_MAP_FORCE_TOFROM:
12733 case GOMP_MAP_FORCE_PRESENT:
12734 tkind_zero = GOMP_MAP_ZERO_LEN_ARRAY_SECTION;
12735 break;
12736 case GOMP_MAP_DELETE:
12737 tkind_zero = GOMP_MAP_DELETE_ZERO_LEN_ARRAY_SECTION;
12738 default:
12739 break;
12741 if (tkind_zero != tkind)
12743 if (integer_zerop (s))
12744 tkind = tkind_zero;
12745 else if (integer_nonzerop (s))
12746 tkind_zero = tkind;
12748 break;
12749 case OMP_CLAUSE_FIRSTPRIVATE:
12750 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
12751 tkind = GOMP_MAP_TO;
12752 tkind_zero = tkind;
12753 break;
12754 case OMP_CLAUSE_TO:
12755 tkind = GOMP_MAP_TO;
12756 tkind_zero = tkind;
12757 break;
12758 case OMP_CLAUSE_FROM:
12759 tkind = GOMP_MAP_FROM;
12760 tkind_zero = tkind;
12761 break;
12762 default:
12763 gcc_unreachable ();
12765 gcc_checking_assert (tkind
12766 < (HOST_WIDE_INT_C (1U) << talign_shift));
12767 gcc_checking_assert (tkind_zero
12768 < (HOST_WIDE_INT_C (1U) << talign_shift));
12769 talign = ceil_log2 (talign);
12770 tkind |= talign << talign_shift;
12771 tkind_zero |= talign << talign_shift;
12772 gcc_checking_assert (tkind
12773 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
12774 gcc_checking_assert (tkind_zero
12775 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
12776 if (tkind == tkind_zero)
12777 x = build_int_cstu (tkind_type, tkind);
12778 else
12780 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 0;
12781 x = build3 (COND_EXPR, tkind_type,
12782 fold_build2 (EQ_EXPR, boolean_type_node,
12783 unshare_expr (s), size_zero_node),
12784 build_int_cstu (tkind_type, tkind_zero),
12785 build_int_cstu (tkind_type, tkind));
12787 CONSTRUCTOR_APPEND_ELT (vkind, purpose, x);
12788 if (nc && nc != c)
12789 c = nc;
12790 break;
12792 case OMP_CLAUSE_FIRSTPRIVATE:
12793 if (is_gimple_omp_oacc (ctx->stmt))
12794 goto oacc_firstprivate_map;
12795 ovar = OMP_CLAUSE_DECL (c);
12796 if (omp_is_reference (ovar))
12797 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
12798 else
12799 talign = DECL_ALIGN_UNIT (ovar);
12800 var = lookup_decl_in_outer_ctx (ovar, ctx);
12801 x = build_sender_ref (ovar, ctx);
12802 tkind = GOMP_MAP_FIRSTPRIVATE;
12803 type = TREE_TYPE (ovar);
12804 if (omp_is_reference (ovar))
12805 type = TREE_TYPE (type);
12806 if ((INTEGRAL_TYPE_P (type)
12807 && TYPE_PRECISION (type) <= POINTER_SIZE)
12808 || TREE_CODE (type) == POINTER_TYPE)
12810 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
12811 tree t = var;
12812 if (omp_is_reference (var))
12813 t = build_simple_mem_ref (var);
12814 else if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
12815 TREE_NO_WARNING (var) = 1;
12816 if (TREE_CODE (type) != POINTER_TYPE)
12817 t = fold_convert (pointer_sized_int_node, t);
12818 t = fold_convert (TREE_TYPE (x), t);
12819 gimplify_assign (x, t, &ilist);
12821 else if (omp_is_reference (var))
12822 gimplify_assign (x, var, &ilist);
12823 else if (is_gimple_reg (var))
12825 tree avar = create_tmp_var (TREE_TYPE (var));
12826 mark_addressable (avar);
12827 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
12828 TREE_NO_WARNING (var) = 1;
12829 gimplify_assign (avar, var, &ilist);
12830 avar = build_fold_addr_expr (avar);
12831 gimplify_assign (x, avar, &ilist);
12833 else
12835 var = build_fold_addr_expr (var);
12836 gimplify_assign (x, var, &ilist);
12838 if (tkind == GOMP_MAP_FIRSTPRIVATE_INT)
12839 s = size_int (0);
12840 else if (omp_is_reference (ovar))
12841 s = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
12842 else
12843 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
12844 s = fold_convert (size_type_node, s);
12845 purpose = size_int (map_idx++);
12846 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
12847 if (TREE_CODE (s) != INTEGER_CST)
12848 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
12850 gcc_checking_assert (tkind
12851 < (HOST_WIDE_INT_C (1U) << talign_shift));
12852 talign = ceil_log2 (talign);
12853 tkind |= talign << talign_shift;
12854 gcc_checking_assert (tkind
12855 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
12856 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
12857 build_int_cstu (tkind_type, tkind));
12858 break;
12860 case OMP_CLAUSE_USE_DEVICE_PTR:
12861 case OMP_CLAUSE_USE_DEVICE_ADDR:
12862 case OMP_CLAUSE_IS_DEVICE_PTR:
12863 ovar = OMP_CLAUSE_DECL (c);
12864 var = lookup_decl_in_outer_ctx (ovar, ctx);
12866 if (lang_hooks.decls.omp_array_data (ovar, true))
12868 tkind = (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR
12869 ? GOMP_MAP_USE_DEVICE_PTR : GOMP_MAP_FIRSTPRIVATE_INT);
12870 x = build_sender_ref ((splay_tree_key) &DECL_NAME (ovar), ctx);
12872 else if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR)
12874 tkind = GOMP_MAP_USE_DEVICE_PTR;
12875 x = build_sender_ref ((splay_tree_key) &DECL_UID (ovar), ctx);
12877 else
12879 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
12880 x = build_sender_ref (ovar, ctx);
12883 if (is_gimple_omp_oacc (ctx->stmt))
12885 gcc_assert (tkind == GOMP_MAP_USE_DEVICE_PTR);
12887 if (OMP_CLAUSE_USE_DEVICE_PTR_IF_PRESENT (c))
12888 tkind = GOMP_MAP_USE_DEVICE_PTR_IF_PRESENT;
12891 type = TREE_TYPE (ovar);
12892 if (lang_hooks.decls.omp_array_data (ovar, true))
12893 var = lang_hooks.decls.omp_array_data (ovar, false);
12894 else if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
12895 && !omp_is_reference (ovar)
12896 && !omp_is_allocatable_or_ptr (ovar))
12897 || TREE_CODE (type) == ARRAY_TYPE)
12898 var = build_fold_addr_expr (var);
12899 else
12901 if (omp_is_reference (ovar)
12902 || omp_check_optional_argument (ovar, false)
12903 || omp_is_allocatable_or_ptr (ovar))
12905 type = TREE_TYPE (type);
12906 if (POINTER_TYPE_P (type)
12907 && TREE_CODE (type) != ARRAY_TYPE
12908 && ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_USE_DEVICE_ADDR
12909 && !omp_is_allocatable_or_ptr (ovar))
12910 || (omp_is_reference (ovar)
12911 && omp_is_allocatable_or_ptr (ovar))))
12912 var = build_simple_mem_ref (var);
12913 var = fold_convert (TREE_TYPE (x), var);
12916 tree present;
12917 present = omp_check_optional_argument (ovar, true);
12918 if (present)
12920 tree null_label = create_artificial_label (UNKNOWN_LOCATION);
12921 tree notnull_label = create_artificial_label (UNKNOWN_LOCATION);
12922 tree opt_arg_label = create_artificial_label (UNKNOWN_LOCATION);
12923 tree new_x = unshare_expr (x);
12924 gimplify_expr (&present, &ilist, NULL, is_gimple_val,
12925 fb_rvalue);
12926 gcond *cond = gimple_build_cond_from_tree (present,
12927 notnull_label,
12928 null_label);
12929 gimple_seq_add_stmt (&ilist, cond);
12930 gimple_seq_add_stmt (&ilist, gimple_build_label (null_label));
12931 gimplify_assign (new_x, null_pointer_node, &ilist);
12932 gimple_seq_add_stmt (&ilist, gimple_build_goto (opt_arg_label));
12933 gimple_seq_add_stmt (&ilist,
12934 gimple_build_label (notnull_label));
12935 gimplify_assign (x, var, &ilist);
12936 gimple_seq_add_stmt (&ilist,
12937 gimple_build_label (opt_arg_label));
12939 else
12940 gimplify_assign (x, var, &ilist);
12941 s = size_int (0);
12942 purpose = size_int (map_idx++);
12943 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
12944 gcc_checking_assert (tkind
12945 < (HOST_WIDE_INT_C (1U) << talign_shift));
12946 gcc_checking_assert (tkind
12947 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
12948 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
12949 build_int_cstu (tkind_type, tkind));
12950 break;
12953 gcc_assert (map_idx == map_cnt);
12955 DECL_INITIAL (TREE_VEC_ELT (t, 1))
12956 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 1)), vsize);
12957 DECL_INITIAL (TREE_VEC_ELT (t, 2))
12958 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 2)), vkind);
12959 for (int i = 1; i <= 2; i++)
12960 if (!TREE_STATIC (TREE_VEC_ELT (t, i)))
12962 gimple_seq initlist = NULL;
12963 force_gimple_operand (build1 (DECL_EXPR, void_type_node,
12964 TREE_VEC_ELT (t, i)),
12965 &initlist, true, NULL_TREE);
12966 gimple_seq_add_seq (&ilist, initlist);
12968 tree clobber = build_clobber (TREE_TYPE (TREE_VEC_ELT (t, i)));
12969 gimple_seq_add_stmt (&olist,
12970 gimple_build_assign (TREE_VEC_ELT (t, i),
12971 clobber));
12973 else if (omp_maybe_offloaded_ctx (ctx->outer))
12975 tree id = get_identifier ("omp declare target");
12976 tree decl = TREE_VEC_ELT (t, i);
12977 DECL_ATTRIBUTES (decl)
12978 = tree_cons (id, NULL_TREE, DECL_ATTRIBUTES (decl));
12979 varpool_node *node = varpool_node::get (decl);
12980 if (node)
12982 node->offloadable = 1;
12983 if (ENABLE_OFFLOADING)
12985 g->have_offload = true;
12986 vec_safe_push (offload_vars, t);
12991 tree clobber = build_clobber (ctx->record_type);
12992 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
12993 clobber));
12996 /* Once all the expansions are done, sequence all the different
12997 fragments inside gimple_omp_body. */
12999 new_body = NULL;
13001 if (offloaded
13002 && ctx->record_type)
13004 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
13005 /* fixup_child_record_type might have changed receiver_decl's type. */
13006 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
13007 gimple_seq_add_stmt (&new_body,
13008 gimple_build_assign (ctx->receiver_decl, t));
13010 gimple_seq_add_seq (&new_body, fplist);
13012 if (offloaded || data_region)
13014 tree prev = NULL_TREE;
13015 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
13016 switch (OMP_CLAUSE_CODE (c))
13018 tree var, x;
13019 default:
13020 break;
13021 case OMP_CLAUSE_FIRSTPRIVATE:
13022 if (is_gimple_omp_oacc (ctx->stmt))
13023 break;
13024 var = OMP_CLAUSE_DECL (c);
13025 if (omp_is_reference (var)
13026 || is_gimple_reg_type (TREE_TYPE (var)))
13028 tree new_var = lookup_decl (var, ctx);
13029 tree type;
13030 type = TREE_TYPE (var);
13031 if (omp_is_reference (var))
13032 type = TREE_TYPE (type);
13033 if ((INTEGRAL_TYPE_P (type)
13034 && TYPE_PRECISION (type) <= POINTER_SIZE)
13035 || TREE_CODE (type) == POINTER_TYPE)
13037 x = build_receiver_ref (var, false, ctx);
13038 if (TREE_CODE (type) != POINTER_TYPE)
13039 x = fold_convert (pointer_sized_int_node, x);
13040 x = fold_convert (type, x);
13041 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
13042 fb_rvalue);
13043 if (omp_is_reference (var))
13045 tree v = create_tmp_var_raw (type, get_name (var));
13046 gimple_add_tmp_var (v);
13047 TREE_ADDRESSABLE (v) = 1;
13048 gimple_seq_add_stmt (&new_body,
13049 gimple_build_assign (v, x));
13050 x = build_fold_addr_expr (v);
13052 gimple_seq_add_stmt (&new_body,
13053 gimple_build_assign (new_var, x));
13055 else
13057 x = build_receiver_ref (var, !omp_is_reference (var), ctx);
13058 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
13059 fb_rvalue);
13060 gimple_seq_add_stmt (&new_body,
13061 gimple_build_assign (new_var, x));
13064 else if (is_variable_sized (var))
13066 tree pvar = DECL_VALUE_EXPR (var);
13067 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
13068 pvar = TREE_OPERAND (pvar, 0);
13069 gcc_assert (DECL_P (pvar));
13070 tree new_var = lookup_decl (pvar, ctx);
13071 x = build_receiver_ref (var, false, ctx);
13072 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
13073 gimple_seq_add_stmt (&new_body,
13074 gimple_build_assign (new_var, x));
13076 break;
13077 case OMP_CLAUSE_PRIVATE:
13078 if (is_gimple_omp_oacc (ctx->stmt))
13079 break;
13080 var = OMP_CLAUSE_DECL (c);
13081 if (omp_is_reference (var))
13083 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
13084 tree new_var = lookup_decl (var, ctx);
13085 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
13086 if (TREE_CONSTANT (x))
13088 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
13089 get_name (var));
13090 gimple_add_tmp_var (x);
13091 TREE_ADDRESSABLE (x) = 1;
13092 x = build_fold_addr_expr_loc (clause_loc, x);
13094 else
13095 break;
13097 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
13098 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
13099 gimple_seq_add_stmt (&new_body,
13100 gimple_build_assign (new_var, x));
13102 break;
13103 case OMP_CLAUSE_USE_DEVICE_PTR:
13104 case OMP_CLAUSE_USE_DEVICE_ADDR:
13105 case OMP_CLAUSE_IS_DEVICE_PTR:
13106 tree new_var;
13107 gimple_seq assign_body;
13108 bool is_array_data;
13109 bool do_optional_check;
13110 assign_body = NULL;
13111 do_optional_check = false;
13112 var = OMP_CLAUSE_DECL (c);
13113 is_array_data = lang_hooks.decls.omp_array_data (var, true) != NULL;
13115 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR)
13116 x = build_sender_ref (is_array_data
13117 ? (splay_tree_key) &DECL_NAME (var)
13118 : (splay_tree_key) &DECL_UID (var), ctx);
13119 else
13120 x = build_receiver_ref (var, false, ctx);
13122 if (is_array_data)
13124 bool is_ref = omp_is_reference (var);
13125 do_optional_check = true;
13126 /* First, we copy the descriptor data from the host; then
13127 we update its data to point to the target address. */
13128 new_var = lookup_decl (var, ctx);
13129 new_var = DECL_VALUE_EXPR (new_var);
13130 tree v = new_var;
13132 if (is_ref)
13134 var = build_fold_indirect_ref (var);
13135 gimplify_expr (&var, &assign_body, NULL, is_gimple_val,
13136 fb_rvalue);
13137 v = create_tmp_var_raw (TREE_TYPE (var), get_name (var));
13138 gimple_add_tmp_var (v);
13139 TREE_ADDRESSABLE (v) = 1;
13140 gimple_seq_add_stmt (&assign_body,
13141 gimple_build_assign (v, var));
13142 tree rhs = build_fold_addr_expr (v);
13143 gimple_seq_add_stmt (&assign_body,
13144 gimple_build_assign (new_var, rhs));
13146 else
13147 gimple_seq_add_stmt (&assign_body,
13148 gimple_build_assign (new_var, var));
13150 tree v2 = lang_hooks.decls.omp_array_data (unshare_expr (v), false);
13151 gcc_assert (v2);
13152 gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
13153 gimple_seq_add_stmt (&assign_body,
13154 gimple_build_assign (v2, x));
13156 else if (is_variable_sized (var))
13158 tree pvar = DECL_VALUE_EXPR (var);
13159 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
13160 pvar = TREE_OPERAND (pvar, 0);
13161 gcc_assert (DECL_P (pvar));
13162 new_var = lookup_decl (pvar, ctx);
13163 gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
13164 gimple_seq_add_stmt (&assign_body,
13165 gimple_build_assign (new_var, x));
13167 else if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
13168 && !omp_is_reference (var)
13169 && !omp_is_allocatable_or_ptr (var))
13170 || TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
13172 new_var = lookup_decl (var, ctx);
13173 new_var = DECL_VALUE_EXPR (new_var);
13174 gcc_assert (TREE_CODE (new_var) == MEM_REF);
13175 new_var = TREE_OPERAND (new_var, 0);
13176 gcc_assert (DECL_P (new_var));
13177 gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
13178 gimple_seq_add_stmt (&assign_body,
13179 gimple_build_assign (new_var, x));
13181 else
13183 tree type = TREE_TYPE (var);
13184 new_var = lookup_decl (var, ctx);
13185 if (omp_is_reference (var))
13187 type = TREE_TYPE (type);
13188 if (POINTER_TYPE_P (type)
13189 && TREE_CODE (type) != ARRAY_TYPE
13190 && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_USE_DEVICE_ADDR
13191 || (omp_is_reference (var)
13192 && omp_is_allocatable_or_ptr (var))))
13194 tree v = create_tmp_var_raw (type, get_name (var));
13195 gimple_add_tmp_var (v);
13196 TREE_ADDRESSABLE (v) = 1;
13197 x = fold_convert (type, x);
13198 gimplify_expr (&x, &assign_body, NULL, is_gimple_val,
13199 fb_rvalue);
13200 gimple_seq_add_stmt (&assign_body,
13201 gimple_build_assign (v, x));
13202 x = build_fold_addr_expr (v);
13203 do_optional_check = true;
13206 new_var = DECL_VALUE_EXPR (new_var);
13207 x = fold_convert (TREE_TYPE (new_var), x);
13208 gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
13209 gimple_seq_add_stmt (&assign_body,
13210 gimple_build_assign (new_var, x));
13212 tree present;
13213 present = (do_optional_check
13214 ? omp_check_optional_argument (OMP_CLAUSE_DECL (c), true)
13215 : NULL_TREE);
13216 if (present)
13218 tree null_label = create_artificial_label (UNKNOWN_LOCATION);
13219 tree notnull_label = create_artificial_label (UNKNOWN_LOCATION);
13220 tree opt_arg_label = create_artificial_label (UNKNOWN_LOCATION);
13221 glabel *null_glabel = gimple_build_label (null_label);
13222 glabel *notnull_glabel = gimple_build_label (notnull_label);
13223 ggoto *opt_arg_ggoto = gimple_build_goto (opt_arg_label);
13224 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
13225 fb_rvalue);
13226 gimplify_expr (&present, &new_body, NULL, is_gimple_val,
13227 fb_rvalue);
13228 gcond *cond = gimple_build_cond_from_tree (present,
13229 notnull_label,
13230 null_label);
13231 gimple_seq_add_stmt (&new_body, cond);
13232 gimple_seq_add_stmt (&new_body, null_glabel);
13233 gimplify_assign (new_var, null_pointer_node, &new_body);
13234 gimple_seq_add_stmt (&new_body, opt_arg_ggoto);
13235 gimple_seq_add_stmt (&new_body, notnull_glabel);
13236 gimple_seq_add_seq (&new_body, assign_body);
13237 gimple_seq_add_stmt (&new_body,
13238 gimple_build_label (opt_arg_label));
13240 else
13241 gimple_seq_add_seq (&new_body, assign_body);
13242 break;
13244 /* Handle GOMP_MAP_FIRSTPRIVATE_{POINTER,REFERENCE} in second pass,
13245 so that firstprivate vars holding OMP_CLAUSE_SIZE if needed
13246 are already handled. Similarly OMP_CLAUSE_PRIVATE for VLAs
13247 or references to VLAs. */
13248 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
13249 switch (OMP_CLAUSE_CODE (c))
13251 tree var;
13252 default:
13253 break;
13254 case OMP_CLAUSE_MAP:
13255 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
13256 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
13258 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
13259 poly_int64 offset = 0;
13260 gcc_assert (prev);
13261 var = OMP_CLAUSE_DECL (c);
13262 if (DECL_P (var)
13263 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
13264 && is_global_var (maybe_lookup_decl_in_outer_ctx (var,
13265 ctx))
13266 && varpool_node::get_create (var)->offloadable)
13267 break;
13268 if (TREE_CODE (var) == INDIRECT_REF
13269 && TREE_CODE (TREE_OPERAND (var, 0)) == COMPONENT_REF)
13270 var = TREE_OPERAND (var, 0);
13271 if (TREE_CODE (var) == COMPONENT_REF)
13273 var = get_addr_base_and_unit_offset (var, &offset);
13274 gcc_assert (var != NULL_TREE && DECL_P (var));
13276 else if (DECL_SIZE (var)
13277 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
13279 tree var2 = DECL_VALUE_EXPR (var);
13280 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
13281 var2 = TREE_OPERAND (var2, 0);
13282 gcc_assert (DECL_P (var2));
13283 var = var2;
13285 tree new_var = lookup_decl (var, ctx), x;
13286 tree type = TREE_TYPE (new_var);
13287 bool is_ref;
13288 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == INDIRECT_REF
13289 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
13290 == COMPONENT_REF))
13292 type = TREE_TYPE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0));
13293 is_ref = true;
13294 new_var = build2 (MEM_REF, type,
13295 build_fold_addr_expr (new_var),
13296 build_int_cst (build_pointer_type (type),
13297 offset));
13299 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPONENT_REF)
13301 type = TREE_TYPE (OMP_CLAUSE_DECL (c));
13302 is_ref = TREE_CODE (type) == REFERENCE_TYPE;
13303 new_var = build2 (MEM_REF, type,
13304 build_fold_addr_expr (new_var),
13305 build_int_cst (build_pointer_type (type),
13306 offset));
13308 else
13309 is_ref = omp_is_reference (var);
13310 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
13311 is_ref = false;
13312 bool ref_to_array = false;
13313 if (is_ref)
13315 type = TREE_TYPE (type);
13316 if (TREE_CODE (type) == ARRAY_TYPE)
13318 type = build_pointer_type (type);
13319 ref_to_array = true;
13322 else if (TREE_CODE (type) == ARRAY_TYPE)
13324 tree decl2 = DECL_VALUE_EXPR (new_var);
13325 gcc_assert (TREE_CODE (decl2) == MEM_REF);
13326 decl2 = TREE_OPERAND (decl2, 0);
13327 gcc_assert (DECL_P (decl2));
13328 new_var = decl2;
13329 type = TREE_TYPE (new_var);
13331 x = build_receiver_ref (OMP_CLAUSE_DECL (prev), false, ctx);
13332 x = fold_convert_loc (clause_loc, type, x);
13333 if (!integer_zerop (OMP_CLAUSE_SIZE (c)))
13335 tree bias = OMP_CLAUSE_SIZE (c);
13336 if (DECL_P (bias))
13337 bias = lookup_decl (bias, ctx);
13338 bias = fold_convert_loc (clause_loc, sizetype, bias);
13339 bias = fold_build1_loc (clause_loc, NEGATE_EXPR, sizetype,
13340 bias);
13341 x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
13342 TREE_TYPE (x), x, bias);
13344 if (ref_to_array)
13345 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
13346 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
13347 if (is_ref && !ref_to_array)
13349 tree t = create_tmp_var_raw (type, get_name (var));
13350 gimple_add_tmp_var (t);
13351 TREE_ADDRESSABLE (t) = 1;
13352 gimple_seq_add_stmt (&new_body,
13353 gimple_build_assign (t, x));
13354 x = build_fold_addr_expr_loc (clause_loc, t);
13356 gimple_seq_add_stmt (&new_body,
13357 gimple_build_assign (new_var, x));
13358 prev = NULL_TREE;
13360 else if (OMP_CLAUSE_CHAIN (c)
13361 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c))
13362 == OMP_CLAUSE_MAP
13363 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
13364 == GOMP_MAP_FIRSTPRIVATE_POINTER
13365 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
13366 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
13367 prev = c;
13368 break;
13369 case OMP_CLAUSE_PRIVATE:
13370 var = OMP_CLAUSE_DECL (c);
13371 if (is_variable_sized (var))
13373 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
13374 tree new_var = lookup_decl (var, ctx);
13375 tree pvar = DECL_VALUE_EXPR (var);
13376 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
13377 pvar = TREE_OPERAND (pvar, 0);
13378 gcc_assert (DECL_P (pvar));
13379 tree new_pvar = lookup_decl (pvar, ctx);
13380 tree atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
13381 tree al = size_int (DECL_ALIGN (var));
13382 tree x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
13383 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
13384 x = fold_convert_loc (clause_loc, TREE_TYPE (new_pvar), x);
13385 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
13386 gimple_seq_add_stmt (&new_body,
13387 gimple_build_assign (new_pvar, x));
13389 else if (omp_is_reference (var) && !is_gimple_omp_oacc (ctx->stmt))
13391 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
13392 tree new_var = lookup_decl (var, ctx);
13393 tree x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
13394 if (TREE_CONSTANT (x))
13395 break;
13396 else
13398 tree atmp
13399 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
13400 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
13401 tree al = size_int (TYPE_ALIGN (rtype));
13402 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
13405 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
13406 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
13407 gimple_seq_add_stmt (&new_body,
13408 gimple_build_assign (new_var, x));
13410 break;
13413 gimple_seq fork_seq = NULL;
13414 gimple_seq join_seq = NULL;
13416 if (offloaded && is_gimple_omp_oacc (ctx->stmt))
13418 /* If there are reductions on the offloaded region itself, treat
13419 them as a dummy GANG loop. */
13420 tree level = build_int_cst (integer_type_node, GOMP_DIM_GANG);
13422 gcall *private_marker = lower_oacc_private_marker (ctx);
13424 if (private_marker)
13425 gimple_call_set_arg (private_marker, 2, level);
13427 lower_oacc_reductions (gimple_location (ctx->stmt), clauses, level,
13428 false, NULL, private_marker, NULL, &fork_seq,
13429 &join_seq, ctx);
13432 gimple_seq_add_seq (&new_body, fork_seq);
13433 gimple_seq_add_seq (&new_body, tgt_body);
13434 gimple_seq_add_seq (&new_body, join_seq);
13436 if (offloaded)
13438 new_body = maybe_catch_exception (new_body);
13439 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
13441 gimple_omp_set_body (stmt, new_body);
13444 bind = gimple_build_bind (NULL, NULL,
13445 tgt_bind ? gimple_bind_block (tgt_bind)
13446 : NULL_TREE);
13447 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
13448 gimple_bind_add_seq (bind, ilist);
13449 gimple_bind_add_stmt (bind, stmt);
13450 gimple_bind_add_seq (bind, olist);
13452 pop_gimplify_context (NULL);
13454 if (dep_bind)
13456 gimple_bind_add_seq (dep_bind, dep_ilist);
13457 gimple_bind_add_stmt (dep_bind, bind);
13458 gimple_bind_add_seq (dep_bind, dep_olist);
13459 pop_gimplify_context (dep_bind);
13463 /* Expand code for an OpenMP teams directive.  Replaces the teams
   statement at *GSI_P with a GIMPLE_BIND that evaluates the NUM_TEAMS
   and THREAD_LIMIT clause expressions, lowers the clauses and body,
   and emits a call to the GOMP_teams runtime entry point.  */
13465 static void
13466 lower_omp_teams (gimple_stmt_iterator *gsi_p, omp_context *ctx)
13468 gomp_teams *teams_stmt = as_a <gomp_teams *> (gsi_stmt (*gsi_p));
13469 push_gimplify_context ();
13471 tree block = make_node (BLOCK);
13472 gbind *bind = gimple_build_bind (NULL, NULL, block);
13473 gsi_replace (gsi_p, bind, true);
13474 gimple_seq bind_body = NULL;
13475 gimple_seq dlist = NULL;
13476 gimple_seq olist = NULL;
   /* Evaluate the NUM_TEAMS clause expression into a gimple value;
      use 0 when the clause is absent.  */
13478 tree num_teams = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
13479 OMP_CLAUSE_NUM_TEAMS);
13480 if (num_teams == NULL_TREE)
13481 num_teams = build_int_cst (unsigned_type_node, 0);
13482 else
13484 num_teams = OMP_CLAUSE_NUM_TEAMS_EXPR (num_teams);
13485 num_teams = fold_convert (unsigned_type_node, num_teams);
13486 gimplify_expr (&num_teams, &bind_body, NULL, is_gimple_val, fb_rvalue);
   /* Likewise for THREAD_LIMIT; 0 when the clause is absent.  */
13488 tree thread_limit = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
13489 OMP_CLAUSE_THREAD_LIMIT);
13490 if (thread_limit == NULL_TREE)
13491 thread_limit = build_int_cst (unsigned_type_node, 0);
13492 else
13494 thread_limit = OMP_CLAUSE_THREAD_LIMIT_EXPR (thread_limit);
13495 thread_limit = fold_convert (unsigned_type_node, thread_limit);
13496 gimplify_expr (&thread_limit, &bind_body, NULL, is_gimple_val,
13497 fb_rvalue);
13500 lower_rec_input_clauses (gimple_omp_teams_clauses (teams_stmt),
13501 &bind_body, &dlist, ctx, NULL);
13502 lower_omp (gimple_omp_body_ptr (teams_stmt), ctx);
13503 lower_reduction_clauses (gimple_omp_teams_clauses (teams_stmt), &olist,
13504 NULL, ctx);
13505 gimple_seq_add_stmt (&bind_body, teams_stmt);
   /* Emit the GOMP_teams (num_teams, thread_limit) runtime call just
      after the teams statement, ahead of the lowered body.  */
13507 location_t loc = gimple_location (teams_stmt);
13508 tree decl = builtin_decl_explicit (BUILT_IN_GOMP_TEAMS);
13509 gimple *call = gimple_build_call (decl, 2, num_teams, thread_limit);
13510 gimple_set_location (call, loc);
13511 gimple_seq_add_stmt (&bind_body, call);
13513 gimple_seq_add_seq (&bind_body, gimple_omp_body (teams_stmt));
13514 gimple_omp_set_body (teams_stmt, NULL);
13515 gimple_seq_add_seq (&bind_body, olist);
13516 gimple_seq_add_seq (&bind_body, dlist)
13517 gimple_seq_add_stmt (&bind_body, gimple_build_omp_return (true));
13518 gimple_bind_set_body (bind, bind_body);
13520 pop_gimplify_context (bind);
13522 gimple_bind_append_vars (bind, ctx->block_vars);
13523 BLOCK_VARS (block) = ctx->block_vars;
13524 if (BLOCK_VARS (block))
13525 TREE_USED (block) = 1;
13528 /* Callback for lower_omp_1.  Return non-NULL if *tp needs to be
13529 regimplified.  If DATA is non-NULL, lower_omp_1 is outside
13530 of OMP context, but with task_shared_vars set.  */
13532 static tree
13533 lower_omp_regimplify_p (tree *tp, int *walk_subtrees,
13534 void *data)
13536 tree t = *tp;
13538 /* Any variable with DECL_VALUE_EXPR needs to be regimplified.  */
13539 if (VAR_P (t) && data == NULL && DECL_HAS_VALUE_EXPR_P (t))
13540 return t;
   /* DECLs recorded in the task_shared_vars bitmap need
      regimplification as well.  */
13542 if (task_shared_vars
13543 && DECL_P (t)
13544 && bitmap_bit_p (task_shared_vars, DECL_UID (t)))
13545 return t;
13547 /* If a global variable has been privatized, TREE_CONSTANT on
13548 ADDR_EXPR might be wrong.  */
13549 if (data == NULL && TREE_CODE (t) == ADDR_EXPR)
13550 recompute_tree_invariant_for_addr_expr (t);
   /* Don't descend into types or nested declarations.  */
13552 *walk_subtrees = !IS_TYPE_OR_DECL_P (t);
13553 return NULL_TREE;
13556 /* Data to be communicated between lower_omp_regimplify_operands and
13557 lower_omp_regimplify_operands_p.  */
13559 struct lower_omp_regimplify_operands_data
   /* Context of the statement being regimplified.  */
13561 omp_context *ctx;
   /* Flat vector of (saved DECL_VALUE_EXPR, decl) pairs, pushed in that
      order so the caller can restore the value exprs afterwards.  */
13562 vec<tree> *decls;
13565 /* Helper function for lower_omp_regimplify_operands.  Find
13566 omp_member_access_dummy_var vars and adjust temporarily their
13567 DECL_VALUE_EXPRs if needed.  */
13569 static tree
13570 lower_omp_regimplify_operands_p (tree *tp, int *walk_subtrees,
13571 void *data)
13573 tree t = omp_member_access_dummy_var (*tp);
13574 if (t)
13576 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
13577 lower_omp_regimplify_operands_data *ldata
13578 = (lower_omp_regimplify_operands_data *) wi->info;
13579 tree o = maybe_lookup_decl (t, ldata->ctx);
13580 if (o != t)
   /* Save the old value expr and the decl so the caller can restore
      them once regimplification is done, then install a remapped
      copy that refers to the decl found in the context.  */
13582 ldata->decls->safe_push (DECL_VALUE_EXPR (*tp));
13583 ldata->decls->safe_push (*tp);
13584 tree v = unshare_and_remap (DECL_VALUE_EXPR (*tp), t, o);
13585 SET_DECL_VALUE_EXPR (*tp, v);
13588 *walk_subtrees = !IS_TYPE_OR_DECL_P (*tp);
13589 return NULL_TREE;
13592 /* Wrapper around gimple_regimplify_operands that adjusts DECL_VALUE_EXPRs
13593 of omp_member_access_dummy_var vars during regimplification.  */
13595 static void
13596 lower_omp_regimplify_operands (omp_context *ctx, gimple *stmt,
13597 gimple_stmt_iterator *gsi_p)
13599 auto_vec<tree, 10> decls;
13600 if (ctx)
13602 struct walk_stmt_info wi;
13603 memset (&wi, '\0', sizeof (wi));
13604 struct lower_omp_regimplify_operands_data data;
13605 data.ctx = ctx;
13606 data.decls = &decls;
13607 wi.info = &data;
13608 walk_gimple_op (stmt, lower_omp_regimplify_operands_p, &wi);
13610 gimple_regimplify_operands (stmt, gsi_p);
   /* Restore the DECL_VALUE_EXPRs saved by the walk above.  Decls were
      pushed after their value exprs, so pop the decl first.  */
13611 while (!decls.is_empty ())
13613 tree t = decls.pop ();
13614 tree v = decls.pop ();
13615 SET_DECL_VALUE_EXPR (t, v);
/* Lower the single statement at *GSI_P within OMP context CTX (NULL when
   the statement is outside any OMP construct, in which case
   task_shared_vars may still require regimplification).  Dispatches on
   the GIMPLE code: OMP directives are handed to their specific lowering
   routines, container statements recurse via lower_omp, and other
   statements are regimplified if they mention privatized variables.  */
13619 static void
13620 lower_omp_1 (gimple_stmt_iterator *gsi_p, omp_context *ctx)
13622 gimple *stmt = gsi_stmt (*gsi_p);
13623 struct walk_stmt_info wi;
13624 gcall *call_stmt;
13626 if (gimple_has_location (stmt))
13627 input_location = gimple_location (stmt);
   /* WI is only consulted (as a non-NULL data cookie) when CTX is NULL
      but task_shared_vars is set; see lower_omp_regimplify_p.  */
13629 if (task_shared_vars)
13630 memset (&wi, '\0', sizeof (wi));
13632 /* If we have issued syntax errors, avoid doing any heavy lifting.
13633 Just replace the OMP directives with a NOP to avoid
13634 confusing RTL expansion.  */
13635 if (seen_error () && is_gimple_omp (stmt))
13637 gsi_replace (gsi_p, gimple_build_nop (), true);
13638 return;
13641 switch (gimple_code (stmt))
13643 case GIMPLE_COND:
13645 gcond *cond_stmt = as_a <gcond *> (stmt);
13646 if ((ctx || task_shared_vars)
13647 && (walk_tree (gimple_cond_lhs_ptr (cond_stmt),
13648 lower_omp_regimplify_p,
13649 ctx ? NULL : &wi, NULL)
13650 || walk_tree (gimple_cond_rhs_ptr (cond_stmt),
13651 lower_omp_regimplify_p,
13652 ctx ? NULL : &wi, NULL)))
13653 lower_omp_regimplify_operands (ctx, cond_stmt, gsi_p);
13655 break;
13656 case GIMPLE_CATCH:
13657 lower_omp (gimple_catch_handler_ptr (as_a <gcatch *> (stmt)), ctx);
13658 break;
13659 case GIMPLE_EH_FILTER:
13660 lower_omp (gimple_eh_filter_failure_ptr (stmt), ctx);
13661 break;
13662 case GIMPLE_TRY:
13663 lower_omp (gimple_try_eval_ptr (stmt), ctx);
13664 lower_omp (gimple_try_cleanup_ptr (stmt), ctx);
13665 break;
13666 case GIMPLE_TRANSACTION:
13667 lower_omp (gimple_transaction_body_ptr (as_a <gtransaction *> (stmt)),
13668 ctx);
13669 break;
13670 case GIMPLE_BIND:
13671 if (ctx && is_gimple_omp_oacc (ctx->stmt))
13673 tree vars = gimple_bind_vars (as_a <gbind *> (stmt));
13674 oacc_privatization_scan_decl_chain (ctx, vars);
13676 lower_omp (gimple_bind_body_ptr (as_a <gbind *> (stmt)), ctx);
13677 maybe_remove_omp_member_access_dummy_vars (as_a <gbind *> (stmt));
13678 break;
13679 case GIMPLE_OMP_PARALLEL:
13680 case GIMPLE_OMP_TASK:
13681 ctx = maybe_lookup_ctx (stmt);
13682 gcc_assert (ctx);
   /* Cancellable regions get a label that cancellation checks branch
      to; see the BUILT_IN_GOMP_CANCEL handling below.  */
13683 if (ctx->cancellable)
13684 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
13685 lower_omp_taskreg (gsi_p, ctx);
13686 break;
13687 case GIMPLE_OMP_FOR:
13688 ctx = maybe_lookup_ctx (stmt);
13689 gcc_assert (ctx);
13690 if (ctx->cancellable)
13691 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
13692 lower_omp_for (gsi_p, ctx);
13693 break;
13694 case GIMPLE_OMP_SECTIONS:
13695 ctx = maybe_lookup_ctx (stmt);
13696 gcc_assert (ctx);
13697 if (ctx->cancellable)
13698 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
13699 lower_omp_sections (gsi_p, ctx);
13700 break;
13701 case GIMPLE_OMP_SINGLE:
13702 ctx = maybe_lookup_ctx (stmt);
13703 gcc_assert (ctx);
13704 lower_omp_single (gsi_p, ctx);
13705 break;
13706 case GIMPLE_OMP_MASTER:
13707 ctx = maybe_lookup_ctx (stmt);
13708 gcc_assert (ctx);
13709 lower_omp_master (gsi_p, ctx);
13710 break;
13711 case GIMPLE_OMP_TASKGROUP:
13712 ctx = maybe_lookup_ctx (stmt);
13713 gcc_assert (ctx);
13714 lower_omp_taskgroup (gsi_p, ctx);
13715 break;
13716 case GIMPLE_OMP_ORDERED:
13717 ctx = maybe_lookup_ctx (stmt);
13718 gcc_assert (ctx);
13719 lower_omp_ordered (gsi_p, ctx);
13720 break;
13721 case GIMPLE_OMP_SCAN:
13722 ctx = maybe_lookup_ctx (stmt);
13723 gcc_assert (ctx);
13724 lower_omp_scan (gsi_p, ctx);
13725 break;
13726 case GIMPLE_OMP_CRITICAL:
13727 ctx = maybe_lookup_ctx (stmt);
13728 gcc_assert (ctx);
13729 lower_omp_critical (gsi_p, ctx);
13730 break;
13731 case GIMPLE_OMP_ATOMIC_LOAD:
13732 if ((ctx || task_shared_vars)
13733 && walk_tree (gimple_omp_atomic_load_rhs_ptr (
13734 as_a <gomp_atomic_load *> (stmt)),
13735 lower_omp_regimplify_p, ctx ? NULL : &wi, NULL))
13736 lower_omp_regimplify_operands (ctx, stmt, gsi_p);
13737 break;
13738 case GIMPLE_OMP_TARGET:
13739 ctx = maybe_lookup_ctx (stmt);
13740 gcc_assert (ctx);
13741 lower_omp_target (gsi_p, ctx);
13742 break;
13743 case GIMPLE_OMP_TEAMS:
13744 ctx = maybe_lookup_ctx (stmt);
13745 gcc_assert (ctx);
13746 if (gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
13747 lower_omp_taskreg (gsi_p, ctx);
13748 else
13749 lower_omp_teams (gsi_p, ctx);
13750 break;
13751 case GIMPLE_CALL:
13752 tree fndecl;
13753 call_stmt = as_a <gcall *> (stmt);
13754 fndecl = gimple_call_fndecl (call_stmt);
   /* Rewrite GOMP_barrier / GOMP_cancel / GOMP_cancellation_point
      calls inside cancellable regions: test the call's result and
      branch to the region's cancel_label when cancellation was
      observed.  */
13755 if (fndecl
13756 && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
13757 switch (DECL_FUNCTION_CODE (fndecl))
13759 case BUILT_IN_GOMP_BARRIER:
13760 if (ctx == NULL)
13761 break;
13762 /* FALLTHRU */
13763 case BUILT_IN_GOMP_CANCEL:
13764 case BUILT_IN_GOMP_CANCELLATION_POINT:
13765 omp_context *cctx;
13766 cctx = ctx;
13767 if (gimple_code (cctx->stmt) == GIMPLE_OMP_SECTION)
13768 cctx = cctx->outer;
13769 gcc_assert (gimple_call_lhs (call_stmt) == NULL_TREE);
13770 if (!cctx->cancellable)
   /* Not in a cancellable region: a cancellation point degrades
      to a no-op; a plain barrier is left untouched.  */
13772 if (DECL_FUNCTION_CODE (fndecl)
13773 == BUILT_IN_GOMP_CANCELLATION_POINT)
13775 stmt = gimple_build_nop ();
13776 gsi_replace (gsi_p, stmt, false);
13778 break;
13780 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_GOMP_BARRIER)
13782 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_BARRIER_CANCEL);
13783 gimple_call_set_fndecl (call_stmt, fndecl);
13784 gimple_call_set_fntype (call_stmt, TREE_TYPE (fndecl));
13786 tree lhs;
13787 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (fndecl)));
13788 gimple_call_set_lhs (call_stmt, lhs);
13789 tree fallthru_label;
13790 fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
13791 gimple *g;
13792 g = gimple_build_label (fallthru_label);
13793 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
13794 g = gimple_build_cond (NE_EXPR, lhs,
13795 fold_convert (TREE_TYPE (lhs),
13796 boolean_false_node),
13797 cctx->cancel_label, fallthru_label);
13798 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
13799 break;
13800 default:
13801 break;
13803 goto regimplify;
13805 case GIMPLE_ASSIGN:
   /* For stores to lastprivate (conditional) variables, record the
      current iteration into the matching _condtemp_ clause temporary
      found in the enclosing worksharing context.  */
13806 for (omp_context *up = ctx; up; up = up->outer)
13808 if (gimple_code (up->stmt) == GIMPLE_OMP_ORDERED
13809 || gimple_code (up->stmt) == GIMPLE_OMP_CRITICAL
13810 || gimple_code (up->stmt) == GIMPLE_OMP_TASKGROUP
13811 || gimple_code (up->stmt) == GIMPLE_OMP_SECTION
13812 || gimple_code (up->stmt) == GIMPLE_OMP_SCAN
13813 || (gimple_code (up->stmt) == GIMPLE_OMP_TARGET
13814 && (gimple_omp_target_kind (up->stmt)
13815 == GF_OMP_TARGET_KIND_DATA)))
13816 continue;
13817 else if (!up->lastprivate_conditional_map)
13818 break;
13819 tree lhs = get_base_address (gimple_assign_lhs (stmt));
13820 if (TREE_CODE (lhs) == MEM_REF
13821 && DECL_P (TREE_OPERAND (lhs, 0))
13822 && TREE_CODE (TREE_TYPE (TREE_OPERAND (lhs,
13823 0))) == REFERENCE_TYPE)
13824 lhs = TREE_OPERAND (lhs, 0);
13825 if (DECL_P (lhs))
13826 if (tree *v = up->lastprivate_conditional_map->get (lhs))
13828 tree clauses;
13829 if (up->combined_into_simd_safelen1)
13831 up = up->outer;
13832 if (gimple_code (up->stmt) == GIMPLE_OMP_SCAN)
13833 up = up->outer;
13835 if (gimple_code (up->stmt) == GIMPLE_OMP_FOR)
13836 clauses = gimple_omp_for_clauses (up->stmt);
13837 else
13838 clauses = gimple_omp_sections_clauses (up->stmt);
13839 tree c = omp_find_clause (clauses, OMP_CLAUSE__CONDTEMP_);
13840 if (!OMP_CLAUSE__CONDTEMP__ITER (c))
13841 c = omp_find_clause (OMP_CLAUSE_CHAIN (c),
13842 OMP_CLAUSE__CONDTEMP_);
13843 gcc_assert (OMP_CLAUSE__CONDTEMP__ITER (c));
13844 gimple *g = gimple_build_assign (*v, OMP_CLAUSE_DECL (c));
13845 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
13848 /* FALLTHRU */
13850 default:
13851 regimplify:
13852 if ((ctx || task_shared_vars)
13853 && walk_gimple_op (stmt, lower_omp_regimplify_p,
13854 ctx ? NULL : &wi))
13856 /* Just remove clobbers, this should happen only if we have
13857 "privatized" local addressable variables in SIMD regions,
13858 the clobber isn't needed in that case and gimplifying address
13859 of the ARRAY_REF into a pointer and creating MEM_REF based
13860 clobber would create worse code than we get with the clobber
13861 dropped.  */
13862 if (gimple_clobber_p (stmt))
13864 gsi_replace (gsi_p, gimple_build_nop (), true);
13865 break;
13867 lower_omp_regimplify_operands (ctx, stmt, gsi_p);
13869 break;
/* Lower every statement in *BODY within OMP context CTX, restoring
   input_location on return.  */
13873 static void
13874 lower_omp (gimple_seq *body, omp_context *ctx)
13876 location_t saved_location = input_location;
13877 gimple_stmt_iterator gsi;
13878 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
13879 lower_omp_1 (&gsi, ctx);
13880 /* During gimplification, we haven't folded statements inside offloading
13881 or taskreg regions (gimplify.c:maybe_fold_stmt); do that now.  */
13882 if (target_nesting_level || taskreg_nesting_level)
13883 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
13884 fold_stmt (&gsi);
13885 input_location = saved_location;
13888 /* Main entry point.  Scans the function body for OMP contexts, lowers
   all OMP constructs, and releases the per-function bookkeeping.  */
13890 static unsigned int
13891 execute_lower_omp (void)
13893 gimple_seq body;
13894 int i;
13895 omp_context *ctx;
13897 /* This pass always runs, to provide PROP_gimple_lomp.
13898 But often, there is nothing to do.  */
13899 if (flag_openacc == 0 && flag_openmp == 0
13900 && flag_openmp_simd == 0)
13901 return 0;
13903 all_contexts = splay_tree_new (splay_tree_compare_pointers, 0,
13904 delete_omp_context);
13906 body = gimple_body (current_function_decl);
13908 scan_omp (&body, NULL);
13909 gcc_assert (taskreg_nesting_level == 0);
13910 FOR_EACH_VEC_ELT (taskreg_contexts, i, ctx)
13911 finish_taskreg_scan (ctx);
13912 taskreg_contexts.release ();
   /* Only lower if the scan found any OMP context at all.  */
13914 if (all_contexts->root)
13916 if (task_shared_vars)
13917 push_gimplify_context ();
13918 lower_omp (&body, NULL);
13919 if (task_shared_vars)
13920 pop_gimplify_context (NULL);
13923 if (all_contexts)
13925 splay_tree_delete (all_contexts);
13926 all_contexts = NULL;
13928 BITMAP_FREE (task_shared_vars);
13929 BITMAP_FREE (global_nonaddressable_vars);
13931 /* If current function is a method, remove artificial dummy VAR_DECL created
13932 for non-static data member privatization, they aren't needed for
13933 debuginfo nor anything else, have been already replaced everywhere in the
13934 IL and cause problems with LTO.  */
13935 if (DECL_ARGUMENTS (current_function_decl)
13936 && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
13937 && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
13938 == POINTER_TYPE))
13939 remove_member_access_dummy_vars (DECL_INITIAL (current_function_decl));
13940 return 0;
13943 namespace {
   /* Pass descriptor for the "omplower" pass.  */
13945 const pass_data pass_data_lower_omp =
13947 GIMPLE_PASS, /* type */
13948 "omplower", /* name */
13949 OPTGROUP_OMP, /* optinfo_flags */
13950 TV_NONE, /* tv_id */
13951 PROP_gimple_any, /* properties_required */
13952 PROP_gimple_lomp | PROP_gimple_lomp_dev, /* properties_provided */
13953 0, /* properties_destroyed */
13954 0, /* todo_flags_start */
13955 0, /* todo_flags_finish */
   /* Gimple pass wrapper around execute_lower_omp; always gated on.  */
13958 class pass_lower_omp : public gimple_opt_pass
13960 public:
13961 pass_lower_omp (gcc::context *ctxt)
13962 : gimple_opt_pass (pass_data_lower_omp, ctxt)
13965 /* opt_pass methods: */
13966 virtual unsigned int execute (function *) { return execute_lower_omp (); }
13968 }; // class pass_lower_omp
13970 } // anon namespace
   /* Factory function used by the pass manager.  */
13972 gimple_opt_pass *
13973 make_pass_lower_omp (gcc::context *ctxt)
13975 return new pass_lower_omp (ctxt);
13978 /* The following is a utility to diagnose structured block violations.
13979 It is not part of the "omplower" pass, as that's invoked too late.  It
13980 should be invoked by the respective front ends after gimplification.  */
   /* Maps each LABEL_DECL to the innermost enclosing OMP construct
      (gimple *), filled in by diagnose_sb_1.  */
13982 static splay_tree all_labels;
13984 /* Check for mismatched contexts and generate an error if needed.  Return
13985 true if an error is detected.  */
13987 static bool
13988 diagnose_sb_0 (gimple_stmt_iterator *gsi_p,
13989 gimple *branch_ctx, gimple *label_ctx)
13991 gcc_checking_assert (!branch_ctx || is_gimple_omp (branch_ctx));
13992 gcc_checking_assert (!label_ctx || is_gimple_omp (label_ctx));
   /* Branch and label share the same (possibly absent) OMP context:
      nothing to diagnose.  */
13994 if (label_ctx == branch_ctx)
13995 return false;
13997 const char* kind = NULL;
   /* Decide whether to report the violation as OpenACC or OpenMP.  */
13999 if (flag_openacc)
14001 if ((branch_ctx && is_gimple_omp_oacc (branch_ctx))
14002 || (label_ctx && is_gimple_omp_oacc (label_ctx)))
14004 gcc_checking_assert (kind == NULL);
14005 kind = "OpenACC";
14008 if (kind == NULL)
14010 gcc_checking_assert (flag_openmp || flag_openmp_simd);
14011 kind = "OpenMP";
14014 /* Previously we kept track of the label's entire context in diagnose_sb_[12]
14015 so we could traverse it and issue a correct "exit" or "enter" error
14016 message upon a structured block violation.
14018 We built the context by building a list with tree_cons'ing, but there is
14019 no easy counterpart in gimple tuples.  It seems like far too much work
14020 for issuing exit/enter error messages.  If someone really misses the
14021 distinct error message... patches welcome.  */
14023 #if 0
14024 /* Try to avoid confusing the user by producing and error message
14025 with correct "exit" or "enter" verbiage.  We prefer "exit"
14026 unless we can show that LABEL_CTX is nested within BRANCH_CTX.  */
14027 if (branch_ctx == NULL)
14028 exit_p = false;
14029 else
14031 while (label_ctx)
14033 if (TREE_VALUE (label_ctx) == branch_ctx)
14035 exit_p = false;
14036 break;
14038 label_ctx = TREE_CHAIN (label_ctx);
14042 if (exit_p)
14043 error ("invalid exit from %s structured block", kind);
14044 else
14045 error ("invalid entry to %s structured block", kind);
14046 #endif
14048 /* If it's obvious we have an invalid entry, be specific about the error.  */
14049 if (branch_ctx == NULL)
14050 error ("invalid entry to %s structured block", kind);
14051 else
14053 /* Otherwise, be vague and lazy, but efficient.  */
14054 error ("invalid branch to/from %s structured block", kind);
   /* Remove the offending branch so later passes don't trip over it.  */
14057 gsi_replace (gsi_p, gimple_build_nop (), false);
14058 return true;
14061 /* Pass 1: Create a minimal tree of structured blocks, and record
14062 where each label is found.  */
14064 static tree
14065 diagnose_sb_1 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
14066 struct walk_stmt_info *wi)
   /* WI->info carries the innermost enclosing OMP construct (or NULL).  */
14068 gimple *context = (gimple *) wi->info;
14069 gimple *inner_context;
14070 gimple *stmt = gsi_stmt (*gsi_p);
14072 *handled_ops_p = true;
14074 switch (gimple_code (stmt))
14076 WALK_SUBSTMTS;
14078 case GIMPLE_OMP_PARALLEL:
14079 case GIMPLE_OMP_TASK:
14080 case GIMPLE_OMP_SECTIONS:
14081 case GIMPLE_OMP_SINGLE:
14082 case GIMPLE_OMP_SECTION:
14083 case GIMPLE_OMP_MASTER:
14084 case GIMPLE_OMP_ORDERED:
14085 case GIMPLE_OMP_SCAN:
14086 case GIMPLE_OMP_CRITICAL:
14087 case GIMPLE_OMP_TARGET:
14088 case GIMPLE_OMP_TEAMS:
14089 case GIMPLE_OMP_TASKGROUP:
14090 /* The minimal context here is just the current OMP construct.  */
14091 inner_context = stmt;
14092 wi->info = inner_context;
14093 walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
14094 wi->info = context;
14095 break;
14097 case GIMPLE_OMP_FOR:
14098 inner_context = stmt;
14099 wi->info = inner_context;
14100 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
14101 walk them.  */
14102 walk_gimple_seq (gimple_omp_for_pre_body (stmt),
14103 diagnose_sb_1, NULL, wi);
14104 walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
14105 wi->info = context;
14106 break;
14108 case GIMPLE_LABEL:
   /* Record which OMP construct (if any) this label lives in, for
      diagnose_sb_2 to compare against the branch's context.  */
14109 splay_tree_insert (all_labels,
14110 (splay_tree_key) gimple_label_label (
14111 as_a <glabel *> (stmt)),
14112 (splay_tree_value) context);
14113 break;
14115 default:
14116 break;
14119 return NULL_TREE;
14122 /* Pass 2: Check each branch and see if its context differs from that of
14123 the destination label's context.  */
14125 static tree
14126 diagnose_sb_2 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
14127 struct walk_stmt_info *wi)
   /* WI->info carries the innermost enclosing OMP construct (or NULL),
      matching what diagnose_sb_1 recorded for labels.  */
14129 gimple *context = (gimple *) wi->info;
14130 splay_tree_node n;
14131 gimple *stmt = gsi_stmt (*gsi_p);
14133 *handled_ops_p = true;
14135 switch (gimple_code (stmt))
14137 WALK_SUBSTMTS;
14139 case GIMPLE_OMP_PARALLEL:
14140 case GIMPLE_OMP_TASK:
14141 case GIMPLE_OMP_SECTIONS:
14142 case GIMPLE_OMP_SINGLE:
14143 case GIMPLE_OMP_SECTION:
14144 case GIMPLE_OMP_MASTER:
14145 case GIMPLE_OMP_ORDERED:
14146 case GIMPLE_OMP_SCAN:
14147 case GIMPLE_OMP_CRITICAL:
14148 case GIMPLE_OMP_TARGET:
14149 case GIMPLE_OMP_TEAMS:
14150 case GIMPLE_OMP_TASKGROUP:
14151 wi->info = stmt;
14152 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
14153 wi->info = context;
14154 break;
14156 case GIMPLE_OMP_FOR:
14157 wi->info = stmt;
14158 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
14159 walk them.  */
14160 walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt),
14161 diagnose_sb_2, NULL, wi);
14162 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
14163 wi->info = context;
14164 break;
   /* Check both edges of a conditional branch against the contexts of
      their destination labels.  */
14166 case GIMPLE_COND:
14168 gcond *cond_stmt = as_a <gcond *> (stmt);
14169 tree lab = gimple_cond_true_label (cond_stmt);
14170 if (lab)
14172 n = splay_tree_lookup (all_labels,
14173 (splay_tree_key) lab);
14174 diagnose_sb_0 (gsi_p, context,
14175 n ? (gimple *) n->value : NULL);
14177 lab = gimple_cond_false_label (cond_stmt);
14178 if (lab)
14180 n = splay_tree_lookup (all_labels,
14181 (splay_tree_key) lab);
14182 diagnose_sb_0 (gsi_p, context,
14183 n ? (gimple *) n->value : NULL);
14186 break;
14188 case GIMPLE_GOTO:
14190 tree lab = gimple_goto_dest (stmt);
   /* Computed gotos have non-LABEL_DECL destinations; skip them.  */
14191 if (TREE_CODE (lab) != LABEL_DECL)
14192 break;
14194 n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
14195 diagnose_sb_0 (gsi_p, context, n ? (gimple *) n->value : NULL);
14197 break;
14199 case GIMPLE_SWITCH:
14201 gswitch *switch_stmt = as_a <gswitch *> (stmt);
14202 unsigned int i;
14203 for (i = 0; i < gimple_switch_num_labels (switch_stmt); ++i)
14205 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
14206 n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
   /* One diagnostic per switch is enough; diagnose_sb_0 has already
      replaced the statement with a nop.  */
14207 if (n && diagnose_sb_0 (gsi_p, context, (gimple *) n->value))
14208 break;
14211 break;
   /* A return from inside any OMP construct is an invalid exit.  */
14213 case GIMPLE_RETURN:
14214 diagnose_sb_0 (gsi_p, context, NULL);
14215 break;
14217 default:
14218 break;
14221 return NULL_TREE;
/* Entry point of the *diagnose_omp_blocks pass: diagnose invalid branches
   into or out of OMP structured blocks in the current function.  Two
   passes over the GIMPLE body: pass 1 (diagnose_sb_1) records each
   label's enclosing OMP context in the ALL_LABELS splay tree; pass 2
   (diagnose_sb_2) checks every branch against the recorded contexts.
   Returns 0 (no TODO flags).  */
14224 static unsigned int
14225 diagnose_omp_structured_block_errors (void)
14227 struct walk_stmt_info wi;
14228 gimple_seq body = gimple_body (current_function_decl);
/* Labels are keyed by pointer identity.  */
14230 all_labels = splay_tree_new (splay_tree_compare_pointers, 0, 0);
/* Pass 1: record label contexts.  */
14232 memset (&wi, 0, sizeof (wi));
14233 walk_gimple_seq (body, diagnose_sb_1, NULL, &wi);
/* Pass 2: check branches.  want_locations so the walker tracks statement
   locations for diagnostics.  */
14235 memset (&wi, 0, sizeof (wi));
14236 wi.want_locations = true;
14237 walk_gimple_seq_mod (&body, diagnose_sb_2, NULL, &wi);
/* The modifying walk may have replaced the sequence; store it back.  */
14239 gimple_set_body (current_function_decl, body);
14241 splay_tree_delete (all_labels);
14242 all_labels = NULL;
14244 return 0;
/* Pass registration: metadata and opt_pass subclass for the OMP
   structured-block diagnostic pass.  The pass runs early (only
   PROP_gimple_any required) and only when OpenACC or OpenMP (or
   OpenMP-simd) processing is enabled; see gate () below.  */
14247 namespace {
14249 const pass_data pass_data_diagnose_omp_blocks =
14251 GIMPLE_PASS, /* type */
14252 "*diagnose_omp_blocks", /* name */
14253 OPTGROUP_OMP, /* optinfo_flags */
14254 TV_NONE, /* tv_id */
14255 PROP_gimple_any, /* properties_required */
14256 0, /* properties_provided */
14257 0, /* properties_destroyed */
14258 0, /* todo_flags_start */
14259 0, /* todo_flags_finish */
14262 class pass_diagnose_omp_blocks : public gimple_opt_pass
14264 public:
14265 pass_diagnose_omp_blocks (gcc::context *ctxt)
14266 : gimple_opt_pass (pass_data_diagnose_omp_blocks, ctxt)
14269 /* opt_pass methods: */
14270 virtual bool gate (function *)
14272 return flag_openacc || flag_openmp || flag_openmp_simd;
14274 virtual unsigned int execute (function *)
14276 return diagnose_omp_structured_block_errors ();
14279 }; // class pass_diagnose_omp_blocks
14281 } // anon namespace
/* Factory function called by the pass manager to instantiate the
   *diagnose_omp_blocks pass; the only symbol exported from the anonymous
   namespace above.  */
14283 gimple_opt_pass *
14284 make_pass_diagnose_omp_blocks (gcc::context *ctxt)
14286 return new pass_diagnose_omp_blocks (ctxt);
14290 #include "gt-omp-low.h"