c++: ICE with alias in pack expansion [PR103769]
[official-gcc.git] / gcc / omp-low.cc
blob392bb18bc5dd51965ee769e4db50f49a39a9a9be
1 /* Lowering pass for OMP directives. Converts OMP directives into explicit
2 calls to the runtime library (libgomp), data marshalling to implement data
3 sharing and copying clauses, offloading to accelerators, and more.
5 Contributed by Diego Novillo <dnovillo@redhat.com>
7 Copyright (C) 2005-2022 Free Software Foundation, Inc.
9 This file is part of GCC.
11 GCC is free software; you can redistribute it and/or modify it under
12 the terms of the GNU General Public License as published by the Free
13 Software Foundation; either version 3, or (at your option) any later
14 version.
16 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
17 WARRANTY; without even the implied warranty of MERCHANTABILITY or
18 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19 for more details.
21 You should have received a copy of the GNU General Public License
22 along with GCC; see the file COPYING3. If not see
23 <http://www.gnu.org/licenses/>. */
25 #include "config.h"
26 #include "system.h"
27 #include "coretypes.h"
28 #include "backend.h"
29 #include "target.h"
30 #include "tree.h"
31 #include "gimple.h"
32 #include "tree-pass.h"
33 #include "ssa.h"
34 #include "cgraph.h"
35 #include "pretty-print.h"
36 #include "diagnostic-core.h"
37 #include "fold-const.h"
38 #include "stor-layout.h"
39 #include "internal-fn.h"
40 #include "gimple-fold.h"
41 #include "gimplify.h"
42 #include "gimple-iterator.h"
43 #include "gimplify-me.h"
44 #include "gimple-walk.h"
45 #include "tree-iterator.h"
46 #include "tree-inline.h"
47 #include "langhooks.h"
48 #include "tree-dfa.h"
49 #include "tree-ssa.h"
50 #include "splay-tree.h"
51 #include "omp-general.h"
52 #include "omp-low.h"
53 #include "gimple-low.h"
54 #include "alloc-pool.h"
55 #include "symbol-summary.h"
56 #include "tree-nested.h"
57 #include "context.h"
58 #include "gomp-constants.h"
59 #include "gimple-pretty-print.h"
60 #include "stringpool.h"
61 #include "attribs.h"
62 #include "omp-offload.h"
64 /* Lowering of OMP parallel and workshare constructs proceeds in two
65 phases. The first phase scans the function looking for OMP statements
66 and then for variables that must be replaced to satisfy data sharing
67 clauses. The second phase expands code for the constructs, as well as
68 re-gimplifying things when variables have been replaced with complex
69 expressions.
71 Final code generation is done by pass_expand_omp. The flowgraph is
72 scanned for regions which are then moved to a new
73 function, to be invoked by the thread library, or offloaded. */
75 /* Context structure. Used to store information about each parallel
76 directive in the code. */
78 struct omp_context
80 /* This field must be at the beginning, as we do "inheritance": Some
81 callback functions for tree-inline.cc (e.g., omp_copy_decl)
82 receive a copy_body_data pointer that is up-casted to an
83 omp_context pointer. */
84 copy_body_data cb;
86 /* The tree of contexts corresponding to the encountered constructs. */
87 struct omp_context *outer;
88 gimple *stmt;
90 /* Map variables to fields in a structure that allows communication
91 between sending and receiving threads. */
92 splay_tree field_map;
93 tree record_type;
94 tree sender_decl;
95 tree receiver_decl;
97 /* These are used just by task contexts, if task firstprivate fn is
98 needed. srecord_type is used to communicate from the thread
99 that encountered the task construct to task firstprivate fn,
100 record_type is allocated by GOMP_task, initialized by task firstprivate
101 fn and passed to the task body fn. */
102 splay_tree sfield_map;
103 tree srecord_type;
105 /* A chain of variables to add to the top-level block surrounding the
106 construct. In the case of a parallel, this is in the child function. */
107 tree block_vars;
109 /* Label to which GOMP_cancel{,llation_point} and explicit and implicit
110 barriers should jump to during omplower pass. */
111 tree cancel_label;
113 /* The sibling GIMPLE_OMP_FOR simd with _simt_ clause or NULL
114 otherwise. */
115 gimple *simt_stmt;
117 /* For task reductions registered in this context, a vector containing
118 the length of the private copies block (if constant, otherwise NULL)
119 and then offsets (if constant, otherwise NULL) for each entry. */
120 vec<tree> task_reductions;
122 /* A hash map from the reduction clauses to the registered array
123 elts. */
124 hash_map<tree, unsigned> *task_reduction_map;
126 /* And a hash map from the lastprivate(conditional:) variables to their
127 corresponding tracking loop iteration variables. */
128 hash_map<tree, tree> *lastprivate_conditional_map;
130 /* And a hash map from the allocate variables to their corresponding
131 allocators. */
132 hash_map<tree, tree> *allocate_map;
134 /* A tree_list of the reduction clauses in this context. This is
135 only used for checking the consistency of OpenACC reduction
136 clauses in scan_omp_for and is not guaranteed to contain a valid
137 value outside of this function. */
138 tree local_reduction_clauses;
140 /* A tree_list of the reduction clauses in outer contexts. This is
141 only used for checking the consistency of OpenACC reduction
142 clauses in scan_omp_for and is not guaranteed to contain a valid
143 value outside of this function. */
144 tree outer_reduction_clauses;
146 /* Nesting depth of this context. Used to beautify error messages re
147 invalid gotos. The outermost ctx is depth 1, with depth 0 being
148 reserved for the main body of the function. */
149 int depth;
151 /* True if this parallel directive is nested within another. */
152 bool is_nested;
154 /* True if this construct can be cancelled. */
155 bool cancellable;
157 /* True if lower_omp_1 should look up lastprivate conditional in parent
158 context. */
159 bool combined_into_simd_safelen1;
161 /* True if there is nested scan context with inclusive clause. */
162 bool scan_inclusive;
164 /* True if there is nested scan context with exclusive clause. */
165 bool scan_exclusive;
167 /* True in the second simd loop of for simd with inscan reductions. */
168 bool for_simd_scan_phase;
170 /* True if there is order(concurrent) clause on the construct. */
171 bool order_concurrent;
173 /* True if there is bind clause on the construct (i.e. a loop construct). */
174 bool loop_p;
176 /* Only used for omp target contexts. True if a teams construct is
177 strictly nested in it. */
178 bool teams_nested_p;
180 /* Only used for omp target contexts. True if an OpenMP construct other
181 than teams is strictly nested in it. */
182 bool nonteams_nested_p;
184 /* Candidates for adjusting OpenACC privatization level. */
185 vec<tree> oacc_privatization_candidates;
188 static splay_tree all_contexts;
189 static int taskreg_nesting_level;
190 static int target_nesting_level;
191 static bitmap make_addressable_vars;
192 static bitmap global_nonaddressable_vars;
193 static vec<omp_context *> taskreg_contexts;
194 static vec<gomp_task *> task_cpyfns;
196 static void scan_omp (gimple_seq *, omp_context *);
197 static tree scan_omp_1_op (tree *, int *, void *);
199 #define WALK_SUBSTMTS \
200 case GIMPLE_BIND: \
201 case GIMPLE_TRY: \
202 case GIMPLE_CATCH: \
203 case GIMPLE_EH_FILTER: \
204 case GIMPLE_TRANSACTION: \
205 /* The sub-statements for these should be walked. */ \
206 *handled_ops_p = false; \
207 break;
209 /* Return whether CTX represents an OpenACC 'parallel' or 'serial' construct.
210 (This doesn't include OpenACC 'kernels' decomposed parts.) */
212 static bool
213 is_oacc_parallel_or_serial (omp_context *ctx)
215 enum gimple_code outer_type = gimple_code (ctx->stmt);
216 return ((outer_type == GIMPLE_OMP_TARGET)
217 && ((gimple_omp_target_kind (ctx->stmt)
218 == GF_OMP_TARGET_KIND_OACC_PARALLEL)
219 || (gimple_omp_target_kind (ctx->stmt)
220 == GF_OMP_TARGET_KIND_OACC_SERIAL)));
223 /* Return whether CTX represents an OpenACC 'kernels' construct.
224 (This doesn't include OpenACC 'kernels' decomposed parts.) */
226 static bool
227 is_oacc_kernels (omp_context *ctx)
229 enum gimple_code outer_type = gimple_code (ctx->stmt);
230 return ((outer_type == GIMPLE_OMP_TARGET)
231 && (gimple_omp_target_kind (ctx->stmt)
232 == GF_OMP_TARGET_KIND_OACC_KERNELS));
235 /* Return whether CTX represents an OpenACC 'kernels' decomposed part. */
237 static bool
238 is_oacc_kernels_decomposed_part (omp_context *ctx)
240 enum gimple_code outer_type = gimple_code (ctx->stmt);
241 return ((outer_type == GIMPLE_OMP_TARGET)
242 && ((gimple_omp_target_kind (ctx->stmt)
243 == GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED)
244 || (gimple_omp_target_kind (ctx->stmt)
245 == GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE)
246 || (gimple_omp_target_kind (ctx->stmt)
247 == GF_OMP_TARGET_KIND_OACC_DATA_KERNELS)));
250 /* Return true if STMT corresponds to an OpenMP target region. */
251 static bool
252 is_omp_target (gimple *stmt)
254 if (gimple_code (stmt) == GIMPLE_OMP_TARGET)
256 int kind = gimple_omp_target_kind (stmt);
257 return (kind == GF_OMP_TARGET_KIND_REGION
258 || kind == GF_OMP_TARGET_KIND_DATA
259 || kind == GF_OMP_TARGET_KIND_ENTER_DATA
260 || kind == GF_OMP_TARGET_KIND_EXIT_DATA);
262 return false;
265 /* If DECL is the artificial dummy VAR_DECL created for non-static
266 data member privatization, return the underlying "this" parameter,
267 otherwise return NULL. */
269 tree
270 omp_member_access_dummy_var (tree decl)
272 if (!VAR_P (decl)
273 || !DECL_ARTIFICIAL (decl)
274 || !DECL_IGNORED_P (decl)
275 || !DECL_HAS_VALUE_EXPR_P (decl)
276 || !lang_hooks.decls.omp_disregard_value_expr (decl, false))
277 return NULL_TREE;
279 tree v = DECL_VALUE_EXPR (decl);
280 if (TREE_CODE (v) != COMPONENT_REF)
281 return NULL_TREE;
283 while (1)
284 switch (TREE_CODE (v))
286 case COMPONENT_REF:
287 case MEM_REF:
288 case INDIRECT_REF:
289 CASE_CONVERT:
290 case POINTER_PLUS_EXPR:
291 v = TREE_OPERAND (v, 0);
292 continue;
293 case PARM_DECL:
294 if (DECL_CONTEXT (v) == current_function_decl
295 && DECL_ARTIFICIAL (v)
296 && TREE_CODE (TREE_TYPE (v)) == POINTER_TYPE)
297 return v;
298 return NULL_TREE;
299 default:
300 return NULL_TREE;
304 /* Helper for unshare_and_remap, called through walk_tree. */
306 static tree
307 unshare_and_remap_1 (tree *tp, int *walk_subtrees, void *data)
309 tree *pair = (tree *) data;
310 if (*tp == pair[0])
312 *tp = unshare_expr (pair[1]);
313 *walk_subtrees = 0;
315 else if (IS_TYPE_OR_DECL_P (*tp))
316 *walk_subtrees = 0;
317 return NULL_TREE;
320 /* Return unshare_expr (X) with all occurrences of FROM
321 replaced with TO. */
323 static tree
324 unshare_and_remap (tree x, tree from, tree to)
326 tree pair[2] = { from, to };
327 x = unshare_expr (x);
328 walk_tree (&x, unshare_and_remap_1, pair, NULL);
329 return x;
332 /* Convenience function for calling scan_omp_1_op on tree operands. */
334 static inline tree
335 scan_omp_op (tree *tp, omp_context *ctx)
337 struct walk_stmt_info wi;
339 memset (&wi, 0, sizeof (wi));
340 wi.info = ctx;
341 wi.want_locations = true;
343 return walk_tree (tp, scan_omp_1_op, &wi, NULL);
346 static void lower_omp (gimple_seq *, omp_context *);
347 static tree lookup_decl_in_outer_ctx (tree, omp_context *);
348 static tree maybe_lookup_decl_in_outer_ctx (tree, omp_context *);
350 /* Return true if CTX is for an omp parallel. */
352 static inline bool
353 is_parallel_ctx (omp_context *ctx)
355 return gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL;
359 /* Return true if CTX is for an omp task. */
361 static inline bool
362 is_task_ctx (omp_context *ctx)
364 return gimple_code (ctx->stmt) == GIMPLE_OMP_TASK;
368 /* Return true if CTX is for an omp taskloop. */
370 static inline bool
371 is_taskloop_ctx (omp_context *ctx)
373 return gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
374 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP;
378 /* Return true if CTX is for a host omp teams. */
380 static inline bool
381 is_host_teams_ctx (omp_context *ctx)
383 return gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
384 && gimple_omp_teams_host (as_a <gomp_teams *> (ctx->stmt));
387 /* Return true if CTX is for an omp parallel or omp task or host omp teams
388 (the last one is strictly not a task region in OpenMP speak, but we
389 need to treat it similarly). */
391 static inline bool
392 is_taskreg_ctx (omp_context *ctx)
394 return is_parallel_ctx (ctx) || is_task_ctx (ctx) || is_host_teams_ctx (ctx);
397 /* Return true if EXPR is variable sized. */
399 static inline bool
400 is_variable_sized (const_tree expr)
402 return !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
405 /* Lookup variables. The "maybe" form
406 allows for the variable form to not have been entered, otherwise we
407 assert that the variable must have been entered. */
409 static inline tree
410 lookup_decl (tree var, omp_context *ctx)
412 tree *n = ctx->cb.decl_map->get (var);
413 return *n;
416 static inline tree
417 maybe_lookup_decl (const_tree var, omp_context *ctx)
419 tree *n = ctx->cb.decl_map->get (const_cast<tree> (var));
420 return n ? *n : NULL_TREE;
423 static inline tree
424 lookup_field (tree var, omp_context *ctx)
426 splay_tree_node n;
427 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) var);
428 return (tree) n->value;
431 static inline tree
432 lookup_sfield (splay_tree_key key, omp_context *ctx)
434 splay_tree_node n;
435 n = splay_tree_lookup (ctx->sfield_map
436 ? ctx->sfield_map : ctx->field_map, key);
437 return (tree) n->value;
440 static inline tree
441 lookup_sfield (tree var, omp_context *ctx)
443 return lookup_sfield ((splay_tree_key) var, ctx);
446 static inline tree
447 maybe_lookup_field (splay_tree_key key, omp_context *ctx)
449 splay_tree_node n;
450 n = splay_tree_lookup (ctx->field_map, key);
451 return n ? (tree) n->value : NULL_TREE;
454 static inline tree
455 maybe_lookup_field (tree var, omp_context *ctx)
457 return maybe_lookup_field ((splay_tree_key) var, ctx);
460 /* Return true if DECL should be copied by pointer. SHARED_CTX is
461 the parallel context if DECL is to be shared. */
463 static bool
464 use_pointer_for_field (tree decl, omp_context *shared_ctx)
466 if (AGGREGATE_TYPE_P (TREE_TYPE (decl))
467 || TYPE_ATOMIC (TREE_TYPE (decl)))
468 return true;
470 /* We can only use copy-in/copy-out semantics for shared variables
471 when we know the value is not accessible from an outer scope. */
472 if (shared_ctx)
474 gcc_assert (!is_gimple_omp_oacc (shared_ctx->stmt));
476 /* ??? Trivially accessible from anywhere. But why would we even
477 be passing an address in this case? Should we simply assert
478 this to be false, or should we have a cleanup pass that removes
479 these from the list of mappings? */
480 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, shared_ctx)))
481 return true;
483 /* For variables with DECL_HAS_VALUE_EXPR_P set, we cannot tell
484 without analyzing the expression whether or not its location
485 is accessible to anyone else. In the case of nested parallel
486 regions it certainly may be. */
487 if (TREE_CODE (decl) != RESULT_DECL && DECL_HAS_VALUE_EXPR_P (decl))
488 return true;
490 /* Do not use copy-in/copy-out for variables that have their
491 address taken. */
492 if (is_global_var (decl))
494 /* For file scope vars, track whether we've seen them as
495 non-addressable initially and in that case, keep the same
496 answer for the duration of the pass, even when they are made
497 addressable later on e.g. through reduction expansion. Global
498 variables which weren't addressable before the pass will not
499 have their privatized copies address taken. See PR91216. */
500 if (!TREE_ADDRESSABLE (decl))
502 if (!global_nonaddressable_vars)
503 global_nonaddressable_vars = BITMAP_ALLOC (NULL);
504 bitmap_set_bit (global_nonaddressable_vars, DECL_UID (decl));
506 else if (!global_nonaddressable_vars
507 || !bitmap_bit_p (global_nonaddressable_vars,
508 DECL_UID (decl)))
509 return true;
511 else if (TREE_ADDRESSABLE (decl))
512 return true;
514 /* lower_send_shared_vars only uses copy-in, but not copy-out
515 for these. */
516 if (TREE_READONLY (decl)
517 || ((TREE_CODE (decl) == RESULT_DECL
518 || TREE_CODE (decl) == PARM_DECL)
519 && DECL_BY_REFERENCE (decl)))
520 return false;
522 /* Disallow copy-in/out in nested parallel if
523 decl is shared in outer parallel, otherwise
524 each thread could store the shared variable
525 in its own copy-in location, making the
526 variable no longer really shared. */
527 if (shared_ctx->is_nested)
529 omp_context *up;
531 for (up = shared_ctx->outer; up; up = up->outer)
532 if ((is_taskreg_ctx (up)
533 || (gimple_code (up->stmt) == GIMPLE_OMP_TARGET
534 && is_gimple_omp_offloaded (up->stmt)))
535 && maybe_lookup_decl (decl, up))
536 break;
538 if (up)
540 tree c;
542 if (gimple_code (up->stmt) == GIMPLE_OMP_TARGET)
544 for (c = gimple_omp_target_clauses (up->stmt);
545 c; c = OMP_CLAUSE_CHAIN (c))
546 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
547 && OMP_CLAUSE_DECL (c) == decl)
548 break;
550 else
551 for (c = gimple_omp_taskreg_clauses (up->stmt);
552 c; c = OMP_CLAUSE_CHAIN (c))
553 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
554 && OMP_CLAUSE_DECL (c) == decl)
555 break;
557 if (c)
558 goto maybe_mark_addressable_and_ret;
562 /* For tasks avoid using copy-in/out. As tasks can be
563 deferred or executed in different thread, when GOMP_task
564 returns, the task hasn't necessarily terminated. */
565 if (is_task_ctx (shared_ctx))
567 tree outer;
568 maybe_mark_addressable_and_ret:
569 outer = maybe_lookup_decl_in_outer_ctx (decl, shared_ctx);
570 if (is_gimple_reg (outer) && !omp_member_access_dummy_var (outer))
572 /* Taking address of OUTER in lower_send_shared_vars
573 might need regimplification of everything that uses the
574 variable. */
575 if (!make_addressable_vars)
576 make_addressable_vars = BITMAP_ALLOC (NULL);
577 bitmap_set_bit (make_addressable_vars, DECL_UID (outer));
578 TREE_ADDRESSABLE (outer) = 1;
580 return true;
584 return false;
587 /* Construct a new automatic decl similar to VAR. */
589 static tree
590 omp_copy_decl_2 (tree var, tree name, tree type, omp_context *ctx)
592 tree copy = copy_var_decl (var, name, type);
594 DECL_CONTEXT (copy) = current_function_decl;
596 if (ctx)
598 DECL_CHAIN (copy) = ctx->block_vars;
599 ctx->block_vars = copy;
601 else
602 record_vars (copy);
604 /* If VAR is listed in make_addressable_vars, it wasn't
605 originally addressable, but was only later made so.
606 We don't need to take address of privatizations
607 from that var. */
608 if (TREE_ADDRESSABLE (var)
609 && ((make_addressable_vars
610 && bitmap_bit_p (make_addressable_vars, DECL_UID (var)))
611 || (global_nonaddressable_vars
612 && bitmap_bit_p (global_nonaddressable_vars, DECL_UID (var)))))
613 TREE_ADDRESSABLE (copy) = 0;
615 return copy;
618 static tree
619 omp_copy_decl_1 (tree var, omp_context *ctx)
621 return omp_copy_decl_2 (var, DECL_NAME (var), TREE_TYPE (var), ctx);
624 /* Build tree nodes to access the field for VAR on the receiver side. */
626 static tree
627 build_receiver_ref (tree var, bool by_ref, omp_context *ctx)
629 tree x, field = lookup_field (var, ctx);
631 /* If the receiver record type was remapped in the child function,
632 remap the field into the new record type. */
633 x = maybe_lookup_field (field, ctx);
634 if (x != NULL)
635 field = x;
637 x = build_simple_mem_ref (ctx->receiver_decl);
638 TREE_THIS_NOTRAP (x) = 1;
639 x = omp_build_component_ref (x, field);
640 if (by_ref)
642 x = build_simple_mem_ref (x);
643 TREE_THIS_NOTRAP (x) = 1;
646 return x;
649 /* Build tree nodes to access VAR in the scope outer to CTX. In the case
650 of a parallel, this is a component reference; for workshare constructs
651 this is some variable. */
653 static tree
654 build_outer_var_ref (tree var, omp_context *ctx,
655 enum omp_clause_code code = OMP_CLAUSE_ERROR)
657 tree x;
658 omp_context *outer = ctx->outer;
659 for (; outer; outer = outer->outer)
661 if (gimple_code (outer->stmt) == GIMPLE_OMP_TASKGROUP)
662 continue;
663 if (gimple_code (outer->stmt) == GIMPLE_OMP_SCOPE
664 && !maybe_lookup_decl (var, outer))
665 continue;
666 break;
669 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
670 x = var;
671 else if (is_variable_sized (var))
673 x = TREE_OPERAND (DECL_VALUE_EXPR (var), 0);
674 x = build_outer_var_ref (x, ctx, code);
675 x = build_simple_mem_ref (x);
677 else if (is_taskreg_ctx (ctx))
679 bool by_ref = use_pointer_for_field (var, NULL);
680 x = build_receiver_ref (var, by_ref, ctx);
682 else if ((gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
683 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
684 || ctx->loop_p
685 || (code == OMP_CLAUSE_PRIVATE
686 && (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
687 || gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS
688 || gimple_code (ctx->stmt) == GIMPLE_OMP_SINGLE)))
690 /* #pragma omp simd isn't a worksharing construct, and can reference
691 even private vars in its linear etc. clauses.
692 Similarly for OMP_CLAUSE_PRIVATE with outer ref, that can refer
693 to private vars in all worksharing constructs. */
694 x = NULL_TREE;
695 if (outer && is_taskreg_ctx (outer))
696 x = lookup_decl (var, outer);
697 else if (outer)
698 x = maybe_lookup_decl_in_outer_ctx (var, ctx);
699 if (x == NULL_TREE)
700 x = var;
702 else if (code == OMP_CLAUSE_LASTPRIVATE && is_taskloop_ctx (ctx))
704 gcc_assert (outer);
705 splay_tree_node n
706 = splay_tree_lookup (outer->field_map,
707 (splay_tree_key) &DECL_UID (var));
708 if (n == NULL)
710 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, outer)))
711 x = var;
712 else
713 x = lookup_decl (var, outer);
715 else
717 tree field = (tree) n->value;
718 /* If the receiver record type was remapped in the child function,
719 remap the field into the new record type. */
720 x = maybe_lookup_field (field, outer);
721 if (x != NULL)
722 field = x;
724 x = build_simple_mem_ref (outer->receiver_decl);
725 x = omp_build_component_ref (x, field);
726 if (use_pointer_for_field (var, outer))
727 x = build_simple_mem_ref (x);
730 else if (outer)
731 x = lookup_decl (var, outer);
732 else if (omp_privatize_by_reference (var))
733 /* This can happen with orphaned constructs. If var is reference, it is
734 possible it is shared and as such valid. */
735 x = var;
736 else if (omp_member_access_dummy_var (var))
737 x = var;
738 else
739 gcc_unreachable ();
741 if (x == var)
743 tree t = omp_member_access_dummy_var (var);
744 if (t)
746 x = DECL_VALUE_EXPR (var);
747 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
748 if (o != t)
749 x = unshare_and_remap (x, t, o);
750 else
751 x = unshare_expr (x);
755 if (omp_privatize_by_reference (var))
756 x = build_simple_mem_ref (x);
758 return x;
761 /* Build tree nodes to access the field for VAR on the sender side. */
763 static tree
764 build_sender_ref (splay_tree_key key, omp_context *ctx)
766 tree field = lookup_sfield (key, ctx);
767 return omp_build_component_ref (ctx->sender_decl, field);
770 static tree
771 build_sender_ref (tree var, omp_context *ctx)
773 return build_sender_ref ((splay_tree_key) var, ctx);
776 /* Add a new field for VAR inside the structure CTX->SENDER_DECL. If
777 BASE_POINTERS_RESTRICT, declare the field with restrict. */
779 static void
780 install_var_field (tree var, bool by_ref, int mask, omp_context *ctx)
782 tree field, type, sfield = NULL_TREE;
783 splay_tree_key key = (splay_tree_key) var;
785 if ((mask & 16) != 0)
787 key = (splay_tree_key) &DECL_NAME (var);
788 gcc_checking_assert (key != (splay_tree_key) var);
790 if ((mask & 8) != 0)
792 key = (splay_tree_key) &DECL_UID (var);
793 gcc_checking_assert (key != (splay_tree_key) var);
795 gcc_assert ((mask & 1) == 0
796 || !splay_tree_lookup (ctx->field_map, key));
797 gcc_assert ((mask & 2) == 0 || !ctx->sfield_map
798 || !splay_tree_lookup (ctx->sfield_map, key));
799 gcc_assert ((mask & 3) == 3
800 || !is_gimple_omp_oacc (ctx->stmt));
802 type = TREE_TYPE (var);
803 if ((mask & 16) != 0)
804 type = lang_hooks.decls.omp_array_data (var, true);
806 /* Prevent redeclaring the var in the split-off function with a restrict
807 pointer type. Note that we only clear type itself, restrict qualifiers in
808 the pointed-to type will be ignored by points-to analysis. */
809 if (POINTER_TYPE_P (type)
810 && TYPE_RESTRICT (type))
811 type = build_qualified_type (type, TYPE_QUALS (type) & ~TYPE_QUAL_RESTRICT);
813 if (mask & 4)
815 gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
816 type = build_pointer_type (build_pointer_type (type));
818 else if (by_ref)
819 type = build_pointer_type (type);
820 else if ((mask & (32 | 3)) == 1
821 && omp_privatize_by_reference (var))
822 type = TREE_TYPE (type);
824 field = build_decl (DECL_SOURCE_LOCATION (var),
825 FIELD_DECL, DECL_NAME (var), type);
827 /* Remember what variable this field was created for. This does have a
828 side effect of making dwarf2out ignore this member, so for helpful
829 debugging we clear it later in delete_omp_context. */
830 DECL_ABSTRACT_ORIGIN (field) = var;
831 if ((mask & 16) == 0 && type == TREE_TYPE (var))
833 SET_DECL_ALIGN (field, DECL_ALIGN (var));
834 DECL_USER_ALIGN (field) = DECL_USER_ALIGN (var);
835 TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (var);
837 else
838 SET_DECL_ALIGN (field, TYPE_ALIGN (type));
840 if ((mask & 3) == 3)
842 insert_field_into_struct (ctx->record_type, field);
843 if (ctx->srecord_type)
845 sfield = build_decl (DECL_SOURCE_LOCATION (var),
846 FIELD_DECL, DECL_NAME (var), type);
847 DECL_ABSTRACT_ORIGIN (sfield) = var;
848 SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
849 DECL_USER_ALIGN (sfield) = DECL_USER_ALIGN (field);
850 TREE_THIS_VOLATILE (sfield) = TREE_THIS_VOLATILE (field);
851 insert_field_into_struct (ctx->srecord_type, sfield);
854 else
856 if (ctx->srecord_type == NULL_TREE)
858 tree t;
860 ctx->srecord_type = lang_hooks.types.make_type (RECORD_TYPE);
861 ctx->sfield_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
862 for (t = TYPE_FIELDS (ctx->record_type); t ; t = TREE_CHAIN (t))
864 sfield = build_decl (DECL_SOURCE_LOCATION (t),
865 FIELD_DECL, DECL_NAME (t), TREE_TYPE (t));
866 DECL_ABSTRACT_ORIGIN (sfield) = DECL_ABSTRACT_ORIGIN (t);
867 insert_field_into_struct (ctx->srecord_type, sfield);
868 splay_tree_insert (ctx->sfield_map,
869 (splay_tree_key) DECL_ABSTRACT_ORIGIN (t),
870 (splay_tree_value) sfield);
873 sfield = field;
874 insert_field_into_struct ((mask & 1) ? ctx->record_type
875 : ctx->srecord_type, field);
878 if (mask & 1)
879 splay_tree_insert (ctx->field_map, key, (splay_tree_value) field);
880 if ((mask & 2) && ctx->sfield_map)
881 splay_tree_insert (ctx->sfield_map, key, (splay_tree_value) sfield);
884 static tree
885 install_var_local (tree var, omp_context *ctx)
887 tree new_var = omp_copy_decl_1 (var, ctx);
888 insert_decl_map (&ctx->cb, var, new_var);
889 return new_var;
892 /* Adjust the replacement for DECL in CTX for the new context. This means
893 copying the DECL_VALUE_EXPR, and fixing up the type. */
895 static void
896 fixup_remapped_decl (tree decl, omp_context *ctx, bool private_debug)
898 tree new_decl, size;
900 new_decl = lookup_decl (decl, ctx);
902 TREE_TYPE (new_decl) = remap_type (TREE_TYPE (decl), &ctx->cb);
904 if ((!TREE_CONSTANT (DECL_SIZE (new_decl)) || private_debug)
905 && DECL_HAS_VALUE_EXPR_P (decl))
907 tree ve = DECL_VALUE_EXPR (decl);
908 walk_tree (&ve, copy_tree_body_r, &ctx->cb, NULL);
909 SET_DECL_VALUE_EXPR (new_decl, ve);
910 DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
913 if (!TREE_CONSTANT (DECL_SIZE (new_decl)))
915 size = remap_decl (DECL_SIZE (decl), &ctx->cb);
916 if (size == error_mark_node)
917 size = TYPE_SIZE (TREE_TYPE (new_decl));
918 DECL_SIZE (new_decl) = size;
920 size = remap_decl (DECL_SIZE_UNIT (decl), &ctx->cb);
921 if (size == error_mark_node)
922 size = TYPE_SIZE_UNIT (TREE_TYPE (new_decl));
923 DECL_SIZE_UNIT (new_decl) = size;
927 /* The callback for remap_decl. Search all containing contexts for a
928 mapping of the variable; this avoids having to duplicate the splay
929 tree ahead of time. We know a mapping doesn't already exist in the
930 given context. Create new mappings to implement default semantics. */
932 static tree
933 omp_copy_decl (tree var, copy_body_data *cb)
935 omp_context *ctx = (omp_context *) cb;
936 tree new_var;
938 if (TREE_CODE (var) == LABEL_DECL)
940 if (FORCED_LABEL (var) || DECL_NONLOCAL (var))
941 return var;
942 new_var = create_artificial_label (DECL_SOURCE_LOCATION (var));
943 DECL_CONTEXT (new_var) = current_function_decl;
944 insert_decl_map (&ctx->cb, var, new_var);
945 return new_var;
948 while (!is_taskreg_ctx (ctx))
950 ctx = ctx->outer;
951 if (ctx == NULL)
952 return var;
953 new_var = maybe_lookup_decl (var, ctx);
954 if (new_var)
955 return new_var;
958 if (is_global_var (var) || decl_function_context (var) != ctx->cb.src_fn)
959 return var;
961 return error_mark_node;
964 /* Create a new context, with OUTER_CTX being the surrounding context. */
966 static omp_context *
967 new_omp_context (gimple *stmt, omp_context *outer_ctx)
969 omp_context *ctx = XCNEW (omp_context);
971 splay_tree_insert (all_contexts, (splay_tree_key) stmt,
972 (splay_tree_value) ctx);
973 ctx->stmt = stmt;
975 if (outer_ctx)
977 ctx->outer = outer_ctx;
978 ctx->cb = outer_ctx->cb;
979 ctx->cb.block = NULL;
980 ctx->depth = outer_ctx->depth + 1;
982 else
984 ctx->cb.src_fn = current_function_decl;
985 ctx->cb.dst_fn = current_function_decl;
986 ctx->cb.src_node = cgraph_node::get (current_function_decl);
987 gcc_checking_assert (ctx->cb.src_node);
988 ctx->cb.dst_node = ctx->cb.src_node;
989 ctx->cb.src_cfun = cfun;
990 ctx->cb.copy_decl = omp_copy_decl;
991 ctx->cb.eh_lp_nr = 0;
992 ctx->cb.transform_call_graph_edges = CB_CGE_MOVE;
993 ctx->cb.adjust_array_error_bounds = true;
994 ctx->cb.dont_remap_vla_if_no_change = true;
995 ctx->depth = 1;
998 ctx->cb.decl_map = new hash_map<tree, tree>;
1000 return ctx;
1003 static gimple_seq maybe_catch_exception (gimple_seq);
1005 /* Finalize task copyfn. */
1007 static void
1008 finalize_task_copyfn (gomp_task *task_stmt)
1010 struct function *child_cfun;
1011 tree child_fn;
1012 gimple_seq seq = NULL, new_seq;
1013 gbind *bind;
1015 child_fn = gimple_omp_task_copy_fn (task_stmt);
1016 if (child_fn == NULL_TREE)
1017 return;
1019 child_cfun = DECL_STRUCT_FUNCTION (child_fn);
1020 DECL_STRUCT_FUNCTION (child_fn)->curr_properties = cfun->curr_properties;
1022 push_cfun (child_cfun);
1023 bind = gimplify_body (child_fn, false);
1024 gimple_seq_add_stmt (&seq, bind);
1025 new_seq = maybe_catch_exception (seq);
1026 if (new_seq != seq)
1028 bind = gimple_build_bind (NULL, new_seq, NULL);
1029 seq = NULL;
1030 gimple_seq_add_stmt (&seq, bind);
1032 gimple_set_body (child_fn, seq);
1033 pop_cfun ();
1035 /* Inform the callgraph about the new function. */
1036 cgraph_node *node = cgraph_node::get_create (child_fn);
1037 node->parallelized_function = 1;
1038 cgraph_node::add_new_function (child_fn, false);
1041 /* Destroy a omp_context data structures. Called through the splay tree
1042 value delete callback. */
1044 static void
1045 delete_omp_context (splay_tree_value value)
1047 omp_context *ctx = (omp_context *) value;
1049 delete ctx->cb.decl_map;
1051 if (ctx->field_map)
1052 splay_tree_delete (ctx->field_map);
1053 if (ctx->sfield_map)
1054 splay_tree_delete (ctx->sfield_map);
1056 /* We hijacked DECL_ABSTRACT_ORIGIN earlier. We need to clear it before
1057 it produces corrupt debug information. */
1058 if (ctx->record_type)
1060 tree t;
1061 for (t = TYPE_FIELDS (ctx->record_type); t ; t = DECL_CHAIN (t))
1062 DECL_ABSTRACT_ORIGIN (t) = NULL;
1064 if (ctx->srecord_type)
1066 tree t;
1067 for (t = TYPE_FIELDS (ctx->srecord_type); t ; t = DECL_CHAIN (t))
1068 DECL_ABSTRACT_ORIGIN (t) = NULL;
1071 if (ctx->task_reduction_map)
1073 ctx->task_reductions.release ();
1074 delete ctx->task_reduction_map;
1077 delete ctx->lastprivate_conditional_map;
1078 delete ctx->allocate_map;
1080 XDELETE (ctx);
1083 /* Fix up RECEIVER_DECL with a type that has been remapped to the child
1084 context. */
1086 static void
1087 fixup_child_record_type (omp_context *ctx)
1089 tree f, type = ctx->record_type;
1091 if (!ctx->receiver_decl)
1092 return;
1093 /* ??? It isn't sufficient to just call remap_type here, because
1094 variably_modified_type_p doesn't work the way we expect for
1095 record types. Testing each field for whether it needs remapping
1096 and creating a new record by hand works, however. */
1097 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
1098 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
1099 break;
1100 if (f)
1102 tree name, new_fields = NULL;
1104 type = lang_hooks.types.make_type (RECORD_TYPE);
1105 name = DECL_NAME (TYPE_NAME (ctx->record_type));
1106 name = build_decl (DECL_SOURCE_LOCATION (ctx->receiver_decl),
1107 TYPE_DECL, name, type);
1108 TYPE_NAME (type) = name;
1110 for (f = TYPE_FIELDS (ctx->record_type); f ; f = DECL_CHAIN (f))
1112 tree new_f = copy_node (f);
1113 DECL_CONTEXT (new_f) = type;
1114 TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &ctx->cb);
1115 DECL_CHAIN (new_f) = new_fields;
1116 walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &ctx->cb, NULL);
1117 walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r,
1118 &ctx->cb, NULL);
1119 walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
1120 &ctx->cb, NULL);
1121 new_fields = new_f;
1123 /* Arrange to be able to look up the receiver field
1124 given the sender field. */
1125 splay_tree_insert (ctx->field_map, (splay_tree_key) f,
1126 (splay_tree_value) new_f);
1128 TYPE_FIELDS (type) = nreverse (new_fields);
1129 layout_type (type);
1132 /* In a target region we never modify any of the pointers in *.omp_data_i,
1133 so attempt to help the optimizers. */
1134 if (is_gimple_omp_offloaded (ctx->stmt))
1135 type = build_qualified_type (type, TYPE_QUAL_CONST);
1137 TREE_TYPE (ctx->receiver_decl)
1138 = build_qualified_type (build_reference_type (type), TYPE_QUAL_RESTRICT);
1141 /* Instantiate decls as necessary in CTX to satisfy the data sharing
1142 specified by CLAUSES. */
/* Structure: three passes over CLAUSES.
   1) Record allocate-clause allocator/alignment requests into
      ctx->allocate_map, keyed by the clause decl.
   2) For each data-sharing clause, install a field in the sender/receiver
      record (install_var_field) and/or a local remapped copy
      (install_var_local), per clause kind.
   3) Fix up decls that were remapped in pass 2, and note whether any
      reduction/lastprivate/linear GIMPLE sequences still need scanning,
      which happens at the very end.  */
1144 static void
1145 scan_sharing_clauses (tree clauses, omp_context *ctx)
1147 tree c, decl;
1148 bool scan_array_reductions = false;
/* Pass 1: collect allocate () clause information.  An entry is only made
   when the allocator is not the default omp_default_mem_alloc or an
   explicit alignment was requested.  */
1150 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1151 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_ALLOCATE
1152 && (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c) == NULL_TREE
1153 /* omp_default_mem_alloc is 1 */
1154 || !integer_onep (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c))
1155 || OMP_CLAUSE_ALLOCATE_ALIGN (c) != NULL_TREE))
1157 if (ctx->allocate_map == NULL)
1158 ctx->allocate_map = new hash_map<tree, tree>;
1159 tree val = integer_zero_node;
1160 if (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c))
1161 val = OMP_CLAUSE_ALLOCATE_ALLOCATOR (c);
1162 if (OMP_CLAUSE_ALLOCATE_ALIGN (c))
1163 val = build_tree_list (val, OMP_CLAUSE_ALLOCATE_ALIGN (c));
1164 ctx->allocate_map->put (OMP_CLAUSE_DECL (c), val);
/* Pass 2: install record fields and local copies per clause kind.  */
1167 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1169 bool by_ref;
1171 switch (OMP_CLAUSE_CODE (c))
1173 case OMP_CLAUSE_PRIVATE:
1174 decl = OMP_CLAUSE_DECL (c);
1175 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
1176 goto do_private;
1177 else if (!is_variable_sized (decl))
1178 install_var_local (decl, ctx);
1179 break;
1181 case OMP_CLAUSE_SHARED:
1182 decl = OMP_CLAUSE_DECL (c);
/* A decl that is shared cannot be privately allocated; drop any
   allocate-clause entry recorded for it in pass 1.  */
1183 if (ctx->allocate_map && ctx->allocate_map->get (decl))
1184 ctx->allocate_map->remove (decl);
1185 /* Ignore shared directives in teams construct inside of
1186 target construct. */
1187 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
1188 && !is_host_teams_ctx (ctx))
1190 /* Global variables don't need to be copied,
1191 the receiver side will use them directly. */
1192 tree odecl = maybe_lookup_decl_in_outer_ctx (decl, ctx);
1193 if (is_global_var (odecl))
1194 break;
1195 insert_decl_map (&ctx->cb, decl, odecl);
1196 break;
1198 gcc_assert (is_taskreg_ctx (ctx));
1199 gcc_assert (!COMPLETE_TYPE_P (TREE_TYPE (decl))
1200 || !is_variable_sized (decl));
1201 /* Global variables don't need to be copied,
1202 the receiver side will use them directly. */
1203 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1204 break;
1205 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
1207 use_pointer_for_field (decl, ctx);
1208 break;
1210 by_ref = use_pointer_for_field (decl, NULL);
1211 if ((! TREE_READONLY (decl) && !OMP_CLAUSE_SHARED_READONLY (c))
1212 || TREE_ADDRESSABLE (decl)
1213 || by_ref
1214 || omp_privatize_by_reference (decl))
1216 by_ref = use_pointer_for_field (decl, ctx);
1217 install_var_field (decl, by_ref, 3, ctx);
1218 install_var_local (decl, ctx);
1219 break;
1221 /* We don't need to copy const scalar vars back. */
1222 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_FIRSTPRIVATE);
1223 goto do_private;
1225 case OMP_CLAUSE_REDUCTION:
1226 /* Collect 'reduction' clauses on OpenACC compute construct. */
1227 if (is_gimple_omp_oacc (ctx->stmt)
1228 && is_gimple_omp_offloaded (ctx->stmt))
1230 /* No 'reduction' clauses on OpenACC 'kernels'. */
1231 gcc_checking_assert (!is_oacc_kernels (ctx));
1232 /* Likewise, on OpenACC 'kernels' decomposed parts. */
1233 gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx));
1235 ctx->local_reduction_clauses
1236 = tree_cons (NULL, c, ctx->local_reduction_clauses);
1238 /* FALLTHRU */
1240 case OMP_CLAUSE_IN_REDUCTION:
1241 decl = OMP_CLAUSE_DECL (c);
1242 if (ctx->allocate_map
1243 && ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
1244 && (OMP_CLAUSE_REDUCTION_INSCAN (c)
1245 || OMP_CLAUSE_REDUCTION_TASK (c)))
1246 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
1247 || is_task_ctx (ctx)))
1249 /* For now. */
1250 if (ctx->allocate_map->get (decl))
1251 ctx->allocate_map->remove (decl);
/* Array-section reductions are represented as a MEM_REF off the base
   pointer; dig out the underlying base decl T first.  */
1253 if (TREE_CODE (decl) == MEM_REF)
1255 tree t = TREE_OPERAND (decl, 0);
1256 if (TREE_CODE (t) == POINTER_PLUS_EXPR)
1257 t = TREE_OPERAND (t, 0);
1258 if (TREE_CODE (t) == INDIRECT_REF
1259 || TREE_CODE (t) == ADDR_EXPR)
1260 t = TREE_OPERAND (t, 0);
1261 if (is_omp_target (ctx->stmt))
1263 if (is_variable_sized (t))
1265 gcc_assert (DECL_HAS_VALUE_EXPR_P (t));
1266 t = DECL_VALUE_EXPR (t);
1267 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
1268 t = TREE_OPERAND (t, 0);
1269 gcc_assert (DECL_P (t));
/* On target constructs, record the copied decl in field_map keyed by
   the address of DECL_CONTEXT rather than installing a field.  */
1271 tree at = t;
1272 if (ctx->outer)
1273 scan_omp_op (&at, ctx->outer);
1274 tree nt = omp_copy_decl_1 (at, ctx->outer);
1275 splay_tree_insert (ctx->field_map,
1276 (splay_tree_key) &DECL_CONTEXT (t),
1277 (splay_tree_value) nt);
1278 if (at != t)
1279 splay_tree_insert (ctx->field_map,
1280 (splay_tree_key) &DECL_CONTEXT (at),
1281 (splay_tree_value) nt);
1282 break;
1284 install_var_local (t, ctx);
1285 if (is_taskreg_ctx (ctx)
1286 && (!is_global_var (maybe_lookup_decl_in_outer_ctx (t, ctx))
1287 || (is_task_ctx (ctx)
1288 && (TREE_CODE (TREE_TYPE (t)) == POINTER_TYPE
1289 || (TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE
1290 && (TREE_CODE (TREE_TYPE (TREE_TYPE (t)))
1291 == POINTER_TYPE)))))
1292 && !is_variable_sized (t)
1293 && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
1294 || (!OMP_CLAUSE_REDUCTION_TASK (c)
1295 && !is_task_ctx (ctx))))
1297 by_ref = use_pointer_for_field (t, NULL);
1298 if (is_task_ctx (ctx)
1299 && TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE
1300 && TREE_CODE (TREE_TYPE (TREE_TYPE (t))) == POINTER_TYPE)
1302 install_var_field (t, false, 1, ctx);
1303 install_var_field (t, by_ref, 2, ctx);
1305 else
1306 install_var_field (t, by_ref, 3, ctx);
1308 break;
1310 if (is_omp_target (ctx->stmt))
1312 tree at = decl;
1313 if (ctx->outer)
1314 scan_omp_op (&at, ctx->outer);
1315 tree nt = omp_copy_decl_1 (at, ctx->outer);
1316 splay_tree_insert (ctx->field_map,
1317 (splay_tree_key) &DECL_CONTEXT (decl),
1318 (splay_tree_value) nt);
1319 if (at != decl)
1320 splay_tree_insert (ctx->field_map,
1321 (splay_tree_key) &DECL_CONTEXT (at),
1322 (splay_tree_value) nt);
1323 break;
1325 if (is_task_ctx (ctx)
1326 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
1327 && OMP_CLAUSE_REDUCTION_TASK (c)
1328 && is_parallel_ctx (ctx)))
1330 /* Global variables don't need to be copied,
1331 the receiver side will use them directly. */
1332 if (!is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1334 by_ref = use_pointer_for_field (decl, ctx);
1335 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION)
1336 install_var_field (decl, by_ref, 3, ctx);
1338 install_var_local (decl, ctx);
1339 break;
1341 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
1342 && OMP_CLAUSE_REDUCTION_TASK (c))
1344 install_var_local (decl, ctx);
1345 break;
1347 goto do_private;
1349 case OMP_CLAUSE_LASTPRIVATE:
1350 /* Let the corresponding firstprivate clause create
1351 the variable. */
1352 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
1353 break;
1354 /* FALLTHRU */
1356 case OMP_CLAUSE_FIRSTPRIVATE:
1357 case OMP_CLAUSE_LINEAR:
1358 decl = OMP_CLAUSE_DECL (c);
1359 do_private:
1360 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
1361 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR
1362 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR)
1363 && is_gimple_omp_offloaded (ctx->stmt))
1365 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
1367 by_ref = !omp_privatize_by_reference (decl);
1368 install_var_field (decl, by_ref, 3, ctx);
1370 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR)
1372 if (TREE_CODE (decl) == INDIRECT_REF)
1373 decl = TREE_OPERAND (decl, 0);
1374 install_var_field (decl, true, 3, ctx);
1376 else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1377 install_var_field (decl, true, 3, ctx);
1378 else
1379 install_var_field (decl, false, 3, ctx);
1381 if (is_variable_sized (decl))
1383 if (is_task_ctx (ctx))
1385 if (ctx->allocate_map
1386 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
1388 /* For now. */
1389 if (ctx->allocate_map->get (decl))
1390 ctx->allocate_map->remove (decl);
1392 install_var_field (decl, false, 1, ctx);
1394 break;
1396 else if (is_taskreg_ctx (ctx))
1398 bool global
1399 = is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx));
1400 by_ref = use_pointer_for_field (decl, NULL);
1402 if (is_task_ctx (ctx)
1403 && (global || by_ref || omp_privatize_by_reference (decl)))
1405 if (ctx->allocate_map
1406 && ctx->allocate_map->get (decl))
1407 install_var_field (decl, by_ref, 32 | 1, ctx);
1408 else
1409 install_var_field (decl, false, 1, ctx);
1410 if (!global)
1411 install_var_field (decl, by_ref, 2, ctx);
1413 else if (!global)
1414 install_var_field (decl, by_ref, 3, ctx);
1416 install_var_local (decl, ctx);
1417 break;
1419 case OMP_CLAUSE_USE_DEVICE_PTR:
1420 case OMP_CLAUSE_USE_DEVICE_ADDR:
1421 decl = OMP_CLAUSE_DECL (c);
1423 /* Fortran array descriptors. */
1424 if (lang_hooks.decls.omp_array_data (decl, true))
1425 install_var_field (decl, false, 19, ctx);
1426 else if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
1427 && !omp_privatize_by_reference (decl)
1428 && !omp_is_allocatable_or_ptr (decl))
1429 || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1430 install_var_field (decl, true, 11, ctx);
1431 else
1432 install_var_field (decl, false, 11, ctx);
1433 if (DECL_SIZE (decl)
1434 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1436 tree decl2 = DECL_VALUE_EXPR (decl);
1437 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1438 decl2 = TREE_OPERAND (decl2, 0);
1439 gcc_assert (DECL_P (decl2));
1440 install_var_local (decl2, ctx);
1442 install_var_local (decl, ctx);
1443 break;
1445 case OMP_CLAUSE_HAS_DEVICE_ADDR:
1446 decl = OMP_CLAUSE_DECL (c);
1447 while (TREE_CODE (decl) == INDIRECT_REF
1448 || TREE_CODE (decl) == ARRAY_REF)
1449 decl = TREE_OPERAND (decl, 0);
1450 goto do_private;
1452 case OMP_CLAUSE_IS_DEVICE_PTR:
1453 decl = OMP_CLAUSE_DECL (c);
1454 goto do_private;
1456 case OMP_CLAUSE__LOOPTEMP_:
1457 case OMP_CLAUSE__REDUCTEMP_:
1458 gcc_assert (is_taskreg_ctx (ctx));
1459 decl = OMP_CLAUSE_DECL (c);
1460 install_var_field (decl, false, 3, ctx);
1461 install_var_local (decl, ctx);
1462 break;
1464 case OMP_CLAUSE_COPYPRIVATE:
1465 case OMP_CLAUSE_COPYIN:
1466 decl = OMP_CLAUSE_DECL (c);
1467 by_ref = use_pointer_for_field (decl, NULL);
1468 install_var_field (decl, by_ref, 3, ctx);
1469 break;
1471 case OMP_CLAUSE_FINAL:
1472 case OMP_CLAUSE_IF:
1473 case OMP_CLAUSE_NUM_THREADS:
1474 case OMP_CLAUSE_NUM_TEAMS:
1475 case OMP_CLAUSE_THREAD_LIMIT:
1476 case OMP_CLAUSE_DEVICE:
1477 case OMP_CLAUSE_SCHEDULE:
1478 case OMP_CLAUSE_DIST_SCHEDULE:
1479 case OMP_CLAUSE_DEPEND:
1480 case OMP_CLAUSE_PRIORITY:
1481 case OMP_CLAUSE_GRAINSIZE:
1482 case OMP_CLAUSE_NUM_TASKS:
1483 case OMP_CLAUSE_NUM_GANGS:
1484 case OMP_CLAUSE_NUM_WORKERS:
1485 case OMP_CLAUSE_VECTOR_LENGTH:
1486 case OMP_CLAUSE_DETACH:
1487 case OMP_CLAUSE_FILTER:
/* Clauses whose operand is an expression evaluated in the OUTER
   context: scan it there so referenced decls get remapped.  */
1488 if (ctx->outer)
1489 scan_omp_op (&OMP_CLAUSE_OPERAND (c, 0), ctx->outer)
1490 break;
1492 case OMP_CLAUSE_TO:
1493 case OMP_CLAUSE_FROM:
1494 case OMP_CLAUSE_MAP:
1495 if (ctx->outer)
1496 scan_omp_op (&OMP_CLAUSE_SIZE (c), ctx->outer);
1497 decl = OMP_CLAUSE_DECL (c);
1498 /* If requested, make 'decl' addressable. */
1499 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1500 && OMP_CLAUSE_MAP_DECL_MAKE_ADDRESSABLE (c))
1502 gcc_checking_assert (DECL_P (decl));
1504 bool decl_addressable = TREE_ADDRESSABLE (decl);
1505 if (!decl_addressable)
1507 if (!make_addressable_vars)
1508 make_addressable_vars = BITMAP_ALLOC (NULL);
1509 bitmap_set_bit (make_addressable_vars, DECL_UID (decl));
1510 TREE_ADDRESSABLE (decl) = 1;
1513 if (dump_enabled_p ())
1515 location_t loc = OMP_CLAUSE_LOCATION (c);
1516 const dump_user_location_t d_u_loc
1517 = dump_user_location_t::from_location_t (loc);
1518 /* PR100695 "Format decoder, quoting in 'dump_printf' etc." */
1519 #if __GNUC__ >= 10
1520 # pragma GCC diagnostic push
1521 # pragma GCC diagnostic ignored "-Wformat"
1522 #endif
1523 if (!decl_addressable)
1524 dump_printf_loc (MSG_NOTE, d_u_loc,
1525 "variable %<%T%>"
1526 " made addressable\n",
1527 decl);
1528 else
1529 dump_printf_loc (MSG_NOTE, d_u_loc,
1530 "variable %<%T%>"
1531 " already made addressable\n",
1532 decl);
1533 #if __GNUC__ >= 10
1534 # pragma GCC diagnostic pop
1535 #endif
1538 /* Done. */
1539 OMP_CLAUSE_MAP_DECL_MAKE_ADDRESSABLE (c) = 0;
1541 /* Global variables with "omp declare target" attribute
1542 don't need to be copied, the receiver side will use them
1543 directly. However, global variables with "omp declare target link"
1544 attribute need to be copied. Or when ALWAYS modifier is used. */
1545 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1546 && DECL_P (decl)
1547 && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
1548 && (OMP_CLAUSE_MAP_KIND (c)
1549 != GOMP_MAP_FIRSTPRIVATE_REFERENCE)
1550 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ATTACH
1551 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_DETACH)
1552 || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1553 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TO
1554 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_FROM
1555 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TOFROM
1556 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_TO_PSET
1557 && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
1558 && varpool_node::get_create (decl)->offloadable
1559 && !lookup_attribute ("omp declare target link",
1560 DECL_ATTRIBUTES (decl)))
1561 break;
1562 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1563 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER)
1565 /* Ignore GOMP_MAP_POINTER kind for arrays in regions that are
1566 not offloaded; there is nothing to map for those. */
1567 if (!is_gimple_omp_offloaded (ctx->stmt)
1568 && !POINTER_TYPE_P (TREE_TYPE (decl))
1569 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
1570 break;
1572 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1573 && DECL_P (decl)
1574 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
1575 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)
1576 && is_omp_target (ctx->stmt))
1578 /* If this is an offloaded region, an attach operation should
1579 only exist when the pointer variable is mapped in a prior
1580 clause. */
1581 if (is_gimple_omp_offloaded (ctx->stmt))
1582 gcc_assert
1583 (maybe_lookup_decl (decl, ctx)
1584 || (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
1585 && lookup_attribute ("omp declare target",
1586 DECL_ATTRIBUTES (decl))));
1588 /* By itself, attach/detach is generated as part of pointer
1589 variable mapping and should not create new variables in the
1590 offloaded region, however sender refs for it must be created
1591 for its address to be passed to the runtime. */
1592 tree field
1593 = build_decl (OMP_CLAUSE_LOCATION (c),
1594 FIELD_DECL, NULL_TREE, ptr_type_node);
1595 SET_DECL_ALIGN (field, TYPE_ALIGN (ptr_type_node));
1596 insert_field_into_struct (ctx->record_type, field);
1597 /* To not clash with a map of the pointer variable itself,
1598 attach/detach maps have their field looked up by the *clause*
1599 tree expression, not the decl. */
1600 gcc_assert (!splay_tree_lookup (ctx->field_map,
1601 (splay_tree_key) c));
1602 splay_tree_insert (ctx->field_map, (splay_tree_key) c,
1603 (splay_tree_value) field);
1604 break;
1606 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1607 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
1608 || (OMP_CLAUSE_MAP_KIND (c)
1609 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
1611 if (TREE_CODE (decl) == COMPONENT_REF
1612 || (TREE_CODE (decl) == INDIRECT_REF
1613 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
1614 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
1615 == REFERENCE_TYPE)))
1616 break;
1617 if (DECL_SIZE (decl)
1618 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1620 tree decl2 = DECL_VALUE_EXPR (decl);
1621 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1622 decl2 = TREE_OPERAND (decl2, 0);
1623 gcc_assert (DECL_P (decl2));
1624 install_var_local (decl2, ctx);
1626 install_var_local (decl, ctx);
1627 break;
1629 if (DECL_P (decl))
1631 if (DECL_SIZE (decl)
1632 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1634 tree decl2 = DECL_VALUE_EXPR (decl);
1635 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1636 decl2 = TREE_OPERAND (decl2, 0);
1637 gcc_assert (DECL_P (decl2));
1638 install_var_field (decl2, true, 3, ctx);
1639 install_var_local (decl2, ctx);
1640 install_var_local (decl, ctx);
1642 else
1644 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1645 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
1646 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
1647 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1648 install_var_field (decl, true, 7, ctx);
1649 else
1650 install_var_field (decl, true, 3, ctx);
1651 if (is_gimple_omp_offloaded (ctx->stmt)
1652 && !(is_gimple_omp_oacc (ctx->stmt)
1653 && OMP_CLAUSE_MAP_IN_REDUCTION (c)))
1654 install_var_local (decl, ctx);
1657 else
1659 tree base = get_base_address (decl);
1660 tree nc = OMP_CLAUSE_CHAIN (c);
1661 if (DECL_P (base)
1662 && nc != NULL_TREE
1663 && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
1664 && OMP_CLAUSE_DECL (nc) == base
1665 && OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_POINTER
1666 && integer_zerop (OMP_CLAUSE_SIZE (nc)))
1668 OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c) = 1;
1669 OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (nc) = 1;
1671 else
1673 if (ctx->outer)
1675 scan_omp_op (&OMP_CLAUSE_DECL (c), ctx->outer);
1676 decl = OMP_CLAUSE_DECL (c);
1678 gcc_assert (!splay_tree_lookup (ctx->field_map,
1679 (splay_tree_key) decl));
1680 tree field
1681 = build_decl (OMP_CLAUSE_LOCATION (c),
1682 FIELD_DECL, NULL_TREE, ptr_type_node);
1683 SET_DECL_ALIGN (field, TYPE_ALIGN (ptr_type_node));
1684 insert_field_into_struct (ctx->record_type, field);
1685 splay_tree_insert (ctx->field_map, (splay_tree_key) decl,
1686 (splay_tree_value) field);
1689 break;
1691 case OMP_CLAUSE_ORDER:
1692 ctx->order_concurrent = true;
1693 break;
1695 case OMP_CLAUSE_BIND:
1696 ctx->loop_p = true;
1697 break;
1699 case OMP_CLAUSE_NOWAIT:
1700 case OMP_CLAUSE_ORDERED:
1701 case OMP_CLAUSE_COLLAPSE:
1702 case OMP_CLAUSE_UNTIED:
1703 case OMP_CLAUSE_MERGEABLE:
1704 case OMP_CLAUSE_PROC_BIND:
1705 case OMP_CLAUSE_SAFELEN:
1706 case OMP_CLAUSE_SIMDLEN:
1707 case OMP_CLAUSE_THREADS:
1708 case OMP_CLAUSE_SIMD:
1709 case OMP_CLAUSE_NOGROUP:
1710 case OMP_CLAUSE_DEFAULTMAP:
1711 case OMP_CLAUSE_ASYNC:
1712 case OMP_CLAUSE_WAIT:
1713 case OMP_CLAUSE_GANG:
1714 case OMP_CLAUSE_WORKER:
1715 case OMP_CLAUSE_VECTOR:
1716 case OMP_CLAUSE_INDEPENDENT:
1717 case OMP_CLAUSE_AUTO:
1718 case OMP_CLAUSE_SEQ:
1719 case OMP_CLAUSE_TILE:
1720 case OMP_CLAUSE__SIMT_:
1721 case OMP_CLAUSE_DEFAULT:
1722 case OMP_CLAUSE_NONTEMPORAL:
1723 case OMP_CLAUSE_IF_PRESENT:
1724 case OMP_CLAUSE_FINALIZE:
1725 case OMP_CLAUSE_TASK_REDUCTION:
1726 case OMP_CLAUSE_ALLOCATE:
1727 break;
1729 case OMP_CLAUSE_ALIGNED:
1730 decl = OMP_CLAUSE_DECL (c);
1731 if (is_global_var (decl)
1732 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1733 install_var_local (decl, ctx);
1734 break;
1736 case OMP_CLAUSE__CONDTEMP_:
1737 decl = OMP_CLAUSE_DECL (c);
1738 if (is_parallel_ctx (ctx))
1740 install_var_field (decl, false, 3, ctx);
1741 install_var_local (decl, ctx);
1743 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
1744 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD
1745 && !OMP_CLAUSE__CONDTEMP__ITER (c))
1746 install_var_local (decl, ctx);
1747 break;
1749 case OMP_CLAUSE__CACHE_:
1750 case OMP_CLAUSE_NOHOST:
1751 default:
1752 gcc_unreachable ();
/* Pass 3: decls are all installed; fix up the remapped copies and note
   any embedded GIMPLE sequences that still need scanning.  */
1756 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1758 switch (OMP_CLAUSE_CODE (c))
1760 case OMP_CLAUSE_LASTPRIVATE:
1761 /* Let the corresponding firstprivate clause create
1762 the variable. */
1763 if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
1764 scan_array_reductions = true;
1765 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
1766 break;
1767 /* FALLTHRU */
1769 case OMP_CLAUSE_FIRSTPRIVATE:
1770 case OMP_CLAUSE_PRIVATE:
1771 case OMP_CLAUSE_LINEAR:
1772 case OMP_CLAUSE_HAS_DEVICE_ADDR:
1773 case OMP_CLAUSE_IS_DEVICE_PTR:
1774 decl = OMP_CLAUSE_DECL (c);
1775 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR)
1777 while (TREE_CODE (decl) == INDIRECT_REF
1778 || TREE_CODE (decl) == ARRAY_REF)
1779 decl = TREE_OPERAND (decl, 0);
1782 if (is_variable_sized (decl))
1784 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
1785 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR
1786 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR)
1787 && is_gimple_omp_offloaded (ctx->stmt))
1789 tree decl2 = DECL_VALUE_EXPR (decl);
1790 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1791 decl2 = TREE_OPERAND (decl2, 0);
1792 gcc_assert (DECL_P (decl2));
1793 install_var_local (decl2, ctx);
1794 fixup_remapped_decl (decl2, ctx, false);
1796 install_var_local (decl, ctx);
1798 fixup_remapped_decl (decl, ctx,
1799 OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
1800 && OMP_CLAUSE_PRIVATE_DEBUG (c));
1801 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
1802 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
1803 scan_array_reductions = true;
1804 break;
1806 case OMP_CLAUSE_REDUCTION:
1807 case OMP_CLAUSE_IN_REDUCTION:
1808 decl = OMP_CLAUSE_DECL (c);
1809 if (TREE_CODE (decl) != MEM_REF && !is_omp_target (ctx->stmt))
1811 if (is_variable_sized (decl))
1812 install_var_local (decl, ctx);
1813 fixup_remapped_decl (decl, ctx, false);
1815 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
1816 scan_array_reductions = true;
1817 break;
1819 case OMP_CLAUSE_TASK_REDUCTION:
1820 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
1821 scan_array_reductions = true;
1822 break;
1824 case OMP_CLAUSE_SHARED:
1825 /* Ignore shared directives in teams construct inside of
1826 target construct. */
1827 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
1828 && !is_host_teams_ctx (ctx))
1829 break;
1830 decl = OMP_CLAUSE_DECL (c);
1831 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1832 break;
1833 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
1835 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl,
1836 ctx->outer)))
1837 break;
1838 bool by_ref = use_pointer_for_field (decl, ctx);
1839 install_var_field (decl, by_ref, 11, ctx);
1840 break;
1842 fixup_remapped_decl (decl, ctx, false);
1843 break;
1845 case OMP_CLAUSE_MAP:
1846 if (!is_gimple_omp_offloaded (ctx->stmt))
1847 break;
1848 decl = OMP_CLAUSE_DECL (c);
1849 if (DECL_P (decl)
1850 && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
1851 && (OMP_CLAUSE_MAP_KIND (c)
1852 != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
1853 || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1854 && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
1855 && varpool_node::get_create (decl)->offloadable)
1856 break;
1857 if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
1858 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)
1859 && is_omp_target (ctx->stmt)
1860 && !is_gimple_omp_offloaded (ctx->stmt))
1861 break;
1862 if (DECL_P (decl))
1864 if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
1865 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
1866 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
1867 && !COMPLETE_TYPE_P (TREE_TYPE (decl)))
1869 tree new_decl = lookup_decl (decl, ctx);
1870 TREE_TYPE (new_decl)
1871 = remap_type (TREE_TYPE (decl), &ctx->cb);
1873 else if (DECL_SIZE (decl)
1874 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1876 tree decl2 = DECL_VALUE_EXPR (decl);
1877 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1878 decl2 = TREE_OPERAND (decl2, 0);
1879 gcc_assert (DECL_P (decl2));
1880 fixup_remapped_decl (decl2, ctx, false);
1881 fixup_remapped_decl (decl, ctx, true);
1883 else
1884 fixup_remapped_decl (decl, ctx, false);
1886 break;
1888 case OMP_CLAUSE_COPYPRIVATE:
1889 case OMP_CLAUSE_COPYIN:
1890 case OMP_CLAUSE_DEFAULT:
1891 case OMP_CLAUSE_IF:
1892 case OMP_CLAUSE_NUM_THREADS:
1893 case OMP_CLAUSE_NUM_TEAMS:
1894 case OMP_CLAUSE_THREAD_LIMIT:
1895 case OMP_CLAUSE_DEVICE:
1896 case OMP_CLAUSE_SCHEDULE:
1897 case OMP_CLAUSE_DIST_SCHEDULE:
1898 case OMP_CLAUSE_NOWAIT:
1899 case OMP_CLAUSE_ORDERED:
1900 case OMP_CLAUSE_COLLAPSE:
1901 case OMP_CLAUSE_UNTIED:
1902 case OMP_CLAUSE_FINAL:
1903 case OMP_CLAUSE_MERGEABLE:
1904 case OMP_CLAUSE_PROC_BIND:
1905 case OMP_CLAUSE_SAFELEN:
1906 case OMP_CLAUSE_SIMDLEN:
1907 case OMP_CLAUSE_ALIGNED:
1908 case OMP_CLAUSE_DEPEND:
1909 case OMP_CLAUSE_DETACH:
1910 case OMP_CLAUSE_ALLOCATE:
1911 case OMP_CLAUSE__LOOPTEMP_:
1912 case OMP_CLAUSE__REDUCTEMP_:
1913 case OMP_CLAUSE_TO:
1914 case OMP_CLAUSE_FROM:
1915 case OMP_CLAUSE_PRIORITY:
1916 case OMP_CLAUSE_GRAINSIZE:
1917 case OMP_CLAUSE_NUM_TASKS:
1918 case OMP_CLAUSE_THREADS:
1919 case OMP_CLAUSE_SIMD:
1920 case OMP_CLAUSE_NOGROUP:
1921 case OMP_CLAUSE_DEFAULTMAP:
1922 case OMP_CLAUSE_ORDER:
1923 case OMP_CLAUSE_BIND:
1924 case OMP_CLAUSE_USE_DEVICE_PTR:
1925 case OMP_CLAUSE_USE_DEVICE_ADDR:
1926 case OMP_CLAUSE_NONTEMPORAL:
1927 case OMP_CLAUSE_ASYNC:
1928 case OMP_CLAUSE_WAIT:
1929 case OMP_CLAUSE_NUM_GANGS:
1930 case OMP_CLAUSE_NUM_WORKERS:
1931 case OMP_CLAUSE_VECTOR_LENGTH:
1932 case OMP_CLAUSE_GANG:
1933 case OMP_CLAUSE_WORKER:
1934 case OMP_CLAUSE_VECTOR:
1935 case OMP_CLAUSE_INDEPENDENT:
1936 case OMP_CLAUSE_AUTO:
1937 case OMP_CLAUSE_SEQ:
1938 case OMP_CLAUSE_TILE:
1939 case OMP_CLAUSE__SIMT_:
1940 case OMP_CLAUSE_IF_PRESENT:
1941 case OMP_CLAUSE_FINALIZE:
1942 case OMP_CLAUSE_FILTER:
1943 case OMP_CLAUSE__CONDTEMP_:
1944 break;
1946 case OMP_CLAUSE__CACHE_:
1947 case OMP_CLAUSE_NOHOST:
1948 default:
1949 gcc_unreachable ();
/* Finally, scan any GIMPLE sequences embedded in reduction,
   lastprivate or linear clauses (never present on OpenACC).  */
1953 gcc_checking_assert (!scan_array_reductions
1954 || !is_gimple_omp_oacc (ctx->stmt));
1955 if (scan_array_reductions)
1957 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1958 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
1959 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
1960 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
1961 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
1963 omp_context *rctx = ctx;
1964 if (is_omp_target (ctx->stmt))
1965 rctx = ctx->outer;
1966 scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c), rctx);
1967 scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), rctx);
1969 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
1970 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
1971 scan_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
1972 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
1973 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
1974 scan_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx)
1978 /* Create a new name for omp child function. Returns an identifier. */
1980 static tree
1981 create_omp_child_function_name (bool task_copy)
1983 return clone_function_name_numbered (current_function_decl,
1984 task_copy ? "_omp_cpyfn" : "_omp_fn");
1987 /* Return true if CTX may belong to offloaded code: either if current function
1988 is offloaded, or any enclosing context corresponds to a target region. */
1990 static bool
1991 omp_maybe_offloaded_ctx (omp_context *ctx)
1993 if (cgraph_node::get (current_function_decl)->offloadable)
1994 return true;
1995 for (; ctx; ctx = ctx->outer)
1996 if (is_gimple_omp_offloaded (ctx->stmt))
1997 return true;
1998 return false;
2001 /* Build a decl for the omp child function. It'll not contain a body
2002 yet, just the bare decl. */
/* TASK_COPY selects the task copy-function variant, which takes two
   pointer arguments (.omp_data_o, .omp_data_i) instead of one.  */
2004 static void
2005 create_omp_child_function (omp_context *ctx, bool task_copy)
2007 tree decl, type, name, t;
2009 name = create_omp_child_function_name (task_copy);
2010 if (task_copy)
2011 type = build_function_type_list (void_type_node, ptr_type_node,
2012 ptr_type_node, NULL_TREE);
2013 else
2014 type = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
2016 decl = build_decl (gimple_location (ctx->stmt), FUNCTION_DECL, name, type);
2018 gcc_checking_assert (!is_gimple_omp_oacc (ctx->stmt)
2019 || !task_copy);
2020 if (!task_copy)
2021 ctx->cb.dst_fn = decl;
2022 else
2023 gimple_omp_task_set_copy_fn (ctx->stmt, decl);
/* Mark the child as a static, artificial, never-inlined local function.  */
2025 TREE_STATIC (decl) = 1;
2026 TREE_USED (decl) = 1;
2027 DECL_ARTIFICIAL (decl) = 1;
2028 DECL_IGNORED_P (decl) = 0;
2029 TREE_PUBLIC (decl) = 0;
2030 DECL_UNINLINABLE (decl) = 1;
2031 DECL_EXTERNAL (decl) = 0;
2032 DECL_CONTEXT (decl) = NULL_TREE;
2033 DECL_INITIAL (decl) = make_node (BLOCK);
2034 BLOCK_SUPERCONTEXT (DECL_INITIAL (decl)) = decl;
2035 DECL_ATTRIBUTES (decl) = DECL_ATTRIBUTES (current_function_decl)
2036 /* Remove omp declare simd attribute from the new attributes. */
2037 if (tree a = lookup_attribute ("omp declare simd", DECL_ATTRIBUTES (decl)))
2039 while (tree a2 = lookup_attribute ("omp declare simd", TREE_CHAIN (a)))
2040 a = a2;
2041 a = TREE_CHAIN (a);
/* The attribute list is shared with current_function_decl up to A, so
   copy the nodes we modify rather than editing them in place.  */
2042 for (tree *p = &DECL_ATTRIBUTES (decl); *p != a;)
2043 if (is_attribute_p ("omp declare simd", get_attribute_name (*p)))
2044 *p = TREE_CHAIN (*p);
2045 else
2047 tree chain = TREE_CHAIN (*p);
2048 *p = copy_node (*p);
2049 p = &TREE_CHAIN (*p);
2050 *p = chain;
/* Inherit optimization/target options and versioning state.  */
2053 DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
2054 = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (current_function_decl);
2055 DECL_FUNCTION_SPECIFIC_TARGET (decl)
2056 = DECL_FUNCTION_SPECIFIC_TARGET (current_function_decl);
2057 DECL_FUNCTION_VERSIONED (decl)
2058 = DECL_FUNCTION_VERSIONED (current_function_decl);
2060 if (omp_maybe_offloaded_ctx (ctx))
2062 cgraph_node::get_create (decl)->offloadable = 1;
2063 if (ENABLE_OFFLOADING)
2064 g->have_offload = true;
/* For offloadable children, attach the appropriate target attribute:
   "omp target entrypoint" for offloaded regions, "omp declare target"
   otherwise; avoid duplicating an inherited "omp declare target".  */
2067 if (cgraph_node::get_create (decl)->offloadable)
2069 const char *target_attr = (is_gimple_omp_offloaded (ctx->stmt)
2070 ? "omp target entrypoint"
2071 : "omp declare target");
2072 if (lookup_attribute ("omp declare target",
2073 DECL_ATTRIBUTES (current_function_decl)))
2075 if (is_gimple_omp_offloaded (ctx->stmt))
2076 DECL_ATTRIBUTES (decl)
2077 = remove_attribute ("omp declare target",
2078 copy_list (DECL_ATTRIBUTES (decl)));
2079 else
2080 target_attr = NULL;
2082 if (target_attr)
2083 DECL_ATTRIBUTES (decl)
2084 = tree_cons (get_identifier (target_attr),
2085 NULL_TREE, DECL_ATTRIBUTES (decl));
/* The child returns void.  */
2088 t = build_decl (DECL_SOURCE_LOCATION (decl),
2089 RESULT_DECL, NULL_TREE, void_type_node);
2090 DECL_ARTIFICIAL (t) = 1;
2091 DECL_IGNORED_P (t) = 1;
2092 DECL_CONTEXT (t) = decl;
2093 DECL_RESULT (decl) = t;
/* First parameter: .omp_data_i, the incoming marshalled data pointer.
   For the non-copy variant it doubles as the receiver decl.  */
2095 tree data_name = get_identifier (".omp_data_i");
2096 t = build_decl (DECL_SOURCE_LOCATION (decl), PARM_DECL, data_name,
2097 ptr_type_node);
2098 DECL_ARTIFICIAL (t) = 1;
2099 DECL_NAMELESS (t) = 1;
2100 DECL_ARG_TYPE (t) = ptr_type_node;
2101 DECL_CONTEXT (t) = current_function_decl;
2102 TREE_USED (t) = 1;
2103 TREE_READONLY (t) = 1;
2104 DECL_ARGUMENTS (decl) = t;
2105 if (!task_copy)
2106 ctx->receiver_decl = t;
2107 else
2109 t = build_decl (DECL_SOURCE_LOCATION (decl),
2110 PARM_DECL, get_identifier (".omp_data_o"),
2111 ptr_type_node);
2112 DECL_ARTIFICIAL (t) = 1;
2113 DECL_NAMELESS (t) = 1;
2114 DECL_ARG_TYPE (t) = ptr_type_node;
2115 DECL_CONTEXT (t) = current_function_decl;
2116 TREE_USED (t) = 1;
2117 TREE_ADDRESSABLE (t) = 1;
2118 DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
2119 DECL_ARGUMENTS (decl) = t;
2122 /* Allocate memory for the function structure. The call to
2123 allocate_struct_function clobbers CFUN, so we need to restore
2124 it afterward. */
2125 push_struct_function (decl);
2126 cfun->function_end_locus = gimple_location (ctx->stmt);
2127 init_tree_ssa (cfun);
2128 pop_cfun ()
2131 /* Callback for walk_gimple_seq. Check if combined parallel
2132 contains gimple_omp_for_combined_into_p OMP_FOR. */
2134 tree
2135 omp_find_combined_for (gimple_stmt_iterator *gsi_p,
2136 bool *handled_ops_p,
2137 struct walk_stmt_info *wi)
2139 gimple *stmt = gsi_stmt (*gsi_p);
2141 *handled_ops_p = true;
2142 switch (gimple_code (stmt))
2144 WALK_SUBSTMTS;
2146 case GIMPLE_OMP_FOR:
2147 if (gimple_omp_for_combined_into_p (stmt)
2148 && gimple_omp_for_kind (stmt)
2149 == *(const enum gf_mask *) (wi->info))
2151 wi->info = stmt;
2152 return integer_zero_node;
2154 break;
2155 default:
2156 break;
2158 return NULL;
2161 /* Add _LOOPTEMP_/_REDUCTEMP_ clauses on OpenMP parallel or task.
   MSK is the kind of combined inner GIMPLE_OMP_FOR to look for
   (GF_OMP_FOR_KIND_FOR for a combined parallel, GF_OMP_FOR_KIND_TASKLOOP
   for a taskloop-generated task); STMT is the parallel/task statement and
   OUTER_CTX is its enclosing context, whose decl map receives identity
   mappings for the created temporaries.  */
2163 static void
2164 add_taskreg_looptemp_clauses (enum gf_mask msk, gimple *stmt,
2165 			      omp_context *outer_ctx)
2167 struct walk_stmt_info wi;
/* Walk STMT's body for a combined-into GIMPLE_OMP_FOR of kind MSK;
   omp_find_combined_for replaces wi.info with that stmt on success,
   so wi.info still pointing at &msk means "not found".  */
2169 memset (&wi, 0, sizeof (wi));
2170 wi.val_only = true;
2171 wi.info = (void *) &msk;
2172 walk_gimple_seq (gimple_omp_body (stmt), omp_find_combined_for, NULL, &wi);
2173 if (wi.info != (void *) &msk)
2175 gomp_for *for_stmt = as_a <gomp_for *> ((gimple *) wi.info);
2176 struct omp_for_data fd;
2177 omp_extract_for_data (for_stmt, &fd, NULL);
2178 /* We need two temporaries with fd.loop.v type (istart/iend)
2179 and then (fd.collapse - 1) temporaries with the same
2180 type for count2 ... countN-1 vars if not constant.  */
2181 size_t count = 2, i;
2182 tree type = fd.iter_type;
2183 if (fd.collapse > 1
2184 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
2186 count += fd.collapse - 1;
2187 /* If there are lastprivate clauses on the inner
2188 GIMPLE_OMP_FOR, add one more temporary for the total number
2189 of iterations (product of count1 ... countN-1).  */
2190 if (omp_find_clause (gimple_omp_for_clauses (for_stmt),
2191 OMP_CLAUSE_LASTPRIVATE)
2192 || (msk == GF_OMP_FOR_KIND_FOR
2193 && omp_find_clause (gimple_omp_parallel_clauses (stmt),
2194 OMP_CLAUSE_LASTPRIVATE)))
2196 tree temp = create_tmp_var (type);
2197 tree c = build_omp_clause (UNKNOWN_LOCATION,
2198 OMP_CLAUSE__LOOPTEMP_);
2199 insert_decl_map (&outer_ctx->cb, temp, temp);
2200 OMP_CLAUSE_DECL (c) = temp;
2201 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
2202 gimple_omp_taskreg_set_clauses (stmt, c);
/* A non-rectangular nest where the two involved loops are adjacent
   and the inner index is signed needs three further temporaries of
   the index type (plus one more of TYPE, via count++).  */
2204 if (fd.non_rect
2205 && fd.last_nonrect == fd.first_nonrect + 1)
2206 if (tree v = gimple_omp_for_index (for_stmt, fd.last_nonrect))
2207 if (!TYPE_UNSIGNED (TREE_TYPE (v)))
2209 v = gimple_omp_for_index (for_stmt, fd.first_nonrect);
2210 tree type2 = TREE_TYPE (v);
2211 count++;
2212 for (i = 0; i < 3; i++)
2214 tree temp = create_tmp_var (type2);
2215 tree c = build_omp_clause (UNKNOWN_LOCATION,
2216 OMP_CLAUSE__LOOPTEMP_);
2217 insert_decl_map (&outer_ctx->cb, temp, temp);
2218 OMP_CLAUSE_DECL (c) = temp;
2219 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
2220 gimple_omp_taskreg_set_clauses (stmt, c);
/* Create the COUNT _looptemp_ temporaries of the iterator type, each
   prepended as a clause onto the parallel/task clause chain.  */
2224 for (i = 0; i < count; i++)
2226 tree temp = create_tmp_var (type);
2227 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
2228 insert_decl_map (&outer_ctx->cb, temp, temp);
2229 OMP_CLAUSE_DECL (c) = temp;
2230 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
2231 gimple_omp_taskreg_set_clauses (stmt, c);
/* A taskloop with a reduction additionally gets one _reductemp_
   pointer-sized temporary.  */
2234 if (msk == GF_OMP_FOR_KIND_TASKLOOP
2235 && omp_find_clause (gimple_omp_task_clauses (stmt),
2236 OMP_CLAUSE_REDUCTION))
2238 tree type = build_pointer_type (pointer_sized_int_node);
2239 tree temp = create_tmp_var (type);
2240 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
2241 insert_decl_map (&outer_ctx->cb, temp, temp);
2242 OMP_CLAUSE_DECL (c) = temp;
2243 OMP_CLAUSE_CHAIN (c) = gimple_omp_task_clauses (stmt);
2244 gimple_omp_task_set_clauses (stmt, c);
2248 /* Scan an OpenMP parallel directive.  Builds the omp_context for the
   region, the .omp_data_s record type that carries shared data, and the
   outlined child function; GSI points at the parallel stmt and OUTER_CTX
   is the enclosing context (may be NULL).  */
2250 static void
2251 scan_omp_parallel (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
2253 omp_context *ctx;
2254 tree name;
2255 gomp_parallel *stmt = as_a <gomp_parallel *> (gsi_stmt (*gsi));
2257 /* Ignore parallel directives with empty bodies, unless there
2258 are copyin clauses.  */
2259 if (optimize > 0
2260 && empty_body_p (gimple_omp_body (stmt))
2261 && omp_find_clause (gimple_omp_parallel_clauses (stmt),
2262 OMP_CLAUSE_COPYIN) == NULL)
2264 gsi_replace (gsi, gimple_build_nop (), false);
2265 return;
/* A combined "parallel for" needs _looptemp_ clauses for the inner
   loop's bounds (see add_taskreg_looptemp_clauses).  */
2268 if (gimple_omp_parallel_combined_p (stmt))
2269 add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_FOR, stmt, outer_ctx)
/* If any reduction clause is a task reduction, prepend one _reductemp_
   clause for the runtime's reduction bookkeeping and stop looking.  */;
2270 for (tree c = omp_find_clause (gimple_omp_parallel_clauses (stmt),
2271 OMP_CLAUSE_REDUCTION);
2272 c; c = omp_find_clause (OMP_CLAUSE_CHAIN (c), OMP_CLAUSE_REDUCTION))
2273 if (OMP_CLAUSE_REDUCTION_TASK (c))
2275 tree type = build_pointer_type (pointer_sized_int_node);
2276 tree temp = create_tmp_var (type);
2277 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
2278 if (outer_ctx)
2279 insert_decl_map (&outer_ctx->cb, temp, temp);
2280 OMP_CLAUSE_DECL (c) = temp;
2281 OMP_CLAUSE_CHAIN (c) = gimple_omp_parallel_clauses (stmt);
2282 gimple_omp_parallel_set_clauses (stmt, c);
2283 break;
2285 else if (OMP_CLAUSE_CHAIN (c) == NULL_TREE)
2286 break;
/* Build the region context, the sender/receiver record type and the
   outlined child function the runtime will invoke.  */
2288 ctx = new_omp_context (stmt, outer_ctx);
2289 taskreg_contexts.safe_push (ctx);
2290 if (taskreg_nesting_level > 1)
2291 ctx->is_nested = true;
2292 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2293 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2294 name = create_tmp_var_name (".omp_data_s");
2295 name = build_decl (gimple_location (stmt),
2296 TYPE_DECL, name, ctx->record_type);
2297 DECL_ARTIFICIAL (name) = 1;
2298 DECL_NAMELESS (name) = 1;
2299 TYPE_NAME (ctx->record_type) = name;
2300 TYPE_ARTIFICIAL (ctx->record_type) = 1;
2301 create_omp_child_function (ctx, false);
2302 gimple_omp_parallel_set_child_fn (stmt, ctx->cb.dst_fn);
2304 scan_sharing_clauses (gimple_omp_parallel_clauses (stmt), ctx);
2305 scan_omp (gimple_omp_body_ptr (stmt), ctx);
/* No field was added, so nothing needs to be marshalled.  */
2307 if (TYPE_FIELDS (ctx->record_type) == NULL)
2308 ctx->record_type = ctx->receiver_decl = NULL;
2311 /* Scan an OpenMP task directive.  Like scan_omp_parallel, builds the
   context, the .omp_data_s record and the child function; additionally
   builds a sender-side record (.omp_data_a) when firstprivate copies
   require it (ctx->srecord_type).  */
2313 static void
2314 scan_omp_task (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
2316 omp_context *ctx;
2317 tree name, t;
2318 gomp_task *stmt = as_a <gomp_task *> (gsi_stmt (*gsi));
2320 /* Ignore task directives with empty bodies, unless they have depend
2321 clause.  */
2322 if (optimize > 0
2323 && gimple_omp_body (stmt)
2324 && empty_body_p (gimple_omp_body (stmt))
2325 && !omp_find_clause (gimple_omp_task_clauses (stmt), OMP_CLAUSE_DEPEND))
2327 gsi_replace (gsi, gimple_build_nop (), false);
2328 return;
/* Taskloop-generated tasks need _looptemp_ (and possibly _reductemp_)
   clauses for the inner loop bounds.  */
2331 if (gimple_omp_task_taskloop_p (stmt))
2332 add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_TASKLOOP, stmt, outer_ctx);
2334 ctx = new_omp_context (stmt, outer_ctx);
/* A taskwait-with-depend task has no body to outline; only its clauses
   need scanning.  */
2336 if (gimple_omp_task_taskwait_p (stmt))
2338 scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);
2339 return;
2342 taskreg_contexts.safe_push (ctx);
2343 if (taskreg_nesting_level > 1)
2344 ctx->is_nested = true;
2345 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2346 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2347 name = create_tmp_var_name (".omp_data_s");
2348 name = build_decl (gimple_location (stmt),
2349 TYPE_DECL, name, ctx->record_type);
2350 DECL_ARTIFICIAL (name) = 1;
2351 DECL_NAMELESS (name) = 1;
2352 TYPE_NAME (ctx->record_type) = name;
2353 TYPE_ARTIFICIAL (ctx->record_type) = 1;
2354 create_omp_child_function (ctx, false);
2355 gimple_omp_task_set_child_fn (stmt, ctx->cb.dst_fn);
2357 scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);
/* scan_sharing_clauses may have created a sender record; give it a
   name and create the task_copy variant of the child function.  */
2359 if (ctx->srecord_type)
2361 name = create_tmp_var_name (".omp_data_a");
2362 name = build_decl (gimple_location (stmt),
2363 TYPE_DECL, name, ctx->srecord_type);
2364 DECL_ARTIFICIAL (name) = 1;
2365 DECL_NAMELESS (name) = 1;
2366 TYPE_NAME (ctx->srecord_type) = name;
2367 TYPE_ARTIFICIAL (ctx->srecord_type) = 1;
2368 create_omp_child_function (ctx, true);
2371 scan_omp (gimple_omp_body_ptr (stmt), ctx);
/* No data to marshal: drop the record and tell the runtime the
   argument block has size 0 and alignment 1.  */
2373 if (TYPE_FIELDS (ctx->record_type) == NULL)
2375 ctx->record_type = ctx->receiver_decl = NULL;
2376 t = build_int_cst (long_integer_type_node, 0);
2377 gimple_omp_task_set_arg_size (stmt, t);
2378 t = build_int_cst (long_integer_type_node, 1);
2379 gimple_omp_task_set_arg_align (stmt, t);
2383 /* Helper function for finish_taskreg_scan, called through walk_tree.
2384 If maybe_lookup_decl_in_outer_context returns non-NULL for some
2385 tree, replace it in the expression. */
2387 static tree
2388 finish_taskreg_remap (tree *tp, int *walk_subtrees, void *data)
2390 if (VAR_P (*tp))
2392 omp_context *ctx = (omp_context *) data;
2393 tree t = maybe_lookup_decl_in_outer_ctx (*tp, ctx);
2394 if (t != *tp)
2396 if (DECL_HAS_VALUE_EXPR_P (t))
2397 t = unshare_expr (DECL_VALUE_EXPR (t));
2398 *tp = t;
2400 *walk_subtrees = 0;
2402 else if (IS_TYPE_OR_DECL_P (*tp))
2403 *walk_subtrees = 0;
2404 return NULL_TREE;
2407 /* If any decls have been made addressable during scan_omp,
2408 adjust their fields if needed, and layout record types
2409 of parallel/task constructs.  CTX is a parallel/task/teams context
   pushed onto taskreg_contexts during scanning.  */
2411 static void
2412 finish_taskreg_scan (omp_context *ctx)
2414 if (ctx->record_type == NULL_TREE)
2415 return;
2417 /* If any make_addressable_vars were needed, verify all
2418 OMP_CLAUSE_SHARED clauses on GIMPLE_OMP_{PARALLEL,TASK,TEAMS}
2419 statements if use_pointer_for_field hasn't changed
2420 because of that.  If it did, update field types now.  */
2421 if (make_addressable_vars)
2423 tree c;
2425 for (c = gimple_omp_taskreg_clauses (ctx->stmt);
2426 c; c = OMP_CLAUSE_CHAIN (c))
2427 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
2428 && !OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
2430 tree decl = OMP_CLAUSE_DECL (c);
2432 /* Global variables don't need to be copied,
2433 the receiver side will use them directly.  */
2434 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
2435 continue;
2436 if (!bitmap_bit_p (make_addressable_vars, DECL_UID (decl))
2437 || !use_pointer_for_field (decl, ctx))
2438 continue;
/* The field must now hold a pointer to DECL; skip if it already
   does, otherwise retype and realign it (and its sender twin).  */
2439 tree field = lookup_field (decl, ctx);
2440 if (TREE_CODE (TREE_TYPE (field)) == POINTER_TYPE
2441 && TREE_TYPE (TREE_TYPE (field)) == TREE_TYPE (decl))
2442 continue;
2443 TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
2444 TREE_THIS_VOLATILE (field) = 0;
2445 DECL_USER_ALIGN (field) = 0;
2446 SET_DECL_ALIGN (field, TYPE_ALIGN (TREE_TYPE (field)));
2447 if (TYPE_ALIGN (ctx->record_type) < DECL_ALIGN (field))
2448 SET_TYPE_ALIGN (ctx->record_type, DECL_ALIGN (field));
2449 if (ctx->srecord_type)
2451 tree sfield = lookup_sfield (decl, ctx);
2452 TREE_TYPE (sfield) = TREE_TYPE (field);
2453 TREE_THIS_VOLATILE (sfield) = 0;
2454 DECL_USER_ALIGN (sfield) = 0;
2455 SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
2456 if (TYPE_ALIGN (ctx->srecord_type) < DECL_ALIGN (sfield))
2457 SET_TYPE_ALIGN (ctx->srecord_type, DECL_ALIGN (sfield));
/* Now lay the record type(s) out, with construct-specific field
   reordering first.  */
2462 if (gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL)
2464 tree clauses = gimple_omp_parallel_clauses (ctx->stmt);
2465 tree c = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
2466 if (c)
2468 /* Move the _reductemp_ clause first.  GOMP_parallel_reductions
2469 expects to find it at the start of data.  */
2470 tree f = lookup_field (OMP_CLAUSE_DECL (c), ctx);
2471 tree *p = &TYPE_FIELDS (ctx->record_type);
2472 while (*p)
2473 if (*p == f)
2475 *p = DECL_CHAIN (*p);
2476 break;
2478 else
2479 p = &DECL_CHAIN (*p);
2480 DECL_CHAIN (f) = TYPE_FIELDS (ctx->record_type);
2481 TYPE_FIELDS (ctx->record_type) = f;
2483 layout_type (ctx->record_type);
2484 fixup_child_record_type (ctx);
2486 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
2488 layout_type (ctx->record_type);
2489 fixup_child_record_type (ctx);
/* Remaining case: GIMPLE_OMP_TASK.  */
2491 else
2493 location_t loc = gimple_location (ctx->stmt);
2494 tree *p, vla_fields = NULL_TREE, *q = &vla_fields;
2495 tree detach_clause
2496 = omp_find_clause (gimple_omp_task_clauses (ctx->stmt),
2497 OMP_CLAUSE_DETACH);
2498 /* Move VLA fields to the end.  */
2499 p = &TYPE_FIELDS (ctx->record_type);
2500 while (*p)
2501 if (!TYPE_SIZE_UNIT (TREE_TYPE (*p))
2502 || ! TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (*p))))
2504 *q = *p;
2505 *p = TREE_CHAIN (*p);
2506 TREE_CHAIN (*q) = NULL_TREE;
2507 q = &TREE_CHAIN (*q);
2509 else
2510 p = &DECL_CHAIN (*p);
2511 *p = vla_fields;
2512 if (gimple_omp_task_taskloop_p (ctx->stmt))
2514 /* Move fields corresponding to first and second _looptemp_
2515 clause first.  These are filled by GOMP_taskloop
2516 and thus need to be in specific positions.  */
2517 tree clauses = gimple_omp_task_clauses (ctx->stmt);
2518 tree c1 = omp_find_clause (clauses, OMP_CLAUSE__LOOPTEMP_);
2519 tree c2 = omp_find_clause (OMP_CLAUSE_CHAIN (c1),
2520 OMP_CLAUSE__LOOPTEMP_);
2521 tree c3 = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
2522 tree f1 = lookup_field (OMP_CLAUSE_DECL (c1), ctx);
2523 tree f2 = lookup_field (OMP_CLAUSE_DECL (c2), ctx);
2524 tree f3 = c3 ? lookup_field (OMP_CLAUSE_DECL (c3), ctx) : NULL_TREE;
/* Unlink f1/f2/f3 wherever they are, then relink them at the head
   in order f1, f2[, f3].  */
2525 p = &TYPE_FIELDS (ctx->record_type);
2526 while (*p)
2527 if (*p == f1 || *p == f2 || *p == f3)
2528 *p = DECL_CHAIN (*p);
2529 else
2530 p = &DECL_CHAIN (*p);
2531 DECL_CHAIN (f1) = f2;
2532 if (c3)
2534 DECL_CHAIN (f2) = f3;
2535 DECL_CHAIN (f3) = TYPE_FIELDS (ctx->record_type);
2537 else
2538 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->record_type);
2539 TYPE_FIELDS (ctx->record_type) = f1;
/* Mirror the same reordering in the sender-side record.  */
2540 if (ctx->srecord_type)
2542 f1 = lookup_sfield (OMP_CLAUSE_DECL (c1), ctx);
2543 f2 = lookup_sfield (OMP_CLAUSE_DECL (c2), ctx);
2544 if (c3)
2545 f3 = lookup_sfield (OMP_CLAUSE_DECL (c3), ctx);
2546 p = &TYPE_FIELDS (ctx->srecord_type);
2547 while (*p)
2548 if (*p == f1 || *p == f2 || *p == f3)
2549 *p = DECL_CHAIN (*p);
2550 else
2551 p = &DECL_CHAIN (*p);
2552 DECL_CHAIN (f1) = f2;
2553 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
2554 if (c3)
2556 DECL_CHAIN (f2) = f3;
2557 DECL_CHAIN (f3) = TYPE_FIELDS (ctx->srecord_type);
2559 else
2560 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
2561 TYPE_FIELDS (ctx->srecord_type) = f1;
2564 if (detach_clause)
2566 tree c, field;
2568 /* Look for a firstprivate clause with the detach event handle.  */
2569 for (c = gimple_omp_taskreg_clauses (ctx->stmt);
2570 c; c = OMP_CLAUSE_CHAIN (c))
2572 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
2573 continue;
2574 if (maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_DECL (c), ctx)
2575 == OMP_CLAUSE_DECL (detach_clause))
2576 break;
2579 gcc_assert (c);
2580 field = lookup_field (OMP_CLAUSE_DECL (c), ctx);
2582 /* Move field corresponding to the detach clause first.
2583 This is filled by GOMP_task and needs to be in a
2584 specific position.  */
2585 p = &TYPE_FIELDS (ctx->record_type);
2586 while (*p)
2587 if (*p == field)
2588 *p = DECL_CHAIN (*p);
2589 else
2590 p = &DECL_CHAIN (*p);
2591 DECL_CHAIN (field) = TYPE_FIELDS (ctx->record_type);
2592 TYPE_FIELDS (ctx->record_type) = field;
2593 if (ctx->srecord_type)
2595 field = lookup_sfield (OMP_CLAUSE_DECL (c), ctx);
2596 p = &TYPE_FIELDS (ctx->srecord_type);
2597 while (*p)
2598 if (*p == field)
2599 *p = DECL_CHAIN (*p);
2600 else
2601 p = &DECL_CHAIN (*p);
2602 DECL_CHAIN (field) = TYPE_FIELDS (ctx->srecord_type);
2603 TYPE_FIELDS (ctx->srecord_type) = field;
2606 layout_type (ctx->record_type);
2607 fixup_child_record_type (ctx);
2608 if (ctx->srecord_type)
2609 layout_type (ctx->srecord_type);
/* Record the argument block size/alignment on the task stmt.  A
   non-constant size (VLAs) must be remapped to outer-context decls.  */
2610 tree t = fold_convert_loc (loc, long_integer_type_node,
2611 TYPE_SIZE_UNIT (ctx->record_type));
2612 if (TREE_CODE (t) != INTEGER_CST)
2614 t = unshare_expr (t);
2615 walk_tree (&t, finish_taskreg_remap, ctx, NULL);
2617 gimple_omp_task_set_arg_size (ctx->stmt, t);
2618 t = build_int_cst (long_integer_type_node,
2619 TYPE_ALIGN_UNIT (ctx->record_type));
2620 gimple_omp_task_set_arg_align (ctx->stmt, t);
2624 /* Find the enclosing offload context. */
2626 static omp_context *
2627 enclosing_target_ctx (omp_context *ctx)
2629 for (; ctx; ctx = ctx->outer)
2630 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET)
2631 break;
2633 return ctx;
2636 /* Return whether CTX's parent compute construct is an OpenACC 'kernels'
2637 construct.
2638 (This doesn't include OpenACC 'kernels' decomposed parts.) */
2640 static bool
2641 ctx_in_oacc_kernels_region (omp_context *ctx)
2643 for (;ctx != NULL; ctx = ctx->outer)
2645 gimple *stmt = ctx->stmt;
2646 if (gimple_code (stmt) == GIMPLE_OMP_TARGET
2647 && gimple_omp_target_kind (stmt) == GF_OMP_TARGET_KIND_OACC_KERNELS)
2648 return true;
2651 return false;
2654 /* Check the parallelism clauses inside a OpenACC 'kernels' region.
2655 (This doesn't include OpenACC 'kernels' decomposed parts.)
2656 Until kernels handling moves to use the same loop indirection
2657 scheme as parallel, we need to do this checking early.
   STMT is NULL when recursing on outer contexts: then only the mask of
   gang/worker/vector parallelism used by the ancestors is collected and
   no diagnostics are emitted.  Returns the combined mask of this loop
   and all enclosing loops.  */
2659 static unsigned
2660 check_oacc_kernel_gwv (gomp_for *stmt, omp_context *ctx)
2662 bool checking = true;
2663 unsigned outer_mask = 0;
2664 unsigned this_mask = 0;
2665 bool has_seq = false, has_auto = false;
/* Recurse outward first so outer_mask holds the ancestors' usage.  */
2667 if (ctx->outer)
2668 outer_mask = check_oacc_kernel_gwv (NULL, ctx->outer);
2669 if (!stmt)
2671 checking = false;
2672 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR)
2673 return outer_mask;
2674 stmt = as_a <gomp_for *> (ctx->stmt);
/* Accumulate this loop's gang/worker/vector bits and note seq/auto.  */
2677 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2679 switch (OMP_CLAUSE_CODE (c))
2681 case OMP_CLAUSE_GANG:
2682 this_mask |= GOMP_DIM_MASK (GOMP_DIM_GANG);
2683 break;
2684 case OMP_CLAUSE_WORKER:
2685 this_mask |= GOMP_DIM_MASK (GOMP_DIM_WORKER);
2686 break;
2687 case OMP_CLAUSE_VECTOR:
2688 this_mask |= GOMP_DIM_MASK (GOMP_DIM_VECTOR);
2689 break;
2690 case OMP_CLAUSE_SEQ:
2691 has_seq = true;
2692 break;
2693 case OMP_CLAUSE_AUTO:
2694 has_auto = true;
2695 break;
2696 default:
2697 break;
/* Diagnostics only for the loop the caller actually passed in.  */
2701 if (checking)
2703 if (has_seq && (this_mask || has_auto))
2704 error_at (gimple_location (stmt), "%<seq%> overrides other"
2705 " OpenACC loop specifiers");
2706 else if (has_auto && this_mask)
2707 error_at (gimple_location (stmt), "%<auto%> conflicts with other"
2708 " OpenACC loop specifiers");
2710 if (this_mask & outer_mask)
2711 error_at (gimple_location (stmt), "inner loop uses same"
2712 " OpenACC parallelism as containing loop");
2715 return outer_mask | this_mask;
2718 /* Scan a GIMPLE_OMP_FOR.  Creates and returns the loop's omp_context;
   for OpenACC loops it additionally diagnoses invalid gang/worker/vector
   arguments and reduction nesting, and inside 'kernels' regions strips
   reduction clauses (not handled there yet).  */
2720 static omp_context *
2721 scan_omp_for (gomp_for *stmt, omp_context *outer_ctx)
2723 omp_context *ctx;
2724 size_t i;
2725 tree clauses = gimple_omp_for_clauses (stmt);
2727 ctx = new_omp_context (stmt, outer_ctx);
2729 if (is_gimple_omp_oacc (stmt))
2731 omp_context *tgt = enclosing_target_ctx (outer_ctx);
/* Outside a 'kernels' region, gang/worker/vector clauses must not
   carry an argument expression; diagnose any that do.  */
2733 if (!(tgt && is_oacc_kernels (tgt)))
2734 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
2736 tree c_op0;
2737 switch (OMP_CLAUSE_CODE (c))
2739 case OMP_CLAUSE_GANG:
2740 c_op0 = OMP_CLAUSE_GANG_EXPR (c);
2741 break;
2743 case OMP_CLAUSE_WORKER:
2744 c_op0 = OMP_CLAUSE_WORKER_EXPR (c);
2745 break;
2747 case OMP_CLAUSE_VECTOR:
2748 c_op0 = OMP_CLAUSE_VECTOR_EXPR (c);
2749 break;
2751 default:
2752 continue;
2755 if (c_op0)
2757 /* By construction, this is impossible for OpenACC 'kernels'
2758 decomposed parts.  */
2759 gcc_assert (!(tgt && is_oacc_kernels_decomposed_part (tgt)));
2761 error_at (OMP_CLAUSE_LOCATION (c),
2762 "argument not permitted on %qs clause",
2763 omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
2764 if (tgt)
2765 inform (gimple_location (tgt->stmt),
2766 "enclosing parent compute construct");
2767 else if (oacc_get_fn_attrib (current_function_decl))
2768 inform (DECL_SOURCE_LOCATION (current_function_decl),
2769 "enclosing routine");
2770 else
2771 gcc_unreachable ();
2775 if (tgt && is_oacc_kernels (tgt))
2776 check_oacc_kernel_gwv (stmt, ctx);
2778 /* Collect all variables named in reductions on this loop.  Ensure
2779 that, if this loop has a reduction on some variable v, and there is
2780 a reduction on v somewhere in an outer context, then there is a
2781 reduction on v on all intervening loops as well.  */
2782 tree local_reduction_clauses = NULL;
2783 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2785 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
2786 local_reduction_clauses
2787 = tree_cons (NULL, c, local_reduction_clauses);
/* Lazily compute the outer reduction list: the parent's local
   reductions chained with the parent's own outer list.  */
2789 if (ctx->outer_reduction_clauses == NULL && ctx->outer != NULL)
2790 ctx->outer_reduction_clauses
2791 = chainon (unshare_expr (ctx->outer->local_reduction_clauses),
2792 ctx->outer->outer_reduction_clauses);
2793 tree outer_reduction_clauses = ctx->outer_reduction_clauses;
2794 tree local_iter = local_reduction_clauses;
2795 for (; local_iter; local_iter = TREE_CHAIN (local_iter))
2797 tree local_clause = TREE_VALUE (local_iter);
2798 tree local_var = OMP_CLAUSE_DECL (local_clause);
2799 tree_code local_op = OMP_CLAUSE_REDUCTION_CODE (local_clause);
2800 bool have_outer_reduction = false;
2801 tree ctx_iter = outer_reduction_clauses;
2802 for (; ctx_iter; ctx_iter = TREE_CHAIN (ctx_iter))
2804 tree outer_clause = TREE_VALUE (ctx_iter);
2805 tree outer_var = OMP_CLAUSE_DECL (outer_clause);
2806 tree_code outer_op = OMP_CLAUSE_REDUCTION_CODE (outer_clause);
2807 if (outer_var == local_var && outer_op != local_op)
2809 warning_at (OMP_CLAUSE_LOCATION (local_clause), 0,
2810 "conflicting reduction operations for %qE",
2811 local_var);
2812 inform (OMP_CLAUSE_LOCATION (outer_clause),
2813 "location of the previous reduction for %qE",
2814 outer_var);
2816 if (outer_var == local_var)
2818 have_outer_reduction = true;
2819 break;
2822 if (have_outer_reduction)
2824 /* There is a reduction on outer_var both on this loop and on
2825 some enclosing loop.  Walk up the context tree until such a
2826 loop with a reduction on outer_var is found, and complain
2827 about all intervening loops that do not have such a
2828 reduction.  */
2829 struct omp_context *curr_loop = ctx->outer;
2830 bool found = false;
2831 while (curr_loop != NULL)
2833 tree curr_iter = curr_loop->local_reduction_clauses;
2834 for (; curr_iter; curr_iter = TREE_CHAIN (curr_iter))
2836 tree curr_clause = TREE_VALUE (curr_iter);
2837 tree curr_var = OMP_CLAUSE_DECL (curr_clause);
2838 if (curr_var == local_var)
2840 found = true;
2841 break;
2844 if (!found)
2845 warning_at (gimple_location (curr_loop->stmt), 0,
2846 "nested loop in reduction needs "
2847 "reduction clause for %qE",
2848 local_var);
2849 else
2850 break;
2851 curr_loop = curr_loop->outer;
2855 ctx->local_reduction_clauses = local_reduction_clauses;
2856 ctx->outer_reduction_clauses
2857 = chainon (unshare_expr (ctx->local_reduction_clauses),
2858 ctx->outer_reduction_clauses);
2860 if (tgt && is_oacc_kernels (tgt))
2862 /* Strip out reductions, as they are not handled yet.  */
2863 tree *prev_ptr = &clauses;
2865 while (tree probe = *prev_ptr)
2867 tree *next_ptr = &OMP_CLAUSE_CHAIN (probe);
2869 if (OMP_CLAUSE_CODE (probe) == OMP_CLAUSE_REDUCTION)
2870 *prev_ptr = *next_ptr;
2871 else
2872 prev_ptr = next_ptr;
2875 gimple_omp_for_set_clauses (stmt, clauses);
2879 scan_sharing_clauses (clauses, ctx);
/* Scan the pre-body and every collapsed dimension's index, bounds
   and increment, then the loop body itself.  */
2881 scan_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
2882 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
2884 scan_omp_op (gimple_omp_for_index_ptr (stmt, i), ctx);
2885 scan_omp_op (gimple_omp_for_initial_ptr (stmt, i), ctx);
2886 scan_omp_op (gimple_omp_for_final_ptr (stmt, i), ctx);
2887 scan_omp_op (gimple_omp_for_incr_ptr (stmt, i), ctx);
2889 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2890 return ctx;
2893 /* Duplicate #pragma omp simd, one for SIMT, another one for SIMD.
   The stmt is replaced by a bind containing
     cond = GOMP_USE_SIMT ();
     if (cond) { SIMT copy of the loop (with _simt_ clause) }
     else      { original SIMD loop }
   so the target can pick the variant at a later stage.  */
2895 static void
2896 scan_omp_simd (gimple_stmt_iterator *gsi, gomp_for *stmt,
2897 omp_context *outer_ctx)
2899 gbind *bind = gimple_build_bind (NULL, NULL, NULL);
2900 gsi_replace (gsi, bind, false);
2901 gimple_seq seq = NULL;
/* cond = GOMP_USE_SIMT ();  decides at lowering time which copy runs.  */
2902 gimple *g = gimple_build_call_internal (IFN_GOMP_USE_SIMT, 0);
2903 tree cond = create_tmp_var_raw (integer_type_node);
2904 DECL_CONTEXT (cond) = current_function_decl;
2905 DECL_SEEN_IN_BIND_EXPR_P (cond) = 1;
2906 gimple_bind_set_vars (bind, cond);
2907 gimple_call_set_lhs (g, cond);
2908 gimple_seq_add_stmt (&seq, g);
2909 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
2910 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
2911 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
2912 g = gimple_build_cond (NE_EXPR, cond, integer_zero_node, lab1, lab2);
2913 gimple_seq_add_stmt (&seq, g);
2914 g = gimple_build_label (lab1);
2915 gimple_seq_add_stmt (&seq, g);
/* The SIMT copy: a deep copy of the loop, marked with a _simt_ clause.  */
2916 gimple_seq new_seq = copy_gimple_seq_and_replace_locals (stmt);
2917 gomp_for *new_stmt = as_a <gomp_for *> (new_seq);
2918 tree clause = build_omp_clause (gimple_location (stmt), OMP_CLAUSE__SIMT_);
2919 OMP_CLAUSE_CHAIN (clause) = gimple_omp_for_clauses (new_stmt);
2920 gimple_omp_for_set_clauses (new_stmt, clause);
2921 gimple_seq_add_stmt (&seq, new_stmt);
2922 g = gimple_build_goto (lab3);
2923 gimple_seq_add_stmt (&seq, g);
2924 g = gimple_build_label (lab2);
2925 gimple_seq_add_stmt (&seq, g);
/* The else-branch keeps the original SIMD loop.  */
2926 gimple_seq_add_stmt (&seq, stmt);
2927 g = gimple_build_label (lab3);
2928 gimple_seq_add_stmt (&seq, g);
2929 gimple_bind_set_body (bind, seq);
2930 update_stmt (bind);
/* Scan both copies; the SIMD context remembers its SIMT twin.  */
2931 scan_omp_for (new_stmt, outer_ctx);
2932 scan_omp_for (stmt, outer_ctx)->simt_stmt = new_stmt;
2935 static tree omp_find_scan (gimple_stmt_iterator *, bool *,
2936 struct walk_stmt_info *);
2937 static omp_context *maybe_lookup_ctx (gimple *);
2939 /* Duplicate #pragma omp simd, one for the scan input phase loop and one
2940 for scan phase loop.  The original loop becomes the body of an
   input-phase GIMPLE_OMP_SCAN, and a deep copy becomes the body of a
   following scan-phase GIMPLE_OMP_SCAN carrying an 'inclusive' clause.  */
2942 static void
2943 scan_omp_simd_scan (gimple_stmt_iterator *gsi, gomp_for *stmt,
2944 omp_context *outer_ctx)
2946 /* The only change between inclusive and exclusive scan will be
2947 within the first simd loop, so just use inclusive in the
2948 worksharing loop.  */
2949 outer_ctx->scan_inclusive = true;
2950 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_INCLUSIVE);
2951 OMP_CLAUSE_DECL (c) = integer_zero_node;
/* Replace the loop with the input-phase scan and insert the scan-phase
   scan right after it.  */
2953 gomp_scan *input_stmt = gimple_build_omp_scan (NULL, NULL_TREE);
2954 gomp_scan *scan_stmt = gimple_build_omp_scan (NULL, c);
2955 gsi_replace (gsi, input_stmt, false);
2956 gimple_seq input_body = NULL;
2957 gimple_seq_add_stmt (&input_body, stmt);
2958 gsi_insert_after (gsi, scan_stmt, GSI_NEW_STMT);
/* Find the inner GIMPLE_OMP_SCAN separator inside the original loop
   body; the stmt after it is its scan-phase partner.  */
2960 gimple_stmt_iterator input1_gsi = gsi_none ();
2961 struct walk_stmt_info wi;
2962 memset (&wi, 0, sizeof (wi));
2963 wi.val_only = true;
2964 wi.info = (void *) &input1_gsi;
2965 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), omp_find_scan, NULL, &wi);
2966 gcc_assert (!gsi_end_p (input1_gsi));
2968 gimple *input_stmt1 = gsi_stmt (input1_gsi);
2969 gsi_next (&input1_gsi);
2970 gimple *scan_stmt1 = gsi_stmt (input1_gsi);
2971 gcc_assert (scan_stmt1 && gimple_code (scan_stmt1) == GIMPLE_OMP_SCAN);
2972 c = gimple_omp_scan_clauses (as_a <gomp_scan *> (scan_stmt1));
/* For an exclusive scan the two inner parts swap roles.  */
2973 if (c && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_EXCLUSIVE)
2974 std::swap (input_stmt1, scan_stmt1);
/* Copy the loop with the input part's body temporarily detached, so the
   copy (the scan-phase loop) does not duplicate the input-phase body.  */
2976 gimple_seq input_body1 = gimple_omp_body (input_stmt1);
2977 gimple_omp_set_body (input_stmt1, NULL);
2979 gimple_seq scan_body = copy_gimple_seq_and_replace_locals (stmt);
2980 gomp_for *new_stmt = as_a <gomp_for *> (scan_body);
2982 gimple_omp_set_body (input_stmt1, input_body1);
2983 gimple_omp_set_body (scan_stmt1, NULL);
/* Locate the corresponding inner scan pair inside the copy and clear
   its input part, mirroring the original.  */
2985 gimple_stmt_iterator input2_gsi = gsi_none ();
2986 memset (&wi, 0, sizeof (wi));
2987 wi.val_only = true;
2988 wi.info = (void *) &input2_gsi;
2989 walk_gimple_seq_mod (gimple_omp_body_ptr (new_stmt), omp_find_scan,
2990 NULL, &wi);
2991 gcc_assert (!gsi_end_p (input2_gsi));
2993 gimple *input_stmt2 = gsi_stmt (input2_gsi);
2994 gsi_next (&input2_gsi);
2995 gimple *scan_stmt2 = gsi_stmt (input2_gsi);
2996 gcc_assert (scan_stmt2 && gimple_code (scan_stmt2) == GIMPLE_OMP_SCAN);
2997 if (c && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_EXCLUSIVE)
2998 std::swap (input_stmt2, scan_stmt2);
3000 gimple_omp_set_body (input_stmt2, NULL);
3002 gimple_omp_set_body (input_stmt, input_body);
3003 gimple_omp_set_body (scan_stmt, scan_body);
/* Scan both phases in fresh contexts under OUTER_CTX.  */
3005 omp_context *ctx = new_omp_context (input_stmt, outer_ctx);
3006 scan_omp (gimple_omp_body_ptr (input_stmt), ctx);
3008 ctx = new_omp_context (scan_stmt, outer_ctx);
3009 scan_omp (gimple_omp_body_ptr (scan_stmt), ctx);
3011 maybe_lookup_ctx (new_stmt)->for_simd_scan_phase = true;
3014 /* Scan an OpenMP sections directive. */
3016 static void
3017 scan_omp_sections (gomp_sections *stmt, omp_context *outer_ctx)
3019 omp_context *ctx;
3021 ctx = new_omp_context (stmt, outer_ctx);
3022 scan_sharing_clauses (gimple_omp_sections_clauses (stmt), ctx);
3023 scan_omp (gimple_omp_body_ptr (stmt), ctx);
3026 /* Scan an OpenMP single directive. */
3028 static void
3029 scan_omp_single (gomp_single *stmt, omp_context *outer_ctx)
3031 omp_context *ctx;
3032 tree name;
3034 ctx = new_omp_context (stmt, outer_ctx);
3035 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
3036 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
3037 name = create_tmp_var_name (".omp_copy_s");
3038 name = build_decl (gimple_location (stmt),
3039 TYPE_DECL, name, ctx->record_type);
3040 TYPE_NAME (ctx->record_type) = name;
3042 scan_sharing_clauses (gimple_omp_single_clauses (stmt), ctx);
3043 scan_omp (gimple_omp_body_ptr (stmt), ctx);
3045 if (TYPE_FIELDS (ctx->record_type) == NULL)
3046 ctx->record_type = NULL;
3047 else
3048 layout_type (ctx->record_type);
3051 /* Scan a GIMPLE_OMP_TARGET.  Builds the .omp_data_t record describing
   mapped data; for offloaded kinds also creates the child function the
   offload runtime will invoke.  */
3053 static void
3054 scan_omp_target (gomp_target *stmt, omp_context *outer_ctx)
3056 omp_context *ctx;
3057 tree name;
3058 bool offloaded = is_gimple_omp_offloaded (stmt);
3059 tree clauses = gimple_omp_target_clauses (stmt);
3061 ctx = new_omp_context (stmt, outer_ctx);
3062 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
3063 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
3064 name = create_tmp_var_name (".omp_data_t");
3065 name = build_decl (gimple_location (stmt),
3066 TYPE_DECL, name, ctx->record_type);
3067 DECL_ARTIFICIAL (name) = 1;
3068 DECL_NAMELESS (name) = 1;
3069 TYPE_NAME (ctx->record_type) = name;
3070 TYPE_ARTIFICIAL (ctx->record_type) = 1;
3072 if (offloaded)
3074 create_omp_child_function (ctx, false);
3075 gimple_omp_target_set_child_fn (stmt, ctx->cb.dst_fn);
3078 scan_sharing_clauses (clauses, ctx);
3079 scan_omp (gimple_omp_body_ptr (stmt), ctx);
3081 if (TYPE_FIELDS (ctx->record_type) == NULL)
3082 ctx->record_type = ctx->receiver_decl = NULL;
3083 else
/* Fields were prepended during scanning; restore declaration order
   before laying out, and (with checking) verify all fields share the
   same alignment.  */
3085 TYPE_FIELDS (ctx->record_type)
3086 = nreverse (TYPE_FIELDS (ctx->record_type));
3087 if (flag_checking)
3089 unsigned int align = DECL_ALIGN (TYPE_FIELDS (ctx->record_type));
3090 for (tree field = TYPE_FIELDS (ctx->record_type);
3091 field;
3092 field = DECL_CHAIN (field))
3093 gcc_assert (DECL_ALIGN (field) == align);
3095 layout_type (ctx->record_type);
3096 if (offloaded)
3097 fixup_child_record_type (ctx);
/* A target mixing a nested teams with other directives is invalid;
   diagnose and discard the body.  */
3100 if (ctx->teams_nested_p && ctx->nonteams_nested_p)
3102 error_at (gimple_location (stmt),
3103 "%<target%> construct with nested %<teams%> construct "
3104 "contains directives outside of the %<teams%> construct");
3105 gimple_omp_set_body (stmt, gimple_build_bind (NULL, NULL, NULL));
3109 /* Scan an OpenMP teams directive.  Host teams (outside of target) get
   the full parallel-like treatment with a .omp_data_s record and child
   function; non-host teams only need their clauses and body scanned.  */
3111 static void
3112 scan_omp_teams (gomp_teams *stmt, omp_context *outer_ctx)
3114 omp_context *ctx = new_omp_context (stmt, outer_ctx);
3116 if (!gimple_omp_teams_host (stmt))
3118 scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
3119 scan_omp (gimple_omp_body_ptr (stmt), ctx);
3120 return;
/* Host teams: outline like a parallel region.  */
3122 taskreg_contexts.safe_push (ctx);
3123 gcc_assert (taskreg_nesting_level == 1);
3124 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
3125 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
3126 tree name = create_tmp_var_name (".omp_data_s");
3127 name = build_decl (gimple_location (stmt),
3128 TYPE_DECL, name, ctx->record_type);
3129 DECL_ARTIFICIAL (name) = 1;
3130 DECL_NAMELESS (name) = 1;
3131 TYPE_NAME (ctx->record_type) = name;
3132 TYPE_ARTIFICIAL (ctx->record_type) = 1;
3133 create_omp_child_function (ctx, false);
3134 gimple_omp_teams_set_child_fn (stmt, ctx->cb.dst_fn);
3136 scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
3137 scan_omp (gimple_omp_body_ptr (stmt), ctx);
/* Nothing to marshal if no field was created.  */
3139 if (TYPE_FIELDS (ctx->record_type) == NULL)
3140 ctx->record_type = ctx->receiver_decl = NULL;
/* Check nesting restrictions.  STMT is the OMP directive (or GOMP_*
   builtin call) being scanned and CTX the context of its innermost
   enclosing OMP construct (NULL at top level).  Returns true if STMT is
   allowed here; otherwise emits a diagnostic and returns false, which
   makes the caller replace STMT with a nop.  */
static bool
check_omp_nesting_restrictions (gimple *stmt, omp_context *ctx)
{
  tree c;

  /* No nesting of non-OpenACC STMT (that is, an OpenMP one, or a GOMP builtin)
     inside an OpenACC CTX.  */
  if (gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
      || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE)
    /* ..., except for the atomic codes that OpenACC shares with OpenMP.  */
    ;
  else if (!(is_gimple_omp (stmt)
	     && is_gimple_omp_oacc (stmt)))
    {
      if (oacc_get_fn_attrib (cfun->decl) != NULL)
	{
	  error_at (gimple_location (stmt),
		    "non-OpenACC construct inside of OpenACC routine");
	  return false;
	}
      else
	/* Walk outward looking for any enclosing OpenACC construct.  */
	for (omp_context *octx = ctx; octx != NULL; octx = octx->outer)
	  if (is_gimple_omp (octx->stmt)
	      && is_gimple_omp_oacc (octx->stmt))
	    {
	      error_at (gimple_location (stmt),
			"non-OpenACC construct inside of OpenACC region");
	      return false;
	    }
    }

  if (ctx != NULL)
    {
      if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET
	  && gimple_omp_target_kind (ctx->stmt) == GF_OMP_TARGET_KIND_REGION)
	{
	  /* device(ancestor:...) target regions may not contain any
	     OpenMP constructs at all.  */
	  c = omp_find_clause (gimple_omp_target_clauses (ctx->stmt),
			       OMP_CLAUSE_DEVICE);
	  if (c && OMP_CLAUSE_DEVICE_ANCESTOR (c))
	    {
	      error_at (gimple_location (stmt),
			"OpenMP constructs are not allowed in target region "
			"with %<ancestor%>");
	      return false;
	    }
	  /* Track whether the target body mixes a nested teams with
	     other directives; scan_omp_target diagnoses the mix.  */
	  if (gimple_code (stmt) == GIMPLE_OMP_TEAMS && !ctx->teams_nested_p)
	    ctx->teams_nested_p = true;
	  else
	    ctx->nonteams_nested_p = true;
	}
      /* For restriction purposes a scan directly inside a for behaves
	 as if nested in the loop's context.  */
      if (gimple_code (ctx->stmt) == GIMPLE_OMP_SCAN
	  && ctx->outer
	  && gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
	ctx = ctx->outer;
      if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
	  && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD
	  && !ctx->loop_p)
	{
	  /* Inside a simd region only ordered simd, simd, loop and the
	     atomics are permitted.  */
	  c = NULL_TREE;
	  if (ctx->order_concurrent
	      && (gimple_code (stmt) == GIMPLE_OMP_ORDERED
		  || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
		  || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE))
	    {
	      error_at (gimple_location (stmt),
			"OpenMP constructs other than %<parallel%>, %<loop%>"
			" or %<simd%> may not be nested inside a region with"
			" the %<order(concurrent)%> clause");
	      return false;
	    }
	  if (gimple_code (stmt) == GIMPLE_OMP_ORDERED)
	    {
	      c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
	      if (omp_find_clause (c, OMP_CLAUSE_SIMD))
		{
		  /* ordered simd threads additionally requires a
		     combined "for simd" enclosing context.  */
		  if (omp_find_clause (c, OMP_CLAUSE_THREADS)
		      && (ctx->outer == NULL
			  || !gimple_omp_for_combined_into_p (ctx->stmt)
			  || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR
			  || (gimple_omp_for_kind (ctx->outer->stmt)
			      != GF_OMP_FOR_KIND_FOR)
			  || !gimple_omp_for_combined_p (ctx->outer->stmt)))
		    {
		      error_at (gimple_location (stmt),
				"%<ordered simd threads%> must be closely "
				"nested inside of %<%s simd%> region",
				lang_GNU_Fortran () ? "do" : "for");
		      return false;
		    }
		  return true;
		}
	    }
	  else if (gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
		   || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE
		   || gimple_code (stmt) == GIMPLE_OMP_SCAN)
	    return true;
	  else if (gimple_code (stmt) == GIMPLE_OMP_FOR
		   && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
	    return true;
	  error_at (gimple_location (stmt),
		    "OpenMP constructs other than "
		    "%<ordered simd%>, %<simd%>, %<loop%> or %<atomic%> may "
		    "not be nested inside %<simd%> region");
	  return false;
	}
      else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
	{
	  /* Only distribute, parallel, or loop (for with a bind clause)
	     may be strictly nested in teams.  */
	  if ((gimple_code (stmt) != GIMPLE_OMP_FOR
	       || (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_DISTRIBUTE
		   && omp_find_clause (gimple_omp_for_clauses (stmt),
				       OMP_CLAUSE_BIND) == NULL_TREE))
	      && gimple_code (stmt) != GIMPLE_OMP_PARALLEL)
	    {
	      error_at (gimple_location (stmt),
			"only %<distribute%>, %<parallel%> or %<loop%> "
			"regions are allowed to be strictly nested inside "
			"%<teams%> region");
	      return false;
	    }
	}
      else if (ctx->order_concurrent
	       && gimple_code (stmt) != GIMPLE_OMP_PARALLEL
	       && (gimple_code (stmt) != GIMPLE_OMP_FOR
		   || gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_SIMD)
	       && gimple_code (stmt) != GIMPLE_OMP_SCAN)
	{
	  if (ctx->loop_p)
	    error_at (gimple_location (stmt),
		      "OpenMP constructs other than %<parallel%>, %<loop%> or "
		      "%<simd%> may not be nested inside a %<loop%> region");
	  else
	    error_at (gimple_location (stmt),
		      "OpenMP constructs other than %<parallel%>, %<loop%> or "
		      "%<simd%> may not be nested inside a region with "
		      "the %<order(concurrent)%> clause");
	  return false;
	}
    }
  switch (gimple_code (stmt))
    {
    case GIMPLE_OMP_FOR:
      if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_SIMD)
	return true;
      if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_DISTRIBUTE)
	{
	  if (ctx != NULL && gimple_code (ctx->stmt) != GIMPLE_OMP_TEAMS)
	    {
	      error_at (gimple_location (stmt),
			"%<distribute%> region must be strictly nested "
			"inside %<teams%> construct");
	      return false;
	    }
	  return true;
	}
      /* We split taskloop into task and nested taskloop in it.  */
      if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP)
	return true;
      /* For now, hope this will change and loop bind(parallel) will not
	 be allowed in lots of contexts.  */
      if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR
	  && omp_find_clause (gimple_omp_for_clauses (stmt), OMP_CLAUSE_BIND))
	return true;
      if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_OACC_LOOP)
	{
	  /* An OpenACC loop must be lexically inside another OpenACC
	     loop, an OpenACC compute construct, or an OpenACC routine.  */
	  bool ok = false;

	  if (ctx)
	    switch (gimple_code (ctx->stmt))
	      {
	      case GIMPLE_OMP_FOR:
		ok = (gimple_omp_for_kind (ctx->stmt)
		      == GF_OMP_FOR_KIND_OACC_LOOP);
		break;

	      case GIMPLE_OMP_TARGET:
		switch (gimple_omp_target_kind (ctx->stmt))
		  {
		  case GF_OMP_TARGET_KIND_OACC_PARALLEL:
		  case GF_OMP_TARGET_KIND_OACC_KERNELS:
		  case GF_OMP_TARGET_KIND_OACC_SERIAL:
		  case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED:
		  case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE:
		    ok = true;
		    break;

		  default:
		    break;
		  }

	      default:
		break;
	      }
	  else if (oacc_get_fn_attrib (current_function_decl))
	    ok = true;
	  if (!ok)
	    {
	      error_at (gimple_location (stmt),
			"OpenACC loop directive must be associated with"
			" an OpenACC compute region");
	      return false;
	    }
	}
      /* FALLTHRU */
    case GIMPLE_CALL:
      /* Only GOMP_cancel / GOMP_cancellation_point calls reach here;
	 argument 0 encodes the construct kind (1 parallel, 2 for,
	 4 sections, 8 taskgroup), argument 1 the if-expression.  */
      if (is_gimple_call (stmt)
	  && (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
	      == BUILT_IN_GOMP_CANCEL
	      || DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
		 == BUILT_IN_GOMP_CANCELLATION_POINT))
	{
	  const char *bad = NULL;
	  const char *kind = NULL;
	  const char *construct
	    = (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
	       == BUILT_IN_GOMP_CANCEL)
	      ? "cancel"
	      : "cancellation point";
	  if (ctx == NULL)
	    {
	      error_at (gimple_location (stmt), "orphaned %qs construct",
			construct);
	      return false;
	    }
	  switch (tree_fits_shwi_p (gimple_call_arg (stmt, 0))
		  ? tree_to_shwi (gimple_call_arg (stmt, 0))
		  : 0)
	    {
	    case 1:
	      if (gimple_code (ctx->stmt) != GIMPLE_OMP_PARALLEL)
		bad = "parallel";
	      else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
		       == BUILT_IN_GOMP_CANCEL
		       && !integer_zerop (gimple_call_arg (stmt, 1)))
		ctx->cancellable = true;
	      kind = "parallel";
	      break;
	    case 2:
	      if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
		  || gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR)
		bad = "for";
	      else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
		       == BUILT_IN_GOMP_CANCEL
		       && !integer_zerop (gimple_call_arg (stmt, 1)))
		{
		  ctx->cancellable = true;
		  if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
				       OMP_CLAUSE_NOWAIT))
		    warning_at (gimple_location (stmt), 0,
				"%<cancel for%> inside "
				"%<nowait%> for construct");
		  if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
				       OMP_CLAUSE_ORDERED))
		    warning_at (gimple_location (stmt), 0,
				"%<cancel for%> inside "
				"%<ordered%> for construct");
		}
	      kind = "for";
	      break;
	    case 4:
	      if (gimple_code (ctx->stmt) != GIMPLE_OMP_SECTIONS
		  && gimple_code (ctx->stmt) != GIMPLE_OMP_SECTION)
		bad = "sections";
	      else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
		       == BUILT_IN_GOMP_CANCEL
		       && !integer_zerop (gimple_call_arg (stmt, 1)))
		{
		  /* The cancellable flag goes on the sections context,
		     which is either CTX or its immediate parent when
		     the call appears inside an individual section.  */
		  if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
		    {
		      ctx->cancellable = true;
		      if (omp_find_clause (gimple_omp_sections_clauses
								(ctx->stmt),
					   OMP_CLAUSE_NOWAIT))
			warning_at (gimple_location (stmt), 0,
				    "%<cancel sections%> inside "
				    "%<nowait%> sections construct");
		    }
		  else
		    {
		      gcc_assert (ctx->outer
				  && gimple_code (ctx->outer->stmt)
				     == GIMPLE_OMP_SECTIONS);
		      ctx->outer->cancellable = true;
		      if (omp_find_clause (gimple_omp_sections_clauses
							(ctx->outer->stmt),
					   OMP_CLAUSE_NOWAIT))
			warning_at (gimple_location (stmt), 0,
				    "%<cancel sections%> inside "
				    "%<nowait%> sections construct");
		    }
		}
	      kind = "sections";
	      break;
	    case 8:
	      if (!is_task_ctx (ctx)
		  && (!is_taskloop_ctx (ctx)
		      || ctx->outer == NULL
		      || !is_task_ctx (ctx->outer)))
		bad = "task";
	      else
		{
		  /* Search outward for the enclosing taskgroup; hitting
		     a parallel/teams/target boundary first is an error.  */
		  for (omp_context *octx = ctx->outer;
		       octx; octx = octx->outer)
		    {
		      switch (gimple_code (octx->stmt))
			{
			case GIMPLE_OMP_TASKGROUP:
			  break;
			case GIMPLE_OMP_TARGET:
			  if (gimple_omp_target_kind (octx->stmt)
			      != GF_OMP_TARGET_KIND_REGION)
			    continue;
			  /* FALLTHRU */
			case GIMPLE_OMP_PARALLEL:
			case GIMPLE_OMP_TEAMS:
			  error_at (gimple_location (stmt),
				    "%<%s taskgroup%> construct not closely "
				    "nested inside of %<taskgroup%> region",
				    construct);
			  return false;
			case GIMPLE_OMP_TASK:
			  if (gimple_omp_task_taskloop_p (octx->stmt)
			      && octx->outer
			      && is_taskloop_ctx (octx->outer))
			    {
			      tree clauses
				= gimple_omp_for_clauses (octx->outer->stmt);
			      if (!omp_find_clause (clauses,
						    OMP_CLAUSE_NOGROUP))
				break;
			    }
			  continue;
			default:
			  continue;
			}
		      break;
		    }
		  ctx->cancellable = true;
		}
	      kind = "taskgroup";
	      break;
	    default:
	      error_at (gimple_location (stmt), "invalid arguments");
	      return false;
	    }
	  if (bad)
	    {
	      error_at (gimple_location (stmt),
			"%<%s %s%> construct not closely nested inside of %qs",
			construct, kind, bad);
	      return false;
	    }
	}
      /* FALLTHRU */
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
      /* Worksharing regions (and barriers) may not be closely nested
	 inside other worksharing/critical/ordered/master/masked/task
	 regions; walking stops at a parallel/teams/target boundary.  */
      for (; ctx != NULL; ctx = ctx->outer)
	switch (gimple_code (ctx->stmt))
	  {
	  case GIMPLE_OMP_FOR:
	    if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
		&& gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
	      break;
	    /* FALLTHRU */
	  case GIMPLE_OMP_SECTIONS:
	  case GIMPLE_OMP_SINGLE:
	  case GIMPLE_OMP_ORDERED:
	  case GIMPLE_OMP_MASTER:
	  case GIMPLE_OMP_MASKED:
	  case GIMPLE_OMP_TASK:
	  case GIMPLE_OMP_CRITICAL:
	    if (is_gimple_call (stmt))
	      {
		if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
		    != BUILT_IN_GOMP_BARRIER)
		  return true;
		error_at (gimple_location (stmt),
			  "barrier region may not be closely nested inside "
			  "of work-sharing, %<loop%>, %<critical%>, "
			  "%<ordered%>, %<master%>, %<masked%>, explicit "
			  "%<task%> or %<taskloop%> region");
		return false;
	      }
	    error_at (gimple_location (stmt),
		      "work-sharing region may not be closely nested inside "
		      "of work-sharing, %<loop%>, %<critical%>, %<ordered%>, "
		      "%<master%>, %<masked%>, explicit %<task%> or "
		      "%<taskloop%> region");
	    return false;
	  case GIMPLE_OMP_PARALLEL:
	  case GIMPLE_OMP_TEAMS:
	    return true;
	  case GIMPLE_OMP_TARGET:
	    if (gimple_omp_target_kind (ctx->stmt)
		== GF_OMP_TARGET_KIND_REGION)
	      return true;
	    break;
	  default:
	    break;
	  }
      break;
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_MASKED:
      for (; ctx != NULL; ctx = ctx->outer)
	switch (gimple_code (ctx->stmt))
	  {
	  case GIMPLE_OMP_FOR:
	    if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
		&& gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
	      break;
	    /* FALLTHRU */
	  case GIMPLE_OMP_SECTIONS:
	  case GIMPLE_OMP_SINGLE:
	  case GIMPLE_OMP_TASK:
	    error_at (gimple_location (stmt),
		      "%qs region may not be closely nested inside "
		      "of work-sharing, %<loop%>, explicit %<task%> or "
		      "%<taskloop%> region",
		      gimple_code (stmt) == GIMPLE_OMP_MASTER
		      ? "master" : "masked");
	    return false;
	  case GIMPLE_OMP_PARALLEL:
	  case GIMPLE_OMP_TEAMS:
	    return true;
	  case GIMPLE_OMP_TARGET:
	    if (gimple_omp_target_kind (ctx->stmt)
		== GF_OMP_TARGET_KIND_REGION)
	      return true;
	    break;
	  default:
	    break;
	  }
      break;
    case GIMPLE_OMP_SCOPE:
      for (; ctx != NULL; ctx = ctx->outer)
	switch (gimple_code (ctx->stmt))
	  {
	  case GIMPLE_OMP_FOR:
	    if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
		&& gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
	      break;
	    /* FALLTHRU */
	  case GIMPLE_OMP_SECTIONS:
	  case GIMPLE_OMP_SINGLE:
	  case GIMPLE_OMP_TASK:
	  case GIMPLE_OMP_CRITICAL:
	  case GIMPLE_OMP_ORDERED:
	  case GIMPLE_OMP_MASTER:
	  case GIMPLE_OMP_MASKED:
	    error_at (gimple_location (stmt),
		      "%<scope%> region may not be closely nested inside "
		      "of work-sharing, %<loop%>, explicit %<task%>, "
		      "%<taskloop%>, %<critical%>, %<ordered%>, %<master%>, "
		      "or %<masked%> region");
	    return false;
	  case GIMPLE_OMP_PARALLEL:
	  case GIMPLE_OMP_TEAMS:
	    return true;
	  case GIMPLE_OMP_TARGET:
	    if (gimple_omp_target_kind (ctx->stmt)
		== GF_OMP_TARGET_KIND_REGION)
	      return true;
	    break;
	  default:
	    break;
	  }
      break;
    case GIMPLE_OMP_TASK:
      /* depend(source)/depend(sink:...) are only meaningful on
	 ordered, not on task.  */
      for (c = gimple_omp_task_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
	    && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
		|| OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
	  {
	    enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
	    error_at (OMP_CLAUSE_LOCATION (c),
		      "%<depend(%s)%> is only allowed in %<omp ordered%>",
		      kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
	    return false;
	  }
      break;
    case GIMPLE_OMP_ORDERED:
      for (c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
	   c; c = OMP_CLAUSE_CHAIN (c))
	{
	  if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
	    {
	      gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREADS
			  || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SIMD);
	      continue;
	    }
	  enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
	  if (kind == OMP_CLAUSE_DEPEND_SOURCE
	      || kind == OMP_CLAUSE_DEPEND_SINK)
	    {
	      tree oclause;
	      /* Look for containing ordered(N) loop.  */
	      if (ctx == NULL
		  || gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
		  || (oclause
		      = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
					 OMP_CLAUSE_ORDERED)) == NULL_TREE)
		{
		  error_at (OMP_CLAUSE_LOCATION (c),
			    "%<ordered%> construct with %<depend%> clause "
			    "must be closely nested inside an %<ordered%> "
			    "loop");
		  return false;
		}
	      else if (OMP_CLAUSE_ORDERED_EXPR (oclause) == NULL_TREE)
		{
		  error_at (OMP_CLAUSE_LOCATION (c),
			    "%<ordered%> construct with %<depend%> clause "
			    "must be closely nested inside a loop with "
			    "%<ordered%> clause with a parameter");
		  return false;
		}
	    }
	  else
	    {
	      error_at (OMP_CLAUSE_LOCATION (c),
			"invalid depend kind in omp %<ordered%> %<depend%>");
	      return false;
	    }
	}
      c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
      if (omp_find_clause (c, OMP_CLAUSE_SIMD))
	{
	  /* ordered simd must be closely nested inside of simd region,
	     and simd region must not encounter constructs other than
	     ordered simd, therefore ordered simd may be either orphaned,
	     or ctx->stmt must be simd.  The latter case is handled already
	     earlier.  */
	  if (ctx != NULL)
	    {
	      error_at (gimple_location (stmt),
			"%<ordered%> %<simd%> must be closely nested inside "
			"%<simd%> region");
	      return false;
	    }
	}
      for (; ctx != NULL; ctx = ctx->outer)
	switch (gimple_code (ctx->stmt))
	  {
	  case GIMPLE_OMP_CRITICAL:
	  case GIMPLE_OMP_TASK:
	  case GIMPLE_OMP_ORDERED:
	  ordered_in_taskloop:
	    error_at (gimple_location (stmt),
		      "%<ordered%> region may not be closely nested inside "
		      "of %<critical%>, %<ordered%>, explicit %<task%> or "
		      "%<taskloop%> region");
	    return false;
	  case GIMPLE_OMP_FOR:
	    if (gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP)
	      goto ordered_in_taskloop;
	    tree o;
	    o = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
				 OMP_CLAUSE_ORDERED);
	    if (o == NULL)
	      {
		error_at (gimple_location (stmt),
			  "%<ordered%> region must be closely nested inside "
			  "a loop region with an %<ordered%> clause");
		return false;
	      }
	    if (OMP_CLAUSE_ORDERED_EXPR (o) != NULL_TREE
		&& omp_find_clause (c, OMP_CLAUSE_DEPEND) == NULL_TREE)
	      {
		error_at (gimple_location (stmt),
			  "%<ordered%> region without %<depend%> clause may "
			  "not be closely nested inside a loop region with "
			  "an %<ordered%> clause with a parameter");
		return false;
	      }
	    return true;
	  case GIMPLE_OMP_TARGET:
	    if (gimple_omp_target_kind (ctx->stmt)
		!= GF_OMP_TARGET_KIND_REGION)
	      break;
	    /* FALLTHRU */
	  case GIMPLE_OMP_PARALLEL:
	  case GIMPLE_OMP_TEAMS:
	    error_at (gimple_location (stmt),
		      "%<ordered%> region must be closely nested inside "
		      "a loop region with an %<ordered%> clause");
	    return false;
	  default:
	    break;
	  }
      break;
    case GIMPLE_OMP_CRITICAL:
      {
	/* Same-named critical regions must not nest (deadlock).  */
	tree this_stmt_name
	  = gimple_omp_critical_name (as_a <gomp_critical *> (stmt));
	for (; ctx != NULL; ctx = ctx->outer)
	  if (gomp_critical *other_crit
	      = dyn_cast <gomp_critical *> (ctx->stmt))
	    if (this_stmt_name == gimple_omp_critical_name (other_crit))
	      {
		error_at (gimple_location (stmt),
			  "%<critical%> region may not be nested inside "
			  "a %<critical%> region with the same name");
		return false;
	      }
      }
      break;
    case GIMPLE_OMP_TEAMS:
      if (ctx == NULL)
	break;
      else if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET
	       || (gimple_omp_target_kind (ctx->stmt)
		   != GF_OMP_TARGET_KIND_REGION))
	{
	  /* Teams construct can appear either strictly nested inside of
	     target construct with no intervening stmts, or can be encountered
	     only by initial task (so must not appear inside any OpenMP
	     construct.  */
	  error_at (gimple_location (stmt),
		    "%<teams%> construct must be closely nested inside of "
		    "%<target%> construct or not nested in any OpenMP "
		    "construct");
	  return false;
	}
      break;
    case GIMPLE_OMP_TARGET:
      for (c = gimple_omp_target_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
	    && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
		|| OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
	  {
	    enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
	    error_at (OMP_CLAUSE_LOCATION (c),
		      "%<depend(%s)%> is only allowed in %<omp ordered%>",
		      kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
	    return false;
	  }
      if (is_gimple_omp_offloaded (stmt)
	  && oacc_get_fn_attrib (cfun->decl) != NULL)
	{
	  error_at (gimple_location (stmt),
		    "OpenACC region inside of OpenACC routine, nested "
		    "parallelism not supported yet");
	  return false;
	}
      for (; ctx != NULL; ctx = ctx->outer)
	{
	  if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET)
	    {
	      if (is_gimple_omp (stmt)
		  && is_gimple_omp_oacc (stmt)
		  && is_gimple_omp (ctx->stmt))
		{
		  error_at (gimple_location (stmt),
			    "OpenACC construct inside of non-OpenACC region");
		  return false;
		}
	      continue;
	    }

	  /* Map both target kinds to user-visible construct names for
	     the diagnostics below.  */
	  const char *stmt_name, *ctx_stmt_name;
	  switch (gimple_omp_target_kind (stmt))
	    {
	    case GF_OMP_TARGET_KIND_REGION: stmt_name = "target"; break;
	    case GF_OMP_TARGET_KIND_DATA: stmt_name = "target data"; break;
	    case GF_OMP_TARGET_KIND_UPDATE: stmt_name = "target update"; break;
	    case GF_OMP_TARGET_KIND_ENTER_DATA:
	      stmt_name = "target enter data"; break;
	    case GF_OMP_TARGET_KIND_EXIT_DATA:
	      stmt_name = "target exit data"; break;
	    case GF_OMP_TARGET_KIND_OACC_PARALLEL: stmt_name = "parallel"; break;
	    case GF_OMP_TARGET_KIND_OACC_KERNELS: stmt_name = "kernels"; break;
	    case GF_OMP_TARGET_KIND_OACC_SERIAL: stmt_name = "serial"; break;
	    case GF_OMP_TARGET_KIND_OACC_DATA: stmt_name = "data"; break;
	    case GF_OMP_TARGET_KIND_OACC_UPDATE: stmt_name = "update"; break;
	    case GF_OMP_TARGET_KIND_OACC_ENTER_DATA:
	      stmt_name = "enter data"; break;
	    case GF_OMP_TARGET_KIND_OACC_EXIT_DATA:
	      stmt_name = "exit data"; break;
	    case GF_OMP_TARGET_KIND_OACC_DECLARE: stmt_name = "declare"; break;
	    case GF_OMP_TARGET_KIND_OACC_HOST_DATA: stmt_name = "host_data";
	      break;
	    case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED:
	    case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE:
	    case GF_OMP_TARGET_KIND_OACC_DATA_KERNELS:
	      /* OpenACC 'kernels' decomposed parts.  */
	      stmt_name = "kernels"; break;
	    default: gcc_unreachable ();
	    }
	  switch (gimple_omp_target_kind (ctx->stmt))
	    {
	    case GF_OMP_TARGET_KIND_REGION: ctx_stmt_name = "target"; break;
	    case GF_OMP_TARGET_KIND_DATA: ctx_stmt_name = "target data"; break;
	    case GF_OMP_TARGET_KIND_OACC_PARALLEL:
	      ctx_stmt_name = "parallel"; break;
	    case GF_OMP_TARGET_KIND_OACC_KERNELS:
	      ctx_stmt_name = "kernels"; break;
	    case GF_OMP_TARGET_KIND_OACC_SERIAL:
	      ctx_stmt_name = "serial"; break;
	    case GF_OMP_TARGET_KIND_OACC_DATA: ctx_stmt_name = "data"; break;
	    case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
	      ctx_stmt_name = "host_data"; break;
	    case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED:
	    case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE:
	    case GF_OMP_TARGET_KIND_OACC_DATA_KERNELS:
	      /* OpenACC 'kernels' decomposed parts.  */
	      ctx_stmt_name = "kernels"; break;
	    default: gcc_unreachable ();
	    }

	  /* OpenACC/OpenMP mismatch?  */
	  if (is_gimple_omp_oacc (stmt)
	      != is_gimple_omp_oacc (ctx->stmt))
	    {
	      error_at (gimple_location (stmt),
			"%s %qs construct inside of %s %qs region",
			(is_gimple_omp_oacc (stmt)
			 ? "OpenACC" : "OpenMP"), stmt_name,
			(is_gimple_omp_oacc (ctx->stmt)
			 ? "OpenACC" : "OpenMP"), ctx_stmt_name);
	      return false;
	    }
	  if (is_gimple_omp_offloaded (ctx->stmt))
	    {
	      /* No GIMPLE_OMP_TARGET inside offloaded OpenACC CTX.  */
	      if (is_gimple_omp_oacc (ctx->stmt))
		{
		  error_at (gimple_location (stmt),
			    "%qs construct inside of %qs region",
			    stmt_name, ctx_stmt_name);
		  return false;
		}
	      else
		warning_at (gimple_location (stmt), 0,
			    "%qs construct inside of %qs region",
			    stmt_name, ctx_stmt_name);
	    }
	}
      break;
    default:
      break;
    }
  return true;
}
/* Helper function scan_omp.

   Callback for walk_tree or operators in walk_gimple_stmt used to
   scan for OMP directives in TP.  Remaps decls and types through the
   context's copy_body callback data so references inside an outlined
   region point at the region's own copies.  */

static tree
scan_omp_1_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  omp_context *ctx = (omp_context *) wi->info;
  tree t = *tp;

  switch (TREE_CODE (t))
    {
    case VAR_DECL:
    case PARM_DECL:
    case LABEL_DECL:
    case RESULT_DECL:
      if (ctx)
	{
	  /* Replace the decl with its remapped copy in place.  */
	  tree repl = remap_decl (t, &ctx->cb);
	  gcc_checking_assert (TREE_CODE (repl) != ERROR_MARK);
	  *tp = repl;
	}
      break;

    default:
      if (ctx && TYPE_P (t))
	*tp = remap_type (t, &ctx->cb);
      else if (!DECL_P (t))
	{
	  *walk_subtrees = 1;
	  if (ctx)
	    {
	      /* If the expression's type remaps, rebuild constants
		 (INTEGER_CSTs are shared and must not be mutated);
		 other trees get their type rewritten in place.  */
	      tree tem = remap_type (TREE_TYPE (t), &ctx->cb);
	      if (tem != TREE_TYPE (t))
		{
		  if (TREE_CODE (t) == INTEGER_CST)
		    *tp = wide_int_to_tree (tem, wi::to_wide (t));
		  else
		    TREE_TYPE (t) = tem;
		}
	    }
	}
      break;
    }

  return NULL_TREE;
}
3940 /* Return true if FNDECL is a setjmp or a longjmp. */
3942 static bool
3943 setjmp_or_longjmp_p (const_tree fndecl)
3945 if (fndecl_built_in_p (fndecl, BUILT_IN_SETJMP)
3946 || fndecl_built_in_p (fndecl, BUILT_IN_LONGJMP))
3947 return true;
3949 tree declname = DECL_NAME (fndecl);
3950 if (!declname
3951 || (DECL_CONTEXT (fndecl) != NULL_TREE
3952 && TREE_CODE (DECL_CONTEXT (fndecl)) != TRANSLATION_UNIT_DECL)
3953 || !TREE_PUBLIC (fndecl))
3954 return false;
3956 const char *name = IDENTIFIER_POINTER (declname);
3957 return !strcmp (name, "setjmp") || !strcmp (name, "longjmp");
/* Return true if FNDECL is an omp_* runtime API call.  Only public
   decls at translation-unit scope whose name starts with "omp_" are
   considered; the name tail is then matched against the table below.  */

static bool
omp_runtime_api_call (const_tree fndecl)
{
  tree declname = DECL_NAME (fndecl);
  if (!declname
      || (DECL_CONTEXT (fndecl) != NULL_TREE
	  && TREE_CODE (DECL_CONTEXT (fndecl)) != TRANSLATION_UNIT_DECL)
      || !TREE_PUBLIC (fndecl))
    return false;

  const char *name = IDENTIFIER_POINTER (declname);
  if (!startswith (name, "omp_"))
    return false;

  static const char *omp_runtime_apis[] =
    {
      /* This array has 3 sections.  First omp_* calls that don't
	 have any suffixes.  */
      "aligned_alloc",
      "aligned_calloc",
      "alloc",
      "calloc",
      "free",
      "realloc",
      "target_alloc",
      "target_associate_ptr",
      "target_disassociate_ptr",
      "target_free",
      "target_is_present",
      "target_memcpy",
      "target_memcpy_rect",
      NULL,
      /* Now omp_* calls that are available as omp_* and omp_*_; however, the
	 DECL_NAME is always omp_* without tailing underscore.  */
      "capture_affinity",
      "destroy_allocator",
      "destroy_lock",
      "destroy_nest_lock",
      "display_affinity",
      "fulfill_event",
      "get_active_level",
      "get_affinity_format",
      "get_cancellation",
      "get_default_allocator",
      "get_default_device",
      "get_device_num",
      "get_dynamic",
      "get_initial_device",
      "get_level",
      "get_max_active_levels",
      "get_max_task_priority",
      "get_max_teams",
      "get_max_threads",
      "get_nested",
      "get_num_devices",
      "get_num_places",
      "get_num_procs",
      "get_num_teams",
      "get_num_threads",
      "get_partition_num_places",
      "get_place_num",
      "get_proc_bind",
      "get_supported_active_levels",
      "get_team_num",
      "get_teams_thread_limit",
      "get_thread_limit",
      "get_thread_num",
      "get_wtick",
      "get_wtime",
      "in_final",
      "in_parallel",
      "init_lock",
      "init_nest_lock",
      "is_initial_device",
      "pause_resource",
      "pause_resource_all",
      "set_affinity_format",
      "set_default_allocator",
      "set_lock",
      "set_nest_lock",
      "test_lock",
      "test_nest_lock",
      "unset_lock",
      "unset_nest_lock",
      NULL,
      /* And finally calls available as omp_*, omp_*_ and omp_*_8_; however,
	 as DECL_NAME only omp_* and omp_*_8 appear.  */
      "display_env",
      "get_ancestor_thread_num",
      "init_allocator",
      "get_partition_place_nums",
      "get_place_num_procs",
      "get_place_proc_ids",
      "get_schedule",
      "get_team_size",
      "set_default_device",
      "set_dynamic",
      "set_max_active_levels",
      "set_nested",
      "set_num_teams",
      "set_num_threads",
      "set_schedule",
      "set_teams_thread_limit"
    };

  /* MODE counts the NULL separators crossed so far, i.e. which section
     of the table we are in; only the last section (mode > 1) also
     accepts an "_8" suffix.  */
  int mode = 0;
  for (unsigned i = 0; i < ARRAY_SIZE (omp_runtime_apis); i++)
    {
      if (omp_runtime_apis[i] == NULL)
	{
	  mode++;
	  continue;
	}
      size_t len = strlen (omp_runtime_apis[i]);
      /* NAME + 4 skips the "omp_" prefix verified above; the entry
	 must match the whole remaining name (or all but "_8").  */
      if (strncmp (name + 4, omp_runtime_apis[i], len) == 0
	  && (name[4 + len] == '\0'
	      || (mode > 1 && strcmp (name + 4 + len, "_8") == 0)))
	return true;
    }
  return false;
}
4084 /* Helper function for scan_omp.
4086 Callback for walk_gimple_stmt used to scan for OMP directives in
4087 the current statement in GSI. */
4089 static tree
4090 scan_omp_1_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
4091 struct walk_stmt_info *wi)
4093 gimple *stmt = gsi_stmt (*gsi);
4094 omp_context *ctx = (omp_context *) wi->info;
4096 if (gimple_has_location (stmt))
4097 input_location = gimple_location (stmt);
4099 /* Check the nesting restrictions. */
4100 bool remove = false;
4101 if (is_gimple_omp (stmt))
4102 remove = !check_omp_nesting_restrictions (stmt, ctx);
4103 else if (is_gimple_call (stmt))
4105 tree fndecl = gimple_call_fndecl (stmt);
4106 if (fndecl)
4108 if (ctx
4109 && gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
4110 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD
4111 && setjmp_or_longjmp_p (fndecl)
4112 && !ctx->loop_p)
4114 remove = true;
4115 error_at (gimple_location (stmt),
4116 "setjmp/longjmp inside %<simd%> construct");
4118 else if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
4119 switch (DECL_FUNCTION_CODE (fndecl))
4121 case BUILT_IN_GOMP_BARRIER:
4122 case BUILT_IN_GOMP_CANCEL:
4123 case BUILT_IN_GOMP_CANCELLATION_POINT:
4124 case BUILT_IN_GOMP_TASKYIELD:
4125 case BUILT_IN_GOMP_TASKWAIT:
4126 case BUILT_IN_GOMP_TASKGROUP_START:
4127 case BUILT_IN_GOMP_TASKGROUP_END:
4128 remove = !check_omp_nesting_restrictions (stmt, ctx);
4129 break;
4130 default:
4131 break;
4133 else if (ctx)
4135 omp_context *octx = ctx;
4136 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SCAN && ctx->outer)
4137 octx = ctx->outer;
4138 if (octx->order_concurrent && omp_runtime_api_call (fndecl))
4140 remove = true;
4141 error_at (gimple_location (stmt),
4142 "OpenMP runtime API call %qD in a region with "
4143 "%<order(concurrent)%> clause", fndecl);
4145 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
4146 && omp_runtime_api_call (fndecl)
4147 && ((IDENTIFIER_LENGTH (DECL_NAME (fndecl))
4148 != strlen ("omp_get_num_teams"))
4149 || strcmp (IDENTIFIER_POINTER (DECL_NAME (fndecl)),
4150 "omp_get_num_teams") != 0)
4151 && ((IDENTIFIER_LENGTH (DECL_NAME (fndecl))
4152 != strlen ("omp_get_team_num"))
4153 || strcmp (IDENTIFIER_POINTER (DECL_NAME (fndecl)),
4154 "omp_get_team_num") != 0))
4156 remove = true;
4157 error_at (gimple_location (stmt),
4158 "OpenMP runtime API call %qD strictly nested in a "
4159 "%<teams%> region", fndecl);
4161 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET
4162 && (gimple_omp_target_kind (ctx->stmt)
4163 == GF_OMP_TARGET_KIND_REGION)
4164 && omp_runtime_api_call (fndecl))
4166 tree tgt_clauses = gimple_omp_target_clauses (ctx->stmt);
4167 tree c = omp_find_clause (tgt_clauses, OMP_CLAUSE_DEVICE);
4168 if (c && OMP_CLAUSE_DEVICE_ANCESTOR (c))
4169 error_at (gimple_location (stmt),
4170 "OpenMP runtime API call %qD in a region with "
4171 "%<device(ancestor)%> clause", fndecl);
4176 if (remove)
4178 stmt = gimple_build_nop ();
4179 gsi_replace (gsi, stmt, false);
4182 *handled_ops_p = true;
4184 switch (gimple_code (stmt))
4186 case GIMPLE_OMP_PARALLEL:
4187 taskreg_nesting_level++;
4188 scan_omp_parallel (gsi, ctx);
4189 taskreg_nesting_level--;
4190 break;
4192 case GIMPLE_OMP_TASK:
4193 taskreg_nesting_level++;
4194 scan_omp_task (gsi, ctx);
4195 taskreg_nesting_level--;
4196 break;
4198 case GIMPLE_OMP_FOR:
4199 if ((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
4200 == GF_OMP_FOR_KIND_SIMD)
4201 && gimple_omp_for_combined_into_p (stmt)
4202 && gimple_code (ctx->stmt) != GIMPLE_OMP_SCAN)
4204 tree clauses = gimple_omp_for_clauses (as_a <gomp_for *> (stmt));
4205 tree c = omp_find_clause (clauses, OMP_CLAUSE_REDUCTION);
4206 if (c && OMP_CLAUSE_REDUCTION_INSCAN (c) && !seen_error ())
4208 scan_omp_simd_scan (gsi, as_a <gomp_for *> (stmt), ctx);
4209 break;
4212 if ((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
4213 == GF_OMP_FOR_KIND_SIMD)
4214 && omp_maybe_offloaded_ctx (ctx)
4215 && omp_max_simt_vf ()
4216 && gimple_omp_for_collapse (stmt) == 1)
4217 scan_omp_simd (gsi, as_a <gomp_for *> (stmt), ctx);
4218 else
4219 scan_omp_for (as_a <gomp_for *> (stmt), ctx);
4220 break;
4222 case GIMPLE_OMP_SCOPE:
4223 ctx = new_omp_context (stmt, ctx);
4224 scan_sharing_clauses (gimple_omp_scope_clauses (stmt), ctx);
4225 scan_omp (gimple_omp_body_ptr (stmt), ctx);
4226 break;
4228 case GIMPLE_OMP_SECTIONS:
4229 scan_omp_sections (as_a <gomp_sections *> (stmt), ctx);
4230 break;
4232 case GIMPLE_OMP_SINGLE:
4233 scan_omp_single (as_a <gomp_single *> (stmt), ctx);
4234 break;
4236 case GIMPLE_OMP_SCAN:
4237 if (tree clauses = gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt)))
4239 if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_INCLUSIVE)
4240 ctx->scan_inclusive = true;
4241 else if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_EXCLUSIVE)
4242 ctx->scan_exclusive = true;
4244 /* FALLTHRU */
4245 case GIMPLE_OMP_SECTION:
4246 case GIMPLE_OMP_MASTER:
4247 case GIMPLE_OMP_ORDERED:
4248 case GIMPLE_OMP_CRITICAL:
4249 ctx = new_omp_context (stmt, ctx);
4250 scan_omp (gimple_omp_body_ptr (stmt), ctx);
4251 break;
4253 case GIMPLE_OMP_MASKED:
4254 ctx = new_omp_context (stmt, ctx);
4255 scan_sharing_clauses (gimple_omp_masked_clauses (stmt), ctx);
4256 scan_omp (gimple_omp_body_ptr (stmt), ctx);
4257 break;
4259 case GIMPLE_OMP_TASKGROUP:
4260 ctx = new_omp_context (stmt, ctx);
4261 scan_sharing_clauses (gimple_omp_taskgroup_clauses (stmt), ctx);
4262 scan_omp (gimple_omp_body_ptr (stmt), ctx);
4263 break;
4265 case GIMPLE_OMP_TARGET:
4266 if (is_gimple_omp_offloaded (stmt))
4268 taskreg_nesting_level++;
4269 scan_omp_target (as_a <gomp_target *> (stmt), ctx);
4270 taskreg_nesting_level--;
4272 else
4273 scan_omp_target (as_a <gomp_target *> (stmt), ctx);
4274 break;
4276 case GIMPLE_OMP_TEAMS:
4277 if (gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
4279 taskreg_nesting_level++;
4280 scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
4281 taskreg_nesting_level--;
4283 else
4284 scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
4285 break;
4287 case GIMPLE_BIND:
4289 tree var;
4291 *handled_ops_p = false;
4292 if (ctx)
4293 for (var = gimple_bind_vars (as_a <gbind *> (stmt));
4294 var ;
4295 var = DECL_CHAIN (var))
4296 insert_decl_map (&ctx->cb, var, var);
4298 break;
4299 default:
4300 *handled_ops_p = false;
4301 break;
4304 return NULL_TREE;
4308 /* Scan all the statements starting at the current statement. CTX
4309 contains context information about the OMP directives and
4310 clauses found during the scan. */
4312 static void
4313 scan_omp (gimple_seq *body_p, omp_context *ctx)
4315 location_t saved_location;
4316 struct walk_stmt_info wi;
4318 memset (&wi, 0, sizeof (wi));
4319 wi.info = ctx;
4320 wi.want_locations = true;
4322 saved_location = input_location;
4323 walk_gimple_seq_mod (body_p, scan_omp_1_stmt, scan_omp_1_op, &wi);
4324 input_location = saved_location;
4327 /* Re-gimplification and code generation routines. */
4329 /* Remove omp_member_access_dummy_var variables from gimple_bind_vars
4330 of BIND if in a method. */
4332 static void
4333 maybe_remove_omp_member_access_dummy_vars (gbind *bind)
4335 if (DECL_ARGUMENTS (current_function_decl)
4336 && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
4337 && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
4338 == POINTER_TYPE))
4340 tree vars = gimple_bind_vars (bind);
4341 for (tree *pvar = &vars; *pvar; )
4342 if (omp_member_access_dummy_var (*pvar))
4343 *pvar = DECL_CHAIN (*pvar);
4344 else
4345 pvar = &DECL_CHAIN (*pvar);
4346 gimple_bind_set_vars (bind, vars);
4350 /* Remove omp_member_access_dummy_var variables from BLOCK_VARS of
4351 block and its subblocks. */
4353 static void
4354 remove_member_access_dummy_vars (tree block)
4356 for (tree *pvar = &BLOCK_VARS (block); *pvar; )
4357 if (omp_member_access_dummy_var (*pvar))
4358 *pvar = DECL_CHAIN (*pvar);
4359 else
4360 pvar = &DECL_CHAIN (*pvar);
4362 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
4363 remove_member_access_dummy_vars (block);
4366 /* If a context was created for STMT when it was scanned, return it. */
4368 static omp_context *
4369 maybe_lookup_ctx (gimple *stmt)
4371 splay_tree_node n;
4372 n = splay_tree_lookup (all_contexts, (splay_tree_key) stmt);
4373 return n ? (omp_context *) n->value : NULL;
4377 /* Find the mapping for DECL in CTX or the immediately enclosing
4378 context that has a mapping for DECL.
4380 If CTX is a nested parallel directive, we may have to use the decl
4381 mappings created in CTX's parent context. Suppose that we have the
4382 following parallel nesting (variable UIDs showed for clarity):
4384 iD.1562 = 0;
4385 #omp parallel shared(iD.1562) -> outer parallel
4386 iD.1562 = iD.1562 + 1;
4388 #omp parallel shared (iD.1562) -> inner parallel
4389 iD.1562 = iD.1562 - 1;
4391 Each parallel structure will create a distinct .omp_data_s structure
4392 for copying iD.1562 in/out of the directive:
4394 outer parallel .omp_data_s.1.i -> iD.1562
4395 inner parallel .omp_data_s.2.i -> iD.1562
4397 A shared variable mapping will produce a copy-out operation before
4398 the parallel directive and a copy-in operation after it. So, in
4399 this case we would have:
4401 iD.1562 = 0;
4402 .omp_data_o.1.i = iD.1562;
4403 #omp parallel shared(iD.1562) -> outer parallel
4404 .omp_data_i.1 = &.omp_data_o.1
4405 .omp_data_i.1->i = .omp_data_i.1->i + 1;
4407 .omp_data_o.2.i = iD.1562; -> **
4408 #omp parallel shared(iD.1562) -> inner parallel
4409 .omp_data_i.2 = &.omp_data_o.2
4410 .omp_data_i.2->i = .omp_data_i.2->i - 1;
4413 ** This is a problem. The symbol iD.1562 cannot be referenced
4414 inside the body of the outer parallel region. But since we are
4415 emitting this copy operation while expanding the inner parallel
4416 directive, we need to access the CTX structure of the outer
4417 parallel directive to get the correct mapping:
4419 .omp_data_o.2.i = .omp_data_i.1->i
4421 Since there may be other workshare or parallel directives enclosing
4422 the parallel directive, it may be necessary to walk up the context
4423 parent chain. This is not a problem in general because nested
4424 parallelism happens only rarely. */
4426 static tree
4427 lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
4429 tree t;
4430 omp_context *up;
4432 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
4433 t = maybe_lookup_decl (decl, up);
4435 gcc_assert (!ctx->is_nested || t || is_global_var (decl));
4437 return t ? t : decl;
4441 /* Similar to lookup_decl_in_outer_ctx, but return DECL if not found
4442 in outer contexts. */
4444 static tree
4445 maybe_lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
4447 tree t = NULL;
4448 omp_context *up;
4450 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
4451 t = maybe_lookup_decl (decl, up);
4453 return t ? t : decl;
/* Construct the initialization value for reduction operation OP.
   LOC is used for the folding locations and TYPE is the type of the
   value being reduced; the result is the identity element of OP in
   TYPE, i.e. the value a private reduction copy starts from.  */

tree
omp_reduction_init_op (location_t loc, enum tree_code op, tree type)
{
  switch (op)
    {
    /* Zero is the identity for additive, or-like and inequality ops.  */
    case PLUS_EXPR:
    case MINUS_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_ORIF_EXPR:
    case TRUTH_XOR_EXPR:
    case NE_EXPR:
      return build_zero_cst (type);

    /* One is the identity for multiplicative, and-like and equality ops.  */
    case MULT_EXPR:
    case TRUTH_AND_EXPR:
    case TRUTH_ANDIF_EXPR:
    case EQ_EXPR:
      return fold_convert_loc (loc, type, integer_one_node);

    /* All-bits-set is the identity for bitwise AND.  */
    case BIT_AND_EXPR:
      return fold_convert_loc (loc, type, integer_minus_one_node);

    case MAX_EXPR:
      /* MAX starts from the smallest representable value: -inf when
	 infinities are honored, otherwise the negated largest finite
	 value; TYPE's minimum for pointer and integral types.  */
      if (SCALAR_FLOAT_TYPE_P (type))
	{
	  REAL_VALUE_TYPE max, min;
	  if (HONOR_INFINITIES (type))
	    {
	      real_inf (&max);
	      real_arithmetic (&min, NEGATE_EXPR, &max, NULL);
	    }
	  else
	    real_maxval (&min, 1, TYPE_MODE (type));
	  return build_real (type, min);
	}
      else if (POINTER_TYPE_P (type))
	{
	  wide_int min
	    = wi::min_value (TYPE_PRECISION (type), TYPE_SIGN (type));
	  return wide_int_to_tree (type, min);
	}
      else
	{
	  gcc_assert (INTEGRAL_TYPE_P (type));
	  return TYPE_MIN_VALUE (type);
	}

    case MIN_EXPR:
      /* Symmetrically, MIN starts from the largest representable value.  */
      if (SCALAR_FLOAT_TYPE_P (type))
	{
	  REAL_VALUE_TYPE max;
	  if (HONOR_INFINITIES (type))
	    real_inf (&max);
	  else
	    real_maxval (&max, 0, TYPE_MODE (type));
	  return build_real (type, max);
	}
      else if (POINTER_TYPE_P (type))
	{
	  wide_int max
	    = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
	  return wide_int_to_tree (type, max);
	}
      else
	{
	  gcc_assert (INTEGRAL_TYPE_P (type));
	  return TYPE_MAX_VALUE (type);
	}

    default:
      gcc_unreachable ();
    }
}
4535 /* Construct the initialization value for reduction CLAUSE. */
4537 tree
4538 omp_reduction_init (tree clause, tree type)
4540 return omp_reduction_init_op (OMP_CLAUSE_LOCATION (clause),
4541 OMP_CLAUSE_REDUCTION_CODE (clause), type);
/* Return alignment to be assumed for var in CLAUSE, which should be
   OMP_CLAUSE_ALIGNED.  Returns an INTEGER_CST tree holding the
   alignment in bytes.  */

static tree
omp_clause_aligned_alignment (tree clause)
{
  /* If the clause carries an explicit alignment, use that.  */
  if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
    return OMP_CLAUSE_ALIGNED_ALIGNMENT (clause);

  /* Otherwise return implementation defined alignment: the largest
     unit alignment of any vector type the target would prefer for
     some scalar integer or float mode.  */
  unsigned int al = 1;
  opt_scalar_mode mode_iter;
  auto_vector_modes modes;
  targetm.vectorize.autovectorize_vector_modes (&modes, true);
  /* Pairs of (scalar class, corresponding vector class); the i += 2
     step visits only the scalar entries, classes[i + 1] supplies the
     vector class to validate against.  */
  static enum mode_class classes[]
    = { MODE_INT, MODE_VECTOR_INT, MODE_FLOAT, MODE_VECTOR_FLOAT };
  for (int i = 0; i < 4; i += 2)
    /* The for loop above dictates that we only walk through scalar classes.  */
    FOR_EACH_MODE_IN_CLASS (mode_iter, classes[i])
      {
	scalar_mode mode = mode_iter.require ();
	machine_mode vmode = targetm.vectorize.preferred_simd_mode (mode);
	if (GET_MODE_CLASS (vmode) != classes[i + 1])
	  continue;
	/* Prefer the widest related vector mode among the target's
	   autovectorization candidates.  */
	machine_mode alt_vmode;
	for (unsigned int j = 0; j < modes.length (); ++j)
	  if (related_vector_mode (modes[j], mode).exists (&alt_vmode)
	      && known_ge (GET_MODE_SIZE (alt_vmode), GET_MODE_SIZE (vmode)))
	    vmode = alt_vmode;
	/* Skip modes the frontend cannot express as a type.  */
	tree type = lang_hooks.types.type_for_mode (mode, 1);
	if (type == NULL_TREE || TYPE_MODE (type) != mode)
	  continue;
	type = build_vector_type_for_mode (type, vmode);
	if (TYPE_MODE (type) != vmode)
	  continue;
	/* Track the maximum alignment seen over all candidates.  */
	if (TYPE_ALIGN_UNIT (type) > al)
	  al = TYPE_ALIGN_UNIT (type);
      }
  return build_int_cst (integer_type_node, al);
}
/* This structure is part of the interface between lower_rec_simd_input_clauses
   and lower_rec_input_clauses.  */

class omplow_simd_context {
public:
  /* Zero-initialize everything; all members are plain data.  */
  omplow_simd_context () { memset (this, 0, sizeof (*this)); }
  tree idx;		/* Index temporary used to subscript the per-lane
			   "omp simd array" temporaries.  */
  tree lane;		/* Lane temporary used to subscript the arrays
			   when accessing the current lane's slot.  */
  tree lastlane;	/* Lane temporary selecting the slot holding the
			   final (e.g. inscan reduced) value; created
			   lazily.  */
  vec<tree, va_heap> simt_eargs;  /* Extra SIMT arguments: addresses of
				     privatized vars, preceded by a
				     NULL_TREE placeholder for the
				     simduid.  */
  gimple_seq simt_dlist;	/* Cleanup statements (clobbers) for SIMT
				   private copies.  */
  poly_uint64_pod max_vf;	/* Maximum vectorization factor; 1 means
				   simd privatization is disabled.  */
  bool is_simt;			/* True when an _SIMT_ clause is present
				   on the simd construct.  */
};
/* Helper function of lower_rec_input_clauses, used for #pragma omp simd
   privatization.  Privatize NEW_VAR for the simd region described by
   CTX and SCTX.  On success, IVAR is set to the per-iteration copy and
   LVAR to the per-lane copy (used after the loop); for inscan
   reductions *RVAR receives a reference to the reduced value and, for
   exclusive scan, *RVAR2 a reference to the scan-phase value.  Returns
   false when privatization is disabled (max_vf == 1).  */

static bool
lower_rec_simd_input_clauses (tree new_var, omp_context *ctx,
			      omplow_simd_context *sctx, tree &ivar,
			      tree &lvar, tree *rvar = NULL,
			      tree *rvar2 = NULL)
{
  /* Compute the maximum vectorization factor lazily on first call.  */
  if (known_eq (sctx->max_vf, 0U))
    {
      sctx->max_vf = sctx->is_simt ? omp_max_simt_vf () : omp_max_vf ();
      if (maybe_gt (sctx->max_vf, 1U))
	{
	  /* Clamp by the safelen clause, if present; a non-constant or
	     sub-1 safelen disables privatization.  */
	  tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
				    OMP_CLAUSE_SAFELEN);
	  if (c)
	    {
	      poly_uint64 safe_len;
	      if (!poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
		  || maybe_lt (safe_len, 1U))
		sctx->max_vf = 1;
	      else
		sctx->max_vf = lower_bound (sctx->max_vf, safe_len);
	    }
	}
      if (sctx->is_simt && !known_eq (sctx->max_vf, 1U))
	{
	  /* Certain reductions force SIMT off (max_vf = 1).  */
	  for (tree c = gimple_omp_for_clauses (ctx->stmt); c;
	       c = OMP_CLAUSE_CHAIN (c))
	    {
	      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
		continue;

	      if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
		{
		  /* UDR reductions are not supported yet for SIMT, disable
		     SIMT.  */
		  sctx->max_vf = 1;
		  break;
		}

	      if (truth_value_p (OMP_CLAUSE_REDUCTION_CODE (c))
		  && !INTEGRAL_TYPE_P (TREE_TYPE (new_var)))
		{
		  /* Doing boolean operations on non-integral types is
		     for conformance only, it's not worth supporting this
		     for SIMT.  */
		  sctx->max_vf = 1;
		  break;
		}
	    }
	}
      if (maybe_gt (sctx->max_vf, 1U))
	{
	  sctx->idx = create_tmp_var (unsigned_type_node);
	  sctx->lane = create_tmp_var (unsigned_type_node);
	}
    }
  if (known_eq (sctx->max_vf, 1U))
    return false;

  if (sctx->is_simt)
    {
      /* For SIMT, gimple registers privatize trivially; everything
	 else gets an addressable private copy whose address is passed
	 as an extra SIMT argument and which is clobbered on exit.  */
      if (is_gimple_reg (new_var))
	{
	  ivar = lvar = new_var;
	  return true;
	}
      tree type = TREE_TYPE (new_var), ptype = build_pointer_type (type);
      ivar = lvar = create_tmp_var (type);
      TREE_ADDRESSABLE (ivar) = 1;
      DECL_ATTRIBUTES (ivar) = tree_cons (get_identifier ("omp simt private"),
					  NULL, DECL_ATTRIBUTES (ivar));
      sctx->simt_eargs.safe_push (build1 (ADDR_EXPR, ptype, ivar));
      tree clobber = build_clobber (type);
      gimple *g = gimple_build_assign (ivar, clobber);
      gimple_seq_add_stmt (&sctx->simt_dlist, g);
    }
  else
    {
      /* For plain SIMD, privatize into a max_vf-element "omp simd
	 array", indexed by sctx->idx inside the loop and sctx->lane
	 afterwards.  */
      tree atype = build_array_type_nelts (TREE_TYPE (new_var), sctx->max_vf);
      tree avar = create_tmp_var_raw (atype);
      if (TREE_ADDRESSABLE (new_var))
	TREE_ADDRESSABLE (avar) = 1;
      DECL_ATTRIBUTES (avar)
	= tree_cons (get_identifier ("omp simd array"), NULL,
		     DECL_ATTRIBUTES (avar));
      gimple_add_tmp_var (avar);
      tree iavar = avar;
      if (rvar && !ctx->for_simd_scan_phase)
	{
	  /* For inscan reductions, create another array temporary,
	     which will hold the reduced value.  */
	  iavar = create_tmp_var_raw (atype);
	  if (TREE_ADDRESSABLE (new_var))
	    TREE_ADDRESSABLE (iavar) = 1;
	  DECL_ATTRIBUTES (iavar)
	    = tree_cons (get_identifier ("omp simd array"), NULL,
			 tree_cons (get_identifier ("omp simd inscan"), NULL,
				    DECL_ATTRIBUTES (iavar)));
	  gimple_add_tmp_var (iavar);
	  ctx->cb.decl_map->put (avar, iavar);
	  if (sctx->lastlane == NULL_TREE)
	    sctx->lastlane = create_tmp_var (unsigned_type_node);
	  *rvar = build4 (ARRAY_REF, TREE_TYPE (new_var), iavar,
			  sctx->lastlane, NULL_TREE, NULL_TREE);
	  TREE_THIS_NOTRAP (*rvar) = 1;

	  if (ctx->scan_exclusive)
	    {
	      /* And for exclusive scan yet another one, which will
		 hold the value during the scan phase.  */
	      tree savar = create_tmp_var_raw (atype);
	      if (TREE_ADDRESSABLE (new_var))
		TREE_ADDRESSABLE (savar) = 1;
	      DECL_ATTRIBUTES (savar)
		= tree_cons (get_identifier ("omp simd array"), NULL,
			     tree_cons (get_identifier ("omp simd inscan "
							"exclusive"), NULL,
					DECL_ATTRIBUTES (savar)));
	      gimple_add_tmp_var (savar);
	      ctx->cb.decl_map->put (iavar, savar);
	      *rvar2 = build4 (ARRAY_REF, TREE_TYPE (new_var), savar,
			       sctx->idx, NULL_TREE, NULL_TREE);
	      TREE_THIS_NOTRAP (*rvar2) = 1;
	    }
	}
      ivar = build4 (ARRAY_REF, TREE_TYPE (new_var), iavar, sctx->idx,
		     NULL_TREE, NULL_TREE);
      lvar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, sctx->lane,
		     NULL_TREE, NULL_TREE);
      TREE_THIS_NOTRAP (ivar) = 1;
      TREE_THIS_NOTRAP (lvar) = 1;
    }
  /* Redirect uses of NEW_VAR to the per-lane copy.  */
  if (DECL_P (new_var))
    {
      SET_DECL_VALUE_EXPR (new_var, lvar);
      DECL_HAS_VALUE_EXPR_P (new_var) = 1;
    }
  return true;
}
4745 /* Helper function of lower_rec_input_clauses. For a reference
4746 in simd reduction, add an underlying variable it will reference. */
4748 static void
4749 handle_simd_reference (location_t loc, tree new_vard, gimple_seq *ilist)
4751 tree z = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_vard)));
4752 if (TREE_CONSTANT (z))
4754 z = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_vard)),
4755 get_name (new_vard));
4756 gimple_add_tmp_var (z);
4757 TREE_ADDRESSABLE (z) = 1;
4758 z = build_fold_addr_expr_loc (loc, z);
4759 gimplify_assign (new_vard, z, ilist);
4763 /* Helper function for lower_rec_input_clauses. Emit into ilist sequence
4764 code to emit (type) (tskred_temp[idx]). */
4766 static tree
4767 task_reduction_read (gimple_seq *ilist, tree tskred_temp, tree type,
4768 unsigned idx)
4770 unsigned HOST_WIDE_INT sz
4771 = tree_to_uhwi (TYPE_SIZE_UNIT (pointer_sized_int_node));
4772 tree r = build2 (MEM_REF, pointer_sized_int_node,
4773 tskred_temp, build_int_cst (TREE_TYPE (tskred_temp),
4774 idx * sz));
4775 tree v = create_tmp_var (pointer_sized_int_node);
4776 gimple *g = gimple_build_assign (v, r);
4777 gimple_seq_add_stmt (ilist, g);
4778 if (!useless_type_conversion_p (type, pointer_sized_int_node))
4780 v = create_tmp_var (type);
4781 g = gimple_build_assign (v, NOP_EXPR, gimple_assign_lhs (g));
4782 gimple_seq_add_stmt (ilist, g);
4784 return v;
/* Lower early initialization of privatized variable NEW_VAR
   if it needs an allocator (has allocate clause).  VAR is the original
   variable used as the key into CTX's allocate_map; ALLOCATOR and
   ALLOCATE_PTR are in/out: on success ALLOCATOR holds the (possibly
   gimplified) allocator expression and ALLOCATE_PTR the pointer
   returned by GOMP_alloc.  IS_REF says whether NEW_VAR is accessed by
   reference; SIZE, when non-NULL, overrides the allocation size.
   Statements are appended to ILIST.  Returns true when an allocation
   was emitted.  */

static bool
lower_private_allocate (tree var, tree new_var, tree &allocator,
			tree &allocate_ptr, gimple_seq *ilist,
			omp_context *ctx, bool is_ref, tree size)
{
  /* Caller may have already resolved the allocator.  */
  if (allocator)
    return false;
  gcc_assert (allocate_ptr == NULL_TREE);
  if (ctx->allocate_map
      && (DECL_P (new_var) || (TYPE_P (new_var) && size)))
    if (tree *allocatorp = ctx->allocate_map->get (var))
      allocator = *allocatorp;
  if (allocator == NULL_TREE)
    return false;
  /* By-reference privatization is handled by the IS_REF path only.  */
  if (!is_ref && omp_privatize_by_reference (var))
    {
      allocator = NULL_TREE;
      return false;
    }

  /* A TREE_LIST allocator carries an explicit alignment in its value.  */
  unsigned HOST_WIDE_INT ialign = 0;
  if (TREE_CODE (allocator) == TREE_LIST)
    {
      ialign = tree_to_uhwi (TREE_VALUE (allocator));
      allocator = TREE_PURPOSE (allocator);
    }
  if (TREE_CODE (allocator) != INTEGER_CST)
    allocator = build_outer_var_ref (allocator, ctx);
  allocator = fold_convert (pointer_sized_int_node, allocator);
  /* Evaluate a non-constant allocator into a temporary once.  */
  if (TREE_CODE (allocator) != INTEGER_CST)
    {
      tree var = create_tmp_var (TREE_TYPE (allocator));
      gimplify_assign (var, allocator, ilist);
      allocator = var;
    }

  /* Determine the pointer type, alignment and size of the allocation
     depending on whether NEW_VAR is a type, a reference or a decl.  */
  tree ptr_type, align, sz = size;
  if (TYPE_P (new_var))
    {
      ptr_type = build_pointer_type (new_var);
      ialign = MAX (ialign, TYPE_ALIGN_UNIT (new_var));
    }
  else if (is_ref)
    {
      ptr_type = build_pointer_type (TREE_TYPE (TREE_TYPE (new_var)));
      ialign = MAX (ialign, TYPE_ALIGN_UNIT (TREE_TYPE (ptr_type)));
    }
  else
    {
      ptr_type = build_pointer_type (TREE_TYPE (new_var));
      ialign = MAX (ialign, DECL_ALIGN_UNIT (new_var));
      if (sz == NULL_TREE)
	sz = fold_convert (size_type_node, DECL_SIZE_UNIT (new_var));
    }
  align = build_int_cst (size_type_node, ialign);
  /* Evaluate a non-constant size into a temporary once.  */
  if (TREE_CODE (sz) != INTEGER_CST)
    {
      tree szvar = create_tmp_var (size_type_node);
      gimplify_assign (szvar, sz, ilist);
      sz = szvar;
    }
  /* Emit allocate_ptr = GOMP_alloc (align, sz, allocator).  */
  allocate_ptr = create_tmp_var (ptr_type);
  tree a = builtin_decl_explicit (BUILT_IN_GOMP_ALLOC);
  gimple *g = gimple_build_call (a, 3, align, sz, allocator);
  gimple_call_set_lhs (g, allocate_ptr);
  gimple_seq_add_stmt (ilist, g);
  /* For non-references, make NEW_VAR stand for *allocate_ptr.  */
  if (!is_ref)
    {
      tree x = build_simple_mem_ref (allocate_ptr);
      TREE_THIS_NOTRAP (x) = 1;
      SET_DECL_VALUE_EXPR (new_var, x);
      DECL_HAS_VALUE_EXPR_P (new_var) = 1;
    }
  return true;
}
4866 /* Generate code to implement the input clauses, FIRSTPRIVATE and COPYIN,
4867 from the receiver (aka child) side and initializers for REFERENCE_TYPE
4868 private variables. Initialization statements go in ILIST, while calls
4869 to destructors go in DLIST. */
4871 static void
4872 lower_rec_input_clauses (tree clauses, gimple_seq *ilist, gimple_seq *dlist,
4873 omp_context *ctx, struct omp_for_data *fd)
4875 tree c, copyin_seq, x, ptr;
4876 bool copyin_by_ref = false;
4877 bool lastprivate_firstprivate = false;
4878 bool reduction_omp_orig_ref = false;
4879 int pass;
4880 bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
4881 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD);
4882 omplow_simd_context sctx = omplow_simd_context ();
4883 tree simt_lane = NULL_TREE, simtrec = NULL_TREE;
4884 tree ivar = NULL_TREE, lvar = NULL_TREE, uid = NULL_TREE;
4885 gimple_seq llist[4] = { };
4886 tree nonconst_simd_if = NULL_TREE;
4888 copyin_seq = NULL;
4889 sctx.is_simt = is_simd && omp_find_clause (clauses, OMP_CLAUSE__SIMT_);
4891 /* Set max_vf=1 (which will later enforce safelen=1) in simd loops
4892 with data sharing clauses referencing variable sized vars. That
4893 is unnecessarily hard to support and very unlikely to result in
4894 vectorized code anyway. */
4895 if (is_simd)
4896 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
4897 switch (OMP_CLAUSE_CODE (c))
4899 case OMP_CLAUSE_LINEAR:
4900 if (OMP_CLAUSE_LINEAR_ARRAY (c))
4901 sctx.max_vf = 1;
4902 /* FALLTHRU */
4903 case OMP_CLAUSE_PRIVATE:
4904 case OMP_CLAUSE_FIRSTPRIVATE:
4905 case OMP_CLAUSE_LASTPRIVATE:
4906 if (is_variable_sized (OMP_CLAUSE_DECL (c)))
4907 sctx.max_vf = 1;
4908 else if (omp_privatize_by_reference (OMP_CLAUSE_DECL (c)))
4910 tree rtype = TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c)));
4911 if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype)))
4912 sctx.max_vf = 1;
4914 break;
4915 case OMP_CLAUSE_REDUCTION:
4916 case OMP_CLAUSE_IN_REDUCTION:
4917 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
4918 || is_variable_sized (OMP_CLAUSE_DECL (c)))
4919 sctx.max_vf = 1;
4920 else if (omp_privatize_by_reference (OMP_CLAUSE_DECL (c)))
4922 tree rtype = TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c)));
4923 if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype)))
4924 sctx.max_vf = 1;
4926 break;
4927 case OMP_CLAUSE_IF:
4928 if (integer_zerop (OMP_CLAUSE_IF_EXPR (c)))
4929 sctx.max_vf = 1;
4930 else if (TREE_CODE (OMP_CLAUSE_IF_EXPR (c)) != INTEGER_CST)
4931 nonconst_simd_if = OMP_CLAUSE_IF_EXPR (c);
4932 break;
4933 case OMP_CLAUSE_SIMDLEN:
4934 if (integer_onep (OMP_CLAUSE_SIMDLEN_EXPR (c)))
4935 sctx.max_vf = 1;
4936 break;
4937 case OMP_CLAUSE__CONDTEMP_:
4938 /* FIXME: lastprivate(conditional:) not handled for SIMT yet. */
4939 if (sctx.is_simt)
4940 sctx.max_vf = 1;
4941 break;
4942 default:
4943 continue;
4946 /* Add a placeholder for simduid. */
4947 if (sctx.is_simt && maybe_ne (sctx.max_vf, 1U))
4948 sctx.simt_eargs.safe_push (NULL_TREE);
4950 unsigned task_reduction_cnt = 0;
4951 unsigned task_reduction_cntorig = 0;
4952 unsigned task_reduction_cnt_full = 0;
4953 unsigned task_reduction_cntorig_full = 0;
4954 unsigned task_reduction_other_cnt = 0;
4955 tree tskred_atype = NULL_TREE, tskred_avar = NULL_TREE;
4956 tree tskred_base = NULL_TREE, tskred_temp = NULL_TREE;
4957 /* Do all the fixed sized types in the first pass, and the variable sized
4958 types in the second pass. This makes sure that the scalar arguments to
4959 the variable sized types are processed before we use them in the
4960 variable sized operations. For task reductions we use 4 passes, in the
4961 first two we ignore them, in the third one gather arguments for
4962 GOMP_task_reduction_remap call and in the last pass actually handle
4963 the task reductions. */
4964 for (pass = 0; pass < ((task_reduction_cnt || task_reduction_other_cnt)
4965 ? 4 : 2); ++pass)
4967 if (pass == 2 && task_reduction_cnt)
4969 tskred_atype
4970 = build_array_type_nelts (ptr_type_node, task_reduction_cnt
4971 + task_reduction_cntorig);
4972 tskred_avar = create_tmp_var_raw (tskred_atype);
4973 gimple_add_tmp_var (tskred_avar);
4974 TREE_ADDRESSABLE (tskred_avar) = 1;
4975 task_reduction_cnt_full = task_reduction_cnt;
4976 task_reduction_cntorig_full = task_reduction_cntorig;
4978 else if (pass == 3 && task_reduction_cnt)
4980 x = builtin_decl_explicit (BUILT_IN_GOMP_TASK_REDUCTION_REMAP);
4981 gimple *g
4982 = gimple_build_call (x, 3, size_int (task_reduction_cnt),
4983 size_int (task_reduction_cntorig),
4984 build_fold_addr_expr (tskred_avar));
4985 gimple_seq_add_stmt (ilist, g);
4987 if (pass == 3 && task_reduction_other_cnt)
4989 /* For reduction clauses, build
4990 tskred_base = (void *) tskred_temp[2]
4991 + omp_get_thread_num () * tskred_temp[1]
4992 or if tskred_temp[1] is known to be constant, that constant
4993 directly. This is the start of the private reduction copy block
4994 for the current thread. */
4995 tree v = create_tmp_var (integer_type_node);
4996 x = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
4997 gimple *g = gimple_build_call (x, 0);
4998 gimple_call_set_lhs (g, v);
4999 gimple_seq_add_stmt (ilist, g);
5000 c = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
5001 tskred_temp = OMP_CLAUSE_DECL (c);
5002 if (is_taskreg_ctx (ctx))
5003 tskred_temp = lookup_decl (tskred_temp, ctx);
5004 tree v2 = create_tmp_var (sizetype);
5005 g = gimple_build_assign (v2, NOP_EXPR, v);
5006 gimple_seq_add_stmt (ilist, g);
5007 if (ctx->task_reductions[0])
5008 v = fold_convert (sizetype, ctx->task_reductions[0]);
5009 else
5010 v = task_reduction_read (ilist, tskred_temp, sizetype, 1);
5011 tree v3 = create_tmp_var (sizetype);
5012 g = gimple_build_assign (v3, MULT_EXPR, v2, v);
5013 gimple_seq_add_stmt (ilist, g);
5014 v = task_reduction_read (ilist, tskred_temp, ptr_type_node, 2);
5015 tskred_base = create_tmp_var (ptr_type_node);
5016 g = gimple_build_assign (tskred_base, POINTER_PLUS_EXPR, v, v3);
5017 gimple_seq_add_stmt (ilist, g);
5019 task_reduction_cnt = 0;
5020 task_reduction_cntorig = 0;
5021 task_reduction_other_cnt = 0;
5022 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
5024 enum omp_clause_code c_kind = OMP_CLAUSE_CODE (c);
5025 tree var, new_var;
5026 bool by_ref;
5027 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
5028 bool task_reduction_p = false;
5029 bool task_reduction_needs_orig_p = false;
5030 tree cond = NULL_TREE;
5031 tree allocator, allocate_ptr;
5033 switch (c_kind)
5035 case OMP_CLAUSE_PRIVATE:
5036 if (OMP_CLAUSE_PRIVATE_DEBUG (c))
5037 continue;
5038 break;
5039 case OMP_CLAUSE_SHARED:
5040 /* Ignore shared directives in teams construct inside
5041 of target construct. */
5042 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
5043 && !is_host_teams_ctx (ctx))
5044 continue;
5045 if (maybe_lookup_decl (OMP_CLAUSE_DECL (c), ctx) == NULL)
5047 gcc_assert (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c)
5048 || is_global_var (OMP_CLAUSE_DECL (c)));
5049 continue;
5051 case OMP_CLAUSE_FIRSTPRIVATE:
5052 case OMP_CLAUSE_COPYIN:
5053 break;
5054 case OMP_CLAUSE_LINEAR:
5055 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
5056 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
5057 lastprivate_firstprivate = true;
5058 break;
5059 case OMP_CLAUSE_REDUCTION:
5060 case OMP_CLAUSE_IN_REDUCTION:
5061 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
5062 || is_task_ctx (ctx)
5063 || OMP_CLAUSE_REDUCTION_TASK (c))
5065 task_reduction_p = true;
5066 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
5068 task_reduction_other_cnt++;
5069 if (pass == 2)
5070 continue;
5072 else
5073 task_reduction_cnt++;
5074 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
5076 var = OMP_CLAUSE_DECL (c);
5077 /* If var is a global variable that isn't privatized
5078 in outer contexts, we don't need to look up the
5079 original address, it is always the address of the
5080 global variable itself. */
5081 if (!DECL_P (var)
5082 || omp_privatize_by_reference (var)
5083 || !is_global_var
5084 (maybe_lookup_decl_in_outer_ctx (var, ctx)))
5086 task_reduction_needs_orig_p = true;
5087 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5088 task_reduction_cntorig++;
5092 else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
5093 reduction_omp_orig_ref = true;
5094 break;
5095 case OMP_CLAUSE__REDUCTEMP_:
5096 if (!is_taskreg_ctx (ctx))
5097 continue;
5098 /* FALLTHRU */
5099 case OMP_CLAUSE__LOOPTEMP_:
5100 /* Handle _looptemp_/_reductemp_ clauses only on
5101 parallel/task. */
5102 if (fd)
5103 continue;
5104 break;
5105 case OMP_CLAUSE_LASTPRIVATE:
5106 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
5108 lastprivate_firstprivate = true;
5109 if (pass != 0 || is_taskloop_ctx (ctx))
5110 continue;
5112 /* Even without corresponding firstprivate, if
5113 decl is Fortran allocatable, it needs outer var
5114 reference. */
5115 else if (pass == 0
5116 && lang_hooks.decls.omp_private_outer_ref
5117 (OMP_CLAUSE_DECL (c)))
5118 lastprivate_firstprivate = true;
5119 break;
5120 case OMP_CLAUSE_ALIGNED:
5121 if (pass != 1)
5122 continue;
5123 var = OMP_CLAUSE_DECL (c);
5124 if (TREE_CODE (TREE_TYPE (var)) == POINTER_TYPE
5125 && !is_global_var (var))
5127 new_var = maybe_lookup_decl (var, ctx);
5128 if (new_var == NULL_TREE)
5129 new_var = maybe_lookup_decl_in_outer_ctx (var, ctx);
5130 x = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
5131 tree alarg = omp_clause_aligned_alignment (c);
5132 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
5133 x = build_call_expr_loc (clause_loc, x, 2, new_var, alarg);
5134 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
5135 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
5136 gimplify_and_add (x, ilist);
5138 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
5139 && is_global_var (var))
5141 tree ptype = build_pointer_type (TREE_TYPE (var)), t, t2;
5142 new_var = lookup_decl (var, ctx);
5143 t = maybe_lookup_decl_in_outer_ctx (var, ctx);
5144 t = build_fold_addr_expr_loc (clause_loc, t);
5145 t2 = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
5146 tree alarg = omp_clause_aligned_alignment (c);
5147 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
5148 t = build_call_expr_loc (clause_loc, t2, 2, t, alarg);
5149 t = fold_convert_loc (clause_loc, ptype, t);
5150 x = create_tmp_var (ptype);
5151 t = build2 (MODIFY_EXPR, ptype, x, t);
5152 gimplify_and_add (t, ilist);
5153 t = build_simple_mem_ref_loc (clause_loc, x);
5154 SET_DECL_VALUE_EXPR (new_var, t);
5155 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5157 continue;
5158 case OMP_CLAUSE__CONDTEMP_:
5159 if (is_parallel_ctx (ctx)
5160 || (is_simd && !OMP_CLAUSE__CONDTEMP__ITER (c)))
5161 break;
5162 continue;
5163 default:
5164 continue;
5167 if (task_reduction_p != (pass >= 2))
5168 continue;
5170 allocator = NULL_TREE;
5171 allocate_ptr = NULL_TREE;
5172 new_var = var = OMP_CLAUSE_DECL (c);
5173 if ((c_kind == OMP_CLAUSE_REDUCTION
5174 || c_kind == OMP_CLAUSE_IN_REDUCTION)
5175 && TREE_CODE (var) == MEM_REF)
5177 var = TREE_OPERAND (var, 0);
5178 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
5179 var = TREE_OPERAND (var, 0);
5180 if (TREE_CODE (var) == INDIRECT_REF
5181 || TREE_CODE (var) == ADDR_EXPR)
5182 var = TREE_OPERAND (var, 0);
5183 if (is_variable_sized (var))
5185 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
5186 var = DECL_VALUE_EXPR (var);
5187 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
5188 var = TREE_OPERAND (var, 0);
5189 gcc_assert (DECL_P (var));
5191 new_var = var;
5193 if (c_kind == OMP_CLAUSE_IN_REDUCTION && is_omp_target (ctx->stmt))
5195 splay_tree_key key = (splay_tree_key) &DECL_CONTEXT (var);
5196 new_var = (tree) splay_tree_lookup (ctx->field_map, key)->value;
5198 else if (c_kind != OMP_CLAUSE_COPYIN)
5199 new_var = lookup_decl (var, ctx);
5201 if (c_kind == OMP_CLAUSE_SHARED || c_kind == OMP_CLAUSE_COPYIN)
5203 if (pass != 0)
5204 continue;
5206 /* C/C++ array section reductions. */
5207 else if ((c_kind == OMP_CLAUSE_REDUCTION
5208 || c_kind == OMP_CLAUSE_IN_REDUCTION)
5209 && var != OMP_CLAUSE_DECL (c))
5211 if (pass == 0)
5212 continue;
5214 tree bias = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
5215 tree orig_var = TREE_OPERAND (OMP_CLAUSE_DECL (c), 0);
5217 if (TREE_CODE (orig_var) == POINTER_PLUS_EXPR)
5219 tree b = TREE_OPERAND (orig_var, 1);
5220 if (is_omp_target (ctx->stmt))
5221 b = NULL_TREE;
5222 else
5223 b = maybe_lookup_decl (b, ctx);
5224 if (b == NULL)
5226 b = TREE_OPERAND (orig_var, 1);
5227 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
5229 if (integer_zerop (bias))
5230 bias = b;
5231 else
5233 bias = fold_convert_loc (clause_loc,
5234 TREE_TYPE (b), bias);
5235 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
5236 TREE_TYPE (b), b, bias);
5238 orig_var = TREE_OPERAND (orig_var, 0);
5240 if (pass == 2)
5242 tree out = maybe_lookup_decl_in_outer_ctx (var, ctx);
5243 if (is_global_var (out)
5244 && TREE_CODE (TREE_TYPE (out)) != POINTER_TYPE
5245 && (TREE_CODE (TREE_TYPE (out)) != REFERENCE_TYPE
5246 || (TREE_CODE (TREE_TYPE (TREE_TYPE (out)))
5247 != POINTER_TYPE)))
5248 x = var;
5249 else if (is_omp_target (ctx->stmt))
5250 x = out;
5251 else
5253 bool by_ref = use_pointer_for_field (var, NULL);
5254 x = build_receiver_ref (var, by_ref, ctx);
5255 if (TREE_CODE (TREE_TYPE (var)) == REFERENCE_TYPE
5256 && (TREE_CODE (TREE_TYPE (TREE_TYPE (var)))
5257 == POINTER_TYPE))
5258 x = build_fold_addr_expr (x);
5260 if (TREE_CODE (orig_var) == INDIRECT_REF)
5261 x = build_simple_mem_ref (x);
5262 else if (TREE_CODE (orig_var) == ADDR_EXPR)
5264 if (var == TREE_OPERAND (orig_var, 0))
5265 x = build_fold_addr_expr (x);
5267 bias = fold_convert (sizetype, bias);
5268 x = fold_convert (ptr_type_node, x);
5269 x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
5270 TREE_TYPE (x), x, bias);
5271 unsigned cnt = task_reduction_cnt - 1;
5272 if (!task_reduction_needs_orig_p)
5273 cnt += (task_reduction_cntorig_full
5274 - task_reduction_cntorig);
5275 else
5276 cnt = task_reduction_cntorig - 1;
5277 tree r = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5278 size_int (cnt), NULL_TREE, NULL_TREE);
5279 gimplify_assign (r, x, ilist);
5280 continue;
5283 if (TREE_CODE (orig_var) == INDIRECT_REF
5284 || TREE_CODE (orig_var) == ADDR_EXPR)
5285 orig_var = TREE_OPERAND (orig_var, 0);
5286 tree d = OMP_CLAUSE_DECL (c);
5287 tree type = TREE_TYPE (d);
5288 gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
5289 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
5290 tree sz = v;
5291 const char *name = get_name (orig_var);
5292 if (pass != 3 && !TREE_CONSTANT (v))
5294 tree t;
5295 if (is_omp_target (ctx->stmt))
5296 t = NULL_TREE;
5297 else
5298 t = maybe_lookup_decl (v, ctx);
5299 if (t)
5300 v = t;
5301 else
5302 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
5303 gimplify_expr (&v, ilist, NULL, is_gimple_val, fb_rvalue);
5304 t = fold_build2_loc (clause_loc, PLUS_EXPR,
5305 TREE_TYPE (v), v,
5306 build_int_cst (TREE_TYPE (v), 1));
5307 sz = fold_build2_loc (clause_loc, MULT_EXPR,
5308 TREE_TYPE (v), t,
5309 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5311 if (pass == 3)
5313 tree xv = create_tmp_var (ptr_type_node);
5314 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5316 unsigned cnt = task_reduction_cnt - 1;
5317 if (!task_reduction_needs_orig_p)
5318 cnt += (task_reduction_cntorig_full
5319 - task_reduction_cntorig);
5320 else
5321 cnt = task_reduction_cntorig - 1;
5322 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5323 size_int (cnt), NULL_TREE, NULL_TREE);
5325 gimple *g = gimple_build_assign (xv, x);
5326 gimple_seq_add_stmt (ilist, g);
5328 else
5330 unsigned int idx = *ctx->task_reduction_map->get (c);
5331 tree off;
5332 if (ctx->task_reductions[1 + idx])
5333 off = fold_convert (sizetype,
5334 ctx->task_reductions[1 + idx]);
5335 else
5336 off = task_reduction_read (ilist, tskred_temp, sizetype,
5337 7 + 3 * idx + 1);
5338 gimple *g = gimple_build_assign (xv, POINTER_PLUS_EXPR,
5339 tskred_base, off);
5340 gimple_seq_add_stmt (ilist, g);
5342 x = fold_convert (build_pointer_type (boolean_type_node),
5343 xv);
5344 if (TREE_CONSTANT (v))
5345 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (x), x,
5346 TYPE_SIZE_UNIT (type));
5347 else
5349 tree t;
5350 if (is_omp_target (ctx->stmt))
5351 t = NULL_TREE;
5352 else
5353 t = maybe_lookup_decl (v, ctx);
5354 if (t)
5355 v = t;
5356 else
5357 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
5358 gimplify_expr (&v, ilist, NULL, is_gimple_val,
5359 fb_rvalue);
5360 t = fold_build2_loc (clause_loc, PLUS_EXPR,
5361 TREE_TYPE (v), v,
5362 build_int_cst (TREE_TYPE (v), 1));
5363 t = fold_build2_loc (clause_loc, MULT_EXPR,
5364 TREE_TYPE (v), t,
5365 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5366 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (x), x, t);
5368 cond = create_tmp_var (TREE_TYPE (x));
5369 gimplify_assign (cond, x, ilist);
5370 x = xv;
5372 else if (lower_private_allocate (var, type, allocator,
5373 allocate_ptr, ilist, ctx,
5374 true,
5375 TREE_CONSTANT (v)
5376 ? TYPE_SIZE_UNIT (type)
5377 : sz))
5378 x = allocate_ptr;
5379 else if (TREE_CONSTANT (v))
5381 x = create_tmp_var_raw (type, name);
5382 gimple_add_tmp_var (x);
5383 TREE_ADDRESSABLE (x) = 1;
5384 x = build_fold_addr_expr_loc (clause_loc, x);
5386 else
5388 tree atmp
5389 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
5390 tree al = size_int (TYPE_ALIGN (TREE_TYPE (type)));
5391 x = build_call_expr_loc (clause_loc, atmp, 2, sz, al);
5394 tree ptype = build_pointer_type (TREE_TYPE (type));
5395 x = fold_convert_loc (clause_loc, ptype, x);
5396 tree y = create_tmp_var (ptype, name);
5397 gimplify_assign (y, x, ilist);
5398 x = y;
5399 tree yb = y;
5401 if (!integer_zerop (bias))
5403 bias = fold_convert_loc (clause_loc, pointer_sized_int_node,
5404 bias);
5405 yb = fold_convert_loc (clause_loc, pointer_sized_int_node,
5407 yb = fold_build2_loc (clause_loc, MINUS_EXPR,
5408 pointer_sized_int_node, yb, bias);
5409 x = fold_convert_loc (clause_loc, TREE_TYPE (x), yb);
5410 yb = create_tmp_var (ptype, name);
5411 gimplify_assign (yb, x, ilist);
5412 x = yb;
5415 d = TREE_OPERAND (d, 0);
5416 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
5417 d = TREE_OPERAND (d, 0);
5418 if (TREE_CODE (d) == ADDR_EXPR)
5420 if (orig_var != var)
5422 gcc_assert (is_variable_sized (orig_var));
5423 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var),
5425 gimplify_assign (new_var, x, ilist);
5426 tree new_orig_var = lookup_decl (orig_var, ctx);
5427 tree t = build_fold_indirect_ref (new_var);
5428 DECL_IGNORED_P (new_var) = 0;
5429 TREE_THIS_NOTRAP (t) = 1;
5430 SET_DECL_VALUE_EXPR (new_orig_var, t);
5431 DECL_HAS_VALUE_EXPR_P (new_orig_var) = 1;
5433 else
5435 x = build2 (MEM_REF, TREE_TYPE (new_var), x,
5436 build_int_cst (ptype, 0));
5437 SET_DECL_VALUE_EXPR (new_var, x);
5438 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5441 else
5443 gcc_assert (orig_var == var);
5444 if (TREE_CODE (d) == INDIRECT_REF)
5446 x = create_tmp_var (ptype, name);
5447 TREE_ADDRESSABLE (x) = 1;
5448 gimplify_assign (x, yb, ilist);
5449 x = build_fold_addr_expr_loc (clause_loc, x);
5451 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
5452 gimplify_assign (new_var, x, ilist);
5454 /* GOMP_taskgroup_reduction_register memsets the whole
5455 array to zero. If the initializer is zero, we don't
5456 need to initialize it again, just mark it as ever
5457 used unconditionally, i.e. cond = true. */
5458 if (cond
5459 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) == NULL_TREE
5460 && initializer_zerop (omp_reduction_init (c,
5461 TREE_TYPE (type))))
5463 gimple *g = gimple_build_assign (build_simple_mem_ref (cond),
5464 boolean_true_node);
5465 gimple_seq_add_stmt (ilist, g);
5466 continue;
5468 tree end = create_artificial_label (UNKNOWN_LOCATION);
5469 if (cond)
5471 gimple *g;
5472 if (!is_parallel_ctx (ctx))
5474 tree condv = create_tmp_var (boolean_type_node);
5475 g = gimple_build_assign (condv,
5476 build_simple_mem_ref (cond));
5477 gimple_seq_add_stmt (ilist, g);
5478 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
5479 g = gimple_build_cond (NE_EXPR, condv,
5480 boolean_false_node, end, lab1);
5481 gimple_seq_add_stmt (ilist, g);
5482 gimple_seq_add_stmt (ilist, gimple_build_label (lab1));
5484 g = gimple_build_assign (build_simple_mem_ref (cond),
5485 boolean_true_node);
5486 gimple_seq_add_stmt (ilist, g);
5489 tree y1 = create_tmp_var (ptype);
5490 gimplify_assign (y1, y, ilist);
5491 tree i2 = NULL_TREE, y2 = NULL_TREE;
5492 tree body2 = NULL_TREE, end2 = NULL_TREE;
5493 tree y3 = NULL_TREE, y4 = NULL_TREE;
5494 if (task_reduction_needs_orig_p)
5496 y3 = create_tmp_var (ptype);
5497 tree ref;
5498 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5499 ref = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5500 size_int (task_reduction_cnt_full
5501 + task_reduction_cntorig - 1),
5502 NULL_TREE, NULL_TREE);
5503 else
5505 unsigned int idx = *ctx->task_reduction_map->get (c);
5506 ref = task_reduction_read (ilist, tskred_temp, ptype,
5507 7 + 3 * idx);
5509 gimplify_assign (y3, ref, ilist);
5511 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) || is_simd)
5513 if (pass != 3)
5515 y2 = create_tmp_var (ptype);
5516 gimplify_assign (y2, y, ilist);
5518 if (is_simd || OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
5520 tree ref = build_outer_var_ref (var, ctx);
5521 /* For ref build_outer_var_ref already performs this. */
5522 if (TREE_CODE (d) == INDIRECT_REF)
5523 gcc_assert (omp_privatize_by_reference (var));
5524 else if (TREE_CODE (d) == ADDR_EXPR)
5525 ref = build_fold_addr_expr (ref);
5526 else if (omp_privatize_by_reference (var))
5527 ref = build_fold_addr_expr (ref);
5528 ref = fold_convert_loc (clause_loc, ptype, ref);
5529 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
5530 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
5532 y3 = create_tmp_var (ptype);
5533 gimplify_assign (y3, unshare_expr (ref), ilist);
5535 if (is_simd)
5537 y4 = create_tmp_var (ptype);
5538 gimplify_assign (y4, ref, dlist);
5542 tree i = create_tmp_var (TREE_TYPE (v));
5543 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), ilist);
5544 tree body = create_artificial_label (UNKNOWN_LOCATION);
5545 gimple_seq_add_stmt (ilist, gimple_build_label (body));
5546 if (y2)
5548 i2 = create_tmp_var (TREE_TYPE (v));
5549 gimplify_assign (i2, build_int_cst (TREE_TYPE (v), 0), dlist);
5550 body2 = create_artificial_label (UNKNOWN_LOCATION);
5551 end2 = create_artificial_label (UNKNOWN_LOCATION);
5552 gimple_seq_add_stmt (dlist, gimple_build_label (body2));
5554 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5556 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
5557 tree decl_placeholder
5558 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
5559 SET_DECL_VALUE_EXPR (decl_placeholder,
5560 build_simple_mem_ref (y1));
5561 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
5562 SET_DECL_VALUE_EXPR (placeholder,
5563 y3 ? build_simple_mem_ref (y3)
5564 : error_mark_node);
5565 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
5566 x = lang_hooks.decls.omp_clause_default_ctor
5567 (c, build_simple_mem_ref (y1),
5568 y3 ? build_simple_mem_ref (y3) : NULL_TREE);
5569 if (x)
5570 gimplify_and_add (x, ilist);
5571 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
5573 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
5574 lower_omp (&tseq, ctx);
5575 gimple_seq_add_seq (ilist, tseq);
5577 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
5578 if (is_simd)
5580 SET_DECL_VALUE_EXPR (decl_placeholder,
5581 build_simple_mem_ref (y2));
5582 SET_DECL_VALUE_EXPR (placeholder,
5583 build_simple_mem_ref (y4));
5584 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
5585 lower_omp (&tseq, ctx);
5586 gimple_seq_add_seq (dlist, tseq);
5587 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5589 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
5590 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 0;
5591 if (y2)
5593 x = lang_hooks.decls.omp_clause_dtor
5594 (c, build_simple_mem_ref (y2));
5595 if (x)
5596 gimplify_and_add (x, dlist);
5599 else
5601 x = omp_reduction_init (c, TREE_TYPE (type));
5602 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
5604 /* reduction(-:var) sums up the partial results, so it
5605 acts identically to reduction(+:var). */
5606 if (code == MINUS_EXPR)
5607 code = PLUS_EXPR;
5609 gimplify_assign (build_simple_mem_ref (y1), x, ilist);
5610 if (is_simd)
5612 x = build2 (code, TREE_TYPE (type),
5613 build_simple_mem_ref (y4),
5614 build_simple_mem_ref (y2));
5615 gimplify_assign (build_simple_mem_ref (y4), x, dlist);
5618 gimple *g
5619 = gimple_build_assign (y1, POINTER_PLUS_EXPR, y1,
5620 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5621 gimple_seq_add_stmt (ilist, g);
5622 if (y3)
5624 g = gimple_build_assign (y3, POINTER_PLUS_EXPR, y3,
5625 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5626 gimple_seq_add_stmt (ilist, g);
5628 g = gimple_build_assign (i, PLUS_EXPR, i,
5629 build_int_cst (TREE_TYPE (i), 1));
5630 gimple_seq_add_stmt (ilist, g);
5631 g = gimple_build_cond (LE_EXPR, i, v, body, end);
5632 gimple_seq_add_stmt (ilist, g);
5633 gimple_seq_add_stmt (ilist, gimple_build_label (end));
5634 if (y2)
5636 g = gimple_build_assign (y2, POINTER_PLUS_EXPR, y2,
5637 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5638 gimple_seq_add_stmt (dlist, g);
5639 if (y4)
5641 g = gimple_build_assign
5642 (y4, POINTER_PLUS_EXPR, y4,
5643 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5644 gimple_seq_add_stmt (dlist, g);
5646 g = gimple_build_assign (i2, PLUS_EXPR, i2,
5647 build_int_cst (TREE_TYPE (i2), 1));
5648 gimple_seq_add_stmt (dlist, g);
5649 g = gimple_build_cond (LE_EXPR, i2, v, body2, end2);
5650 gimple_seq_add_stmt (dlist, g);
5651 gimple_seq_add_stmt (dlist, gimple_build_label (end2));
5653 if (allocator)
5655 tree f = builtin_decl_explicit (BUILT_IN_GOMP_FREE);
5656 g = gimple_build_call (f, 2, allocate_ptr, allocator);
5657 gimple_seq_add_stmt (dlist, g);
5659 continue;
5661 else if (pass == 2)
5663 tree out = maybe_lookup_decl_in_outer_ctx (var, ctx);
5664 if (is_global_var (out))
5665 x = var;
5666 else if (is_omp_target (ctx->stmt))
5667 x = out;
5668 else
5670 bool by_ref = use_pointer_for_field (var, ctx);
5671 x = build_receiver_ref (var, by_ref, ctx);
5673 if (!omp_privatize_by_reference (var))
5674 x = build_fold_addr_expr (x);
5675 x = fold_convert (ptr_type_node, x);
5676 unsigned cnt = task_reduction_cnt - 1;
5677 if (!task_reduction_needs_orig_p)
5678 cnt += task_reduction_cntorig_full - task_reduction_cntorig;
5679 else
5680 cnt = task_reduction_cntorig - 1;
5681 tree r = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5682 size_int (cnt), NULL_TREE, NULL_TREE);
5683 gimplify_assign (r, x, ilist);
5684 continue;
5686 else if (pass == 3)
5688 tree type = TREE_TYPE (new_var);
5689 if (!omp_privatize_by_reference (var))
5690 type = build_pointer_type (type);
5691 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5693 unsigned cnt = task_reduction_cnt - 1;
5694 if (!task_reduction_needs_orig_p)
5695 cnt += (task_reduction_cntorig_full
5696 - task_reduction_cntorig);
5697 else
5698 cnt = task_reduction_cntorig - 1;
5699 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5700 size_int (cnt), NULL_TREE, NULL_TREE);
5702 else
5704 unsigned int idx = *ctx->task_reduction_map->get (c);
5705 tree off;
5706 if (ctx->task_reductions[1 + idx])
5707 off = fold_convert (sizetype,
5708 ctx->task_reductions[1 + idx]);
5709 else
5710 off = task_reduction_read (ilist, tskred_temp, sizetype,
5711 7 + 3 * idx + 1);
5712 x = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node,
5713 tskred_base, off);
5715 x = fold_convert (type, x);
5716 tree t;
5717 if (omp_privatize_by_reference (var))
5719 gimplify_assign (new_var, x, ilist);
5720 t = new_var;
5721 new_var = build_simple_mem_ref (new_var);
5723 else
5725 t = create_tmp_var (type);
5726 gimplify_assign (t, x, ilist);
5727 SET_DECL_VALUE_EXPR (new_var, build_simple_mem_ref (t));
5728 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5730 t = fold_convert (build_pointer_type (boolean_type_node), t);
5731 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t,
5732 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5733 cond = create_tmp_var (TREE_TYPE (t));
5734 gimplify_assign (cond, t, ilist);
5736 else if (is_variable_sized (var))
5738 /* For variable sized types, we need to allocate the
5739 actual storage here. Call alloca and store the
5740 result in the pointer decl that we created elsewhere. */
5741 if (pass == 0)
5742 continue;
5744 if (c_kind != OMP_CLAUSE_FIRSTPRIVATE || !is_task_ctx (ctx))
5746 tree tmp;
5748 ptr = DECL_VALUE_EXPR (new_var);
5749 gcc_assert (TREE_CODE (ptr) == INDIRECT_REF);
5750 ptr = TREE_OPERAND (ptr, 0);
5751 gcc_assert (DECL_P (ptr));
5752 x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
5754 if (lower_private_allocate (var, new_var, allocator,
5755 allocate_ptr, ilist, ctx,
5756 false, x))
5757 tmp = allocate_ptr;
5758 else
5760 /* void *tmp = __builtin_alloca */
5761 tree atmp
5762 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
5763 gcall *stmt
5764 = gimple_build_call (atmp, 2, x,
5765 size_int (DECL_ALIGN (var)));
5766 cfun->calls_alloca = 1;
5767 tmp = create_tmp_var_raw (ptr_type_node);
5768 gimple_add_tmp_var (tmp);
5769 gimple_call_set_lhs (stmt, tmp);
5771 gimple_seq_add_stmt (ilist, stmt);
5774 x = fold_convert_loc (clause_loc, TREE_TYPE (ptr), tmp);
5775 gimplify_assign (ptr, x, ilist);
5778 else if (omp_privatize_by_reference (var)
5779 && (c_kind != OMP_CLAUSE_FIRSTPRIVATE
5780 || !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c)))
5782 /* For references that are being privatized for Fortran,
5783 allocate new backing storage for the new pointer
5784 variable. This allows us to avoid changing all the
5785 code that expects a pointer to something that expects
5786 a direct variable. */
5787 if (pass == 0)
5788 continue;
5790 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
5791 if (c_kind == OMP_CLAUSE_FIRSTPRIVATE && is_task_ctx (ctx))
5793 x = build_receiver_ref (var, false, ctx);
5794 if (ctx->allocate_map)
5795 if (tree *allocatep = ctx->allocate_map->get (var))
5797 allocator = *allocatep;
5798 if (TREE_CODE (allocator) == TREE_LIST)
5799 allocator = TREE_PURPOSE (allocator);
5800 if (TREE_CODE (allocator) != INTEGER_CST)
5801 allocator = build_outer_var_ref (allocator, ctx);
5802 allocator = fold_convert (pointer_sized_int_node,
5803 allocator);
5804 allocate_ptr = unshare_expr (x);
5806 if (allocator == NULL_TREE)
5807 x = build_fold_addr_expr_loc (clause_loc, x);
5809 else if (lower_private_allocate (var, new_var, allocator,
5810 allocate_ptr,
5811 ilist, ctx, true, x))
5812 x = allocate_ptr;
5813 else if (TREE_CONSTANT (x))
5815 /* For reduction in SIMD loop, defer adding the
5816 initialization of the reference, because if we decide
5817 to use SIMD array for it, the initilization could cause
5818 expansion ICE. Ditto for other privatization clauses. */
5819 if (is_simd)
5820 x = NULL_TREE;
5821 else
5823 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
5824 get_name (var));
5825 gimple_add_tmp_var (x);
5826 TREE_ADDRESSABLE (x) = 1;
5827 x = build_fold_addr_expr_loc (clause_loc, x);
5830 else
5832 tree atmp
5833 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
5834 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
5835 tree al = size_int (TYPE_ALIGN (rtype));
5836 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
5839 if (x)
5841 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
5842 gimplify_assign (new_var, x, ilist);
5845 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
5847 else if ((c_kind == OMP_CLAUSE_REDUCTION
5848 || c_kind == OMP_CLAUSE_IN_REDUCTION)
5849 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5851 if (pass == 0)
5852 continue;
5854 else if (pass != 0)
5855 continue;
5857 switch (OMP_CLAUSE_CODE (c))
5859 case OMP_CLAUSE_SHARED:
5860 /* Ignore shared directives in teams construct inside
5861 target construct. */
5862 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
5863 && !is_host_teams_ctx (ctx))
5864 continue;
5865 /* Shared global vars are just accessed directly. */
5866 if (is_global_var (new_var))
5867 break;
5868 /* For taskloop firstprivate/lastprivate, represented
5869 as firstprivate and shared clause on the task, new_var
5870 is the firstprivate var. */
5871 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
5872 break;
5873 /* Set up the DECL_VALUE_EXPR for shared variables now. This
5874 needs to be delayed until after fixup_child_record_type so
5875 that we get the correct type during the dereference. */
5876 by_ref = use_pointer_for_field (var, ctx);
5877 x = build_receiver_ref (var, by_ref, ctx);
5878 SET_DECL_VALUE_EXPR (new_var, x);
5879 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5881 /* ??? If VAR is not passed by reference, and the variable
5882 hasn't been initialized yet, then we'll get a warning for
5883 the store into the omp_data_s structure. Ideally, we'd be
5884 able to notice this and not store anything at all, but
5885 we're generating code too early. Suppress the warning. */
5886 if (!by_ref)
5887 suppress_warning (var, OPT_Wuninitialized);
5888 break;
5890 case OMP_CLAUSE__CONDTEMP_:
5891 if (is_parallel_ctx (ctx))
5893 x = build_receiver_ref (var, false, ctx);
5894 SET_DECL_VALUE_EXPR (new_var, x);
5895 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5897 else if (is_simd && !OMP_CLAUSE__CONDTEMP__ITER (c))
5899 x = build_zero_cst (TREE_TYPE (var));
5900 goto do_private;
5902 break;
5904 case OMP_CLAUSE_LASTPRIVATE:
5905 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
5906 break;
5907 /* FALLTHRU */
5909 case OMP_CLAUSE_PRIVATE:
5910 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE)
5911 x = build_outer_var_ref (var, ctx);
5912 else if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
5914 if (is_task_ctx (ctx))
5915 x = build_receiver_ref (var, false, ctx);
5916 else
5917 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_PRIVATE);
5919 else
5920 x = NULL;
5921 do_private:
5922 tree nx;
5923 bool copy_ctor;
5924 copy_ctor = false;
5925 lower_private_allocate (var, new_var, allocator, allocate_ptr,
5926 ilist, ctx, false, NULL_TREE);
5927 nx = unshare_expr (new_var);
5928 if (is_simd
5929 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5930 && OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c))
5931 copy_ctor = true;
5932 if (copy_ctor)
5933 nx = lang_hooks.decls.omp_clause_copy_ctor (c, nx, x);
5934 else
5935 nx = lang_hooks.decls.omp_clause_default_ctor (c, nx, x);
5936 if (is_simd)
5938 tree y = lang_hooks.decls.omp_clause_dtor (c, new_var);
5939 if ((TREE_ADDRESSABLE (new_var) || nx || y
5940 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5941 && (gimple_omp_for_collapse (ctx->stmt) != 1
5942 || (gimple_omp_for_index (ctx->stmt, 0)
5943 != new_var)))
5944 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE__CONDTEMP_
5945 || omp_privatize_by_reference (var))
5946 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
5947 ivar, lvar))
5949 if (omp_privatize_by_reference (var))
5951 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5952 tree new_vard = TREE_OPERAND (new_var, 0);
5953 gcc_assert (DECL_P (new_vard));
5954 SET_DECL_VALUE_EXPR (new_vard,
5955 build_fold_addr_expr (lvar));
5956 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
5959 if (nx)
5961 tree iv = unshare_expr (ivar);
5962 if (copy_ctor)
5963 x = lang_hooks.decls.omp_clause_copy_ctor (c, iv,
5965 else
5966 x = lang_hooks.decls.omp_clause_default_ctor (c,
5970 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE__CONDTEMP_)
5972 x = build2 (MODIFY_EXPR, TREE_TYPE (ivar),
5973 unshare_expr (ivar), x);
5974 nx = x;
5976 if (nx && x)
5977 gimplify_and_add (x, &llist[0]);
5978 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5979 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
5981 tree v = new_var;
5982 if (!DECL_P (v))
5984 gcc_assert (TREE_CODE (v) == MEM_REF);
5985 v = TREE_OPERAND (v, 0);
5986 gcc_assert (DECL_P (v));
5988 v = *ctx->lastprivate_conditional_map->get (v);
5989 tree t = create_tmp_var (TREE_TYPE (v));
5990 tree z = build_zero_cst (TREE_TYPE (v));
5991 tree orig_v
5992 = build_outer_var_ref (var, ctx,
5993 OMP_CLAUSE_LASTPRIVATE);
5994 gimple_seq_add_stmt (dlist,
5995 gimple_build_assign (t, z));
5996 gcc_assert (DECL_HAS_VALUE_EXPR_P (v));
5997 tree civar = DECL_VALUE_EXPR (v);
5998 gcc_assert (TREE_CODE (civar) == ARRAY_REF);
5999 civar = unshare_expr (civar);
6000 TREE_OPERAND (civar, 1) = sctx.idx;
6001 x = build2 (MODIFY_EXPR, TREE_TYPE (t), t,
6002 unshare_expr (civar));
6003 x = build2 (COMPOUND_EXPR, TREE_TYPE (orig_v), x,
6004 build2 (MODIFY_EXPR, TREE_TYPE (orig_v),
6005 orig_v, unshare_expr (ivar)));
6006 tree cond = build2 (LT_EXPR, boolean_type_node, t,
6007 civar);
6008 x = build3 (COND_EXPR, void_type_node, cond, x,
6009 void_node);
6010 gimple_seq tseq = NULL;
6011 gimplify_and_add (x, &tseq);
6012 if (ctx->outer)
6013 lower_omp (&tseq, ctx->outer);
6014 gimple_seq_add_seq (&llist[1], tseq);
6016 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6017 && ctx->for_simd_scan_phase)
6019 x = unshare_expr (ivar);
6020 tree orig_v
6021 = build_outer_var_ref (var, ctx,
6022 OMP_CLAUSE_LASTPRIVATE);
6023 x = lang_hooks.decls.omp_clause_assign_op (c, x,
6024 orig_v);
6025 gimplify_and_add (x, &llist[0]);
6027 if (y)
6029 y = lang_hooks.decls.omp_clause_dtor (c, ivar);
6030 if (y)
6031 gimplify_and_add (y, &llist[1]);
6033 break;
6035 if (omp_privatize_by_reference (var))
6037 gcc_assert (TREE_CODE (new_var) == MEM_REF);
6038 tree new_vard = TREE_OPERAND (new_var, 0);
6039 gcc_assert (DECL_P (new_vard));
6040 tree type = TREE_TYPE (TREE_TYPE (new_vard));
6041 x = TYPE_SIZE_UNIT (type);
6042 if (TREE_CONSTANT (x))
6044 x = create_tmp_var_raw (type, get_name (var));
6045 gimple_add_tmp_var (x);
6046 TREE_ADDRESSABLE (x) = 1;
6047 x = build_fold_addr_expr_loc (clause_loc, x);
6048 x = fold_convert_loc (clause_loc,
6049 TREE_TYPE (new_vard), x);
6050 gimplify_assign (new_vard, x, ilist);
6054 if (nx)
6055 gimplify_and_add (nx, ilist);
6056 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6057 && is_simd
6058 && ctx->for_simd_scan_phase)
6060 tree orig_v = build_outer_var_ref (var, ctx,
6061 OMP_CLAUSE_LASTPRIVATE);
6062 x = lang_hooks.decls.omp_clause_assign_op (c, new_var,
6063 orig_v);
6064 gimplify_and_add (x, ilist);
6066 /* FALLTHRU */
6068 do_dtor:
6069 x = lang_hooks.decls.omp_clause_dtor (c, new_var);
6070 if (x)
6071 gimplify_and_add (x, dlist);
6072 if (allocator)
6074 if (!is_gimple_val (allocator))
6076 tree avar = create_tmp_var (TREE_TYPE (allocator));
6077 gimplify_assign (avar, allocator, dlist);
6078 allocator = avar;
6080 if (!is_gimple_val (allocate_ptr))
6082 tree apvar = create_tmp_var (TREE_TYPE (allocate_ptr));
6083 gimplify_assign (apvar, allocate_ptr, dlist);
6084 allocate_ptr = apvar;
6086 tree f = builtin_decl_explicit (BUILT_IN_GOMP_FREE);
6087 gimple *g
6088 = gimple_build_call (f, 2, allocate_ptr, allocator);
6089 gimple_seq_add_stmt (dlist, g);
6091 break;
6093 case OMP_CLAUSE_LINEAR:
6094 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
6095 goto do_firstprivate;
6096 if (OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
6097 x = NULL;
6098 else
6099 x = build_outer_var_ref (var, ctx);
6100 goto do_private;
6102 case OMP_CLAUSE_FIRSTPRIVATE:
6103 if (is_task_ctx (ctx))
6105 if ((omp_privatize_by_reference (var)
6106 && !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c))
6107 || is_variable_sized (var))
6108 goto do_dtor;
6109 else if (is_global_var (maybe_lookup_decl_in_outer_ctx (var,
6110 ctx))
6111 || use_pointer_for_field (var, NULL))
6113 x = build_receiver_ref (var, false, ctx);
6114 if (ctx->allocate_map)
6115 if (tree *allocatep = ctx->allocate_map->get (var))
6117 allocator = *allocatep;
6118 if (TREE_CODE (allocator) == TREE_LIST)
6119 allocator = TREE_PURPOSE (allocator);
6120 if (TREE_CODE (allocator) != INTEGER_CST)
6121 allocator = build_outer_var_ref (allocator, ctx);
6122 allocator = fold_convert (pointer_sized_int_node,
6123 allocator);
6124 allocate_ptr = unshare_expr (x);
6125 x = build_simple_mem_ref (x);
6126 TREE_THIS_NOTRAP (x) = 1;
6128 SET_DECL_VALUE_EXPR (new_var, x);
6129 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
6130 goto do_dtor;
6133 if (OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c)
6134 && omp_privatize_by_reference (var))
6136 x = build_outer_var_ref (var, ctx);
6137 gcc_assert (TREE_CODE (x) == MEM_REF
6138 && integer_zerop (TREE_OPERAND (x, 1)));
6139 x = TREE_OPERAND (x, 0);
6140 x = lang_hooks.decls.omp_clause_copy_ctor
6141 (c, unshare_expr (new_var), x);
6142 gimplify_and_add (x, ilist);
6143 goto do_dtor;
6145 do_firstprivate:
6146 lower_private_allocate (var, new_var, allocator, allocate_ptr,
6147 ilist, ctx, false, NULL_TREE);
6148 x = build_outer_var_ref (var, ctx);
6149 if (is_simd)
6151 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
6152 && gimple_omp_for_combined_into_p (ctx->stmt))
6154 tree t = OMP_CLAUSE_LINEAR_STEP (c);
6155 tree stept = TREE_TYPE (t);
6156 tree ct = omp_find_clause (clauses,
6157 OMP_CLAUSE__LOOPTEMP_);
6158 gcc_assert (ct);
6159 tree l = OMP_CLAUSE_DECL (ct);
6160 tree n1 = fd->loop.n1;
6161 tree step = fd->loop.step;
6162 tree itype = TREE_TYPE (l);
6163 if (POINTER_TYPE_P (itype))
6164 itype = signed_type_for (itype);
6165 l = fold_build2 (MINUS_EXPR, itype, l, n1);
6166 if (TYPE_UNSIGNED (itype)
6167 && fd->loop.cond_code == GT_EXPR)
6168 l = fold_build2 (TRUNC_DIV_EXPR, itype,
6169 fold_build1 (NEGATE_EXPR, itype, l),
6170 fold_build1 (NEGATE_EXPR,
6171 itype, step));
6172 else
6173 l = fold_build2 (TRUNC_DIV_EXPR, itype, l, step);
6174 t = fold_build2 (MULT_EXPR, stept,
6175 fold_convert (stept, l), t);
6177 if (OMP_CLAUSE_LINEAR_ARRAY (c))
6179 if (omp_privatize_by_reference (var))
6181 gcc_assert (TREE_CODE (new_var) == MEM_REF);
6182 tree new_vard = TREE_OPERAND (new_var, 0);
6183 gcc_assert (DECL_P (new_vard));
6184 tree type = TREE_TYPE (TREE_TYPE (new_vard));
6185 nx = TYPE_SIZE_UNIT (type);
6186 if (TREE_CONSTANT (nx))
6188 nx = create_tmp_var_raw (type,
6189 get_name (var));
6190 gimple_add_tmp_var (nx);
6191 TREE_ADDRESSABLE (nx) = 1;
6192 nx = build_fold_addr_expr_loc (clause_loc,
6193 nx);
6194 nx = fold_convert_loc (clause_loc,
6195 TREE_TYPE (new_vard),
6196 nx);
6197 gimplify_assign (new_vard, nx, ilist);
6201 x = lang_hooks.decls.omp_clause_linear_ctor
6202 (c, new_var, x, t);
6203 gimplify_and_add (x, ilist);
6204 goto do_dtor;
6207 if (POINTER_TYPE_P (TREE_TYPE (x)))
6208 x = fold_build2 (POINTER_PLUS_EXPR,
6209 TREE_TYPE (x), x, t);
6210 else
6211 x = fold_build2 (PLUS_EXPR, TREE_TYPE (x), x, t);
6214 if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LINEAR
6215 || TREE_ADDRESSABLE (new_var)
6216 || omp_privatize_by_reference (var))
6217 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
6218 ivar, lvar))
6220 if (omp_privatize_by_reference (var))
6222 gcc_assert (TREE_CODE (new_var) == MEM_REF);
6223 tree new_vard = TREE_OPERAND (new_var, 0);
6224 gcc_assert (DECL_P (new_vard));
6225 SET_DECL_VALUE_EXPR (new_vard,
6226 build_fold_addr_expr (lvar));
6227 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
6229 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR)
6231 tree iv = create_tmp_var (TREE_TYPE (new_var));
6232 x = lang_hooks.decls.omp_clause_copy_ctor (c, iv, x);
6233 gimplify_and_add (x, ilist);
6234 gimple_stmt_iterator gsi
6235 = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
6236 gassign *g
6237 = gimple_build_assign (unshare_expr (lvar), iv);
6238 gsi_insert_before_without_update (&gsi, g,
6239 GSI_SAME_STMT);
6240 tree t = OMP_CLAUSE_LINEAR_STEP (c);
6241 enum tree_code code = PLUS_EXPR;
6242 if (POINTER_TYPE_P (TREE_TYPE (new_var)))
6243 code = POINTER_PLUS_EXPR;
6244 g = gimple_build_assign (iv, code, iv, t);
6245 gsi_insert_before_without_update (&gsi, g,
6246 GSI_SAME_STMT);
6247 break;
6249 x = lang_hooks.decls.omp_clause_copy_ctor
6250 (c, unshare_expr (ivar), x);
6251 gimplify_and_add (x, &llist[0]);
6252 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
6253 if (x)
6254 gimplify_and_add (x, &llist[1]);
6255 break;
6257 if (omp_privatize_by_reference (var))
6259 gcc_assert (TREE_CODE (new_var) == MEM_REF);
6260 tree new_vard = TREE_OPERAND (new_var, 0);
6261 gcc_assert (DECL_P (new_vard));
6262 tree type = TREE_TYPE (TREE_TYPE (new_vard));
6263 nx = TYPE_SIZE_UNIT (type);
6264 if (TREE_CONSTANT (nx))
6266 nx = create_tmp_var_raw (type, get_name (var));
6267 gimple_add_tmp_var (nx);
6268 TREE_ADDRESSABLE (nx) = 1;
6269 nx = build_fold_addr_expr_loc (clause_loc, nx);
6270 nx = fold_convert_loc (clause_loc,
6271 TREE_TYPE (new_vard), nx);
6272 gimplify_assign (new_vard, nx, ilist);
6276 x = lang_hooks.decls.omp_clause_copy_ctor
6277 (c, unshare_expr (new_var), x);
6278 gimplify_and_add (x, ilist);
6279 goto do_dtor;
6281 case OMP_CLAUSE__LOOPTEMP_:
6282 case OMP_CLAUSE__REDUCTEMP_:
6283 gcc_assert (is_taskreg_ctx (ctx));
6284 x = build_outer_var_ref (var, ctx);
6285 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
6286 gimplify_and_add (x, ilist);
6287 break;
6289 case OMP_CLAUSE_COPYIN:
6290 by_ref = use_pointer_for_field (var, NULL);
6291 x = build_receiver_ref (var, by_ref, ctx);
6292 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, x);
6293 append_to_statement_list (x, &copyin_seq);
6294 copyin_by_ref |= by_ref;
6295 break;
6297 case OMP_CLAUSE_REDUCTION:
6298 case OMP_CLAUSE_IN_REDUCTION:
6299 /* OpenACC reductions are initialized using the
6300 GOACC_REDUCTION internal function. */
6301 if (is_gimple_omp_oacc (ctx->stmt))
6302 break;
6303 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
6305 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
6306 gimple *tseq;
6307 tree ptype = TREE_TYPE (placeholder);
6308 if (cond)
6310 x = error_mark_node;
6311 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c)
6312 && !task_reduction_needs_orig_p)
6313 x = var;
6314 else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
6316 tree pptype = build_pointer_type (ptype);
6317 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
6318 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
6319 size_int (task_reduction_cnt_full
6320 + task_reduction_cntorig - 1),
6321 NULL_TREE, NULL_TREE);
6322 else
6324 unsigned int idx
6325 = *ctx->task_reduction_map->get (c);
6326 x = task_reduction_read (ilist, tskred_temp,
6327 pptype, 7 + 3 * idx);
6329 x = fold_convert (pptype, x);
6330 x = build_simple_mem_ref (x);
6333 else
6335 lower_private_allocate (var, new_var, allocator,
6336 allocate_ptr, ilist, ctx, false,
6337 NULL_TREE);
6338 x = build_outer_var_ref (var, ctx);
6340 if (omp_privatize_by_reference (var)
6341 && !useless_type_conversion_p (ptype, TREE_TYPE (x)))
6342 x = build_fold_addr_expr_loc (clause_loc, x);
6344 SET_DECL_VALUE_EXPR (placeholder, x);
6345 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
6346 tree new_vard = new_var;
6347 if (omp_privatize_by_reference (var))
6349 gcc_assert (TREE_CODE (new_var) == MEM_REF);
6350 new_vard = TREE_OPERAND (new_var, 0);
6351 gcc_assert (DECL_P (new_vard));
6353 tree rvar = NULL_TREE, *rvarp = NULL, rvar2 = NULL_TREE;
6354 if (is_simd
6355 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6356 && OMP_CLAUSE_REDUCTION_INSCAN (c))
6357 rvarp = &rvar;
6358 if (is_simd
6359 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
6360 ivar, lvar, rvarp,
6361 &rvar2))
6363 if (new_vard == new_var)
6365 gcc_assert (DECL_VALUE_EXPR (new_var) == lvar);
6366 SET_DECL_VALUE_EXPR (new_var, ivar);
6368 else
6370 SET_DECL_VALUE_EXPR (new_vard,
6371 build_fold_addr_expr (ivar));
6372 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
6374 x = lang_hooks.decls.omp_clause_default_ctor
6375 (c, unshare_expr (ivar),
6376 build_outer_var_ref (var, ctx));
6377 if (rvarp && ctx->for_simd_scan_phase)
6379 if (x)
6380 gimplify_and_add (x, &llist[0]);
6381 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
6382 if (x)
6383 gimplify_and_add (x, &llist[1]);
6384 break;
6386 else if (rvarp)
6388 if (x)
6390 gimplify_and_add (x, &llist[0]);
6392 tree ivar2 = unshare_expr (lvar);
6393 TREE_OPERAND (ivar2, 1) = sctx.idx;
6394 x = lang_hooks.decls.omp_clause_default_ctor
6395 (c, ivar2, build_outer_var_ref (var, ctx));
6396 gimplify_and_add (x, &llist[0]);
6398 if (rvar2)
6400 x = lang_hooks.decls.omp_clause_default_ctor
6401 (c, unshare_expr (rvar2),
6402 build_outer_var_ref (var, ctx));
6403 gimplify_and_add (x, &llist[0]);
6406 /* For types that need construction, add another
6407 private var which will be default constructed
6408 and optionally initialized with
6409 OMP_CLAUSE_REDUCTION_GIMPLE_INIT, as in the
6410 loop we want to assign this value instead of
6411 constructing and destructing it in each
6412 iteration. */
6413 tree nv = create_tmp_var_raw (TREE_TYPE (ivar));
6414 gimple_add_tmp_var (nv);
6415 ctx->cb.decl_map->put (TREE_OPERAND (rvar2
6416 ? rvar2
6417 : ivar, 0),
6418 nv);
6419 x = lang_hooks.decls.omp_clause_default_ctor
6420 (c, nv, build_outer_var_ref (var, ctx));
6421 gimplify_and_add (x, ilist);
6423 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
6425 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
6426 x = DECL_VALUE_EXPR (new_vard);
6427 tree vexpr = nv;
6428 if (new_vard != new_var)
6429 vexpr = build_fold_addr_expr (nv);
6430 SET_DECL_VALUE_EXPR (new_vard, vexpr);
6431 lower_omp (&tseq, ctx);
6432 SET_DECL_VALUE_EXPR (new_vard, x);
6433 gimple_seq_add_seq (ilist, tseq);
6434 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
6437 x = lang_hooks.decls.omp_clause_dtor (c, nv);
6438 if (x)
6439 gimplify_and_add (x, dlist);
6442 tree ref = build_outer_var_ref (var, ctx);
6443 x = unshare_expr (ivar);
6444 x = lang_hooks.decls.omp_clause_assign_op (c, x,
6445 ref);
6446 gimplify_and_add (x, &llist[0]);
6448 ref = build_outer_var_ref (var, ctx);
6449 x = lang_hooks.decls.omp_clause_assign_op (c, ref,
6450 rvar);
6451 gimplify_and_add (x, &llist[3]);
6453 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
6454 if (new_vard == new_var)
6455 SET_DECL_VALUE_EXPR (new_var, lvar);
6456 else
6457 SET_DECL_VALUE_EXPR (new_vard,
6458 build_fold_addr_expr (lvar));
6460 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
6461 if (x)
6462 gimplify_and_add (x, &llist[1]);
6464 tree ivar2 = unshare_expr (lvar);
6465 TREE_OPERAND (ivar2, 1) = sctx.idx;
6466 x = lang_hooks.decls.omp_clause_dtor (c, ivar2);
6467 if (x)
6468 gimplify_and_add (x, &llist[1]);
6470 if (rvar2)
6472 x = lang_hooks.decls.omp_clause_dtor (c, rvar2);
6473 if (x)
6474 gimplify_and_add (x, &llist[1]);
6476 break;
6478 if (x)
6479 gimplify_and_add (x, &llist[0]);
6480 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
6482 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
6483 lower_omp (&tseq, ctx);
6484 gimple_seq_add_seq (&llist[0], tseq);
6486 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
6487 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
6488 lower_omp (&tseq, ctx);
6489 gimple_seq_add_seq (&llist[1], tseq);
6490 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
6491 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
6492 if (new_vard == new_var)
6493 SET_DECL_VALUE_EXPR (new_var, lvar);
6494 else
6495 SET_DECL_VALUE_EXPR (new_vard,
6496 build_fold_addr_expr (lvar));
6497 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
6498 if (x)
6499 gimplify_and_add (x, &llist[1]);
6500 break;
6502 /* If this is a reference to constant size reduction var
6503 with placeholder, we haven't emitted the initializer
6504 for it because it is undesirable if SIMD arrays are used.
6505 But if they aren't used, we need to emit the deferred
6506 initialization now. */
6507 else if (omp_privatize_by_reference (var) && is_simd)
6508 handle_simd_reference (clause_loc, new_vard, ilist);
6510 tree lab2 = NULL_TREE;
6511 if (cond)
6513 gimple *g;
6514 if (!is_parallel_ctx (ctx))
6516 tree condv = create_tmp_var (boolean_type_node);
6517 tree m = build_simple_mem_ref (cond);
6518 g = gimple_build_assign (condv, m);
6519 gimple_seq_add_stmt (ilist, g);
6520 tree lab1
6521 = create_artificial_label (UNKNOWN_LOCATION);
6522 lab2 = create_artificial_label (UNKNOWN_LOCATION);
6523 g = gimple_build_cond (NE_EXPR, condv,
6524 boolean_false_node,
6525 lab2, lab1);
6526 gimple_seq_add_stmt (ilist, g);
6527 gimple_seq_add_stmt (ilist,
6528 gimple_build_label (lab1));
6530 g = gimple_build_assign (build_simple_mem_ref (cond),
6531 boolean_true_node);
6532 gimple_seq_add_stmt (ilist, g);
6534 x = lang_hooks.decls.omp_clause_default_ctor
6535 (c, unshare_expr (new_var),
6536 cond ? NULL_TREE
6537 : build_outer_var_ref (var, ctx));
6538 if (x)
6539 gimplify_and_add (x, ilist);
6541 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6542 && OMP_CLAUSE_REDUCTION_INSCAN (c))
6544 if (ctx->for_simd_scan_phase)
6545 goto do_dtor;
6546 if (x || (!is_simd
6547 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c)))
6549 tree nv = create_tmp_var_raw (TREE_TYPE (new_var));
6550 gimple_add_tmp_var (nv);
6551 ctx->cb.decl_map->put (new_vard, nv);
6552 x = lang_hooks.decls.omp_clause_default_ctor
6553 (c, nv, build_outer_var_ref (var, ctx));
6554 if (x)
6555 gimplify_and_add (x, ilist);
6556 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
6558 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
6559 tree vexpr = nv;
6560 if (new_vard != new_var)
6561 vexpr = build_fold_addr_expr (nv);
6562 SET_DECL_VALUE_EXPR (new_vard, vexpr);
6563 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
6564 lower_omp (&tseq, ctx);
6565 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
6566 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
6567 gimple_seq_add_seq (ilist, tseq);
6569 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
6570 if (is_simd && ctx->scan_exclusive)
6572 tree nv2
6573 = create_tmp_var_raw (TREE_TYPE (new_var));
6574 gimple_add_tmp_var (nv2);
6575 ctx->cb.decl_map->put (nv, nv2);
6576 x = lang_hooks.decls.omp_clause_default_ctor
6577 (c, nv2, build_outer_var_ref (var, ctx));
6578 gimplify_and_add (x, ilist);
6579 x = lang_hooks.decls.omp_clause_dtor (c, nv2);
6580 if (x)
6581 gimplify_and_add (x, dlist);
6583 x = lang_hooks.decls.omp_clause_dtor (c, nv);
6584 if (x)
6585 gimplify_and_add (x, dlist);
6587 else if (is_simd
6588 && ctx->scan_exclusive
6589 && TREE_ADDRESSABLE (TREE_TYPE (new_var)))
6591 tree nv2 = create_tmp_var_raw (TREE_TYPE (new_var));
6592 gimple_add_tmp_var (nv2);
6593 ctx->cb.decl_map->put (new_vard, nv2);
6594 x = lang_hooks.decls.omp_clause_dtor (c, nv2);
6595 if (x)
6596 gimplify_and_add (x, dlist);
6598 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
6599 goto do_dtor;
6602 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
6604 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
6605 if (c_kind == OMP_CLAUSE_IN_REDUCTION
6606 && is_omp_target (ctx->stmt))
6608 tree d = maybe_lookup_decl_in_outer_ctx (var, ctx);
6609 tree oldv = NULL_TREE;
6610 gcc_assert (d);
6611 if (DECL_HAS_VALUE_EXPR_P (d))
6612 oldv = DECL_VALUE_EXPR (d);
6613 SET_DECL_VALUE_EXPR (d, new_vard);
6614 DECL_HAS_VALUE_EXPR_P (d) = 1;
6615 lower_omp (&tseq, ctx);
6616 if (oldv)
6617 SET_DECL_VALUE_EXPR (d, oldv);
6618 else
6620 SET_DECL_VALUE_EXPR (d, NULL_TREE);
6621 DECL_HAS_VALUE_EXPR_P (d) = 0;
6624 else
6625 lower_omp (&tseq, ctx);
6626 gimple_seq_add_seq (ilist, tseq);
6628 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
6629 if (is_simd)
6631 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
6632 lower_omp (&tseq, ctx);
6633 gimple_seq_add_seq (dlist, tseq);
6634 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
6636 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
6637 if (cond)
6639 if (lab2)
6640 gimple_seq_add_stmt (ilist, gimple_build_label (lab2));
6641 break;
6643 goto do_dtor;
6645 else
6647 x = omp_reduction_init (c, TREE_TYPE (new_var));
6648 gcc_assert (TREE_CODE (TREE_TYPE (new_var)) != ARRAY_TYPE);
6649 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
6651 if (cond)
6653 gimple *g;
6654 tree lab2 = NULL_TREE;
6655 /* GOMP_taskgroup_reduction_register memsets the whole
6656 array to zero. If the initializer is zero, we don't
6657 need to initialize it again, just mark it as ever
6658 used unconditionally, i.e. cond = true. */
6659 if (initializer_zerop (x))
6661 g = gimple_build_assign (build_simple_mem_ref (cond),
6662 boolean_true_node);
6663 gimple_seq_add_stmt (ilist, g);
6664 break;
6667 /* Otherwise, emit
6668 if (!cond) { cond = true; new_var = x; } */
6669 if (!is_parallel_ctx (ctx))
6671 tree condv = create_tmp_var (boolean_type_node);
6672 tree m = build_simple_mem_ref (cond);
6673 g = gimple_build_assign (condv, m);
6674 gimple_seq_add_stmt (ilist, g);
6675 tree lab1
6676 = create_artificial_label (UNKNOWN_LOCATION);
6677 lab2 = create_artificial_label (UNKNOWN_LOCATION);
6678 g = gimple_build_cond (NE_EXPR, condv,
6679 boolean_false_node,
6680 lab2, lab1);
6681 gimple_seq_add_stmt (ilist, g);
6682 gimple_seq_add_stmt (ilist,
6683 gimple_build_label (lab1));
6685 g = gimple_build_assign (build_simple_mem_ref (cond),
6686 boolean_true_node);
6687 gimple_seq_add_stmt (ilist, g);
6688 gimplify_assign (new_var, x, ilist);
6689 if (lab2)
6690 gimple_seq_add_stmt (ilist, gimple_build_label (lab2));
6691 break;
6694 /* reduction(-:var) sums up the partial results, so it
6695 acts identically to reduction(+:var). */
6696 if (code == MINUS_EXPR)
6697 code = PLUS_EXPR;
6699 bool is_truth_op
6700 = (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR);
6701 tree new_vard = new_var;
6702 if (is_simd && omp_privatize_by_reference (var))
6704 gcc_assert (TREE_CODE (new_var) == MEM_REF);
6705 new_vard = TREE_OPERAND (new_var, 0);
6706 gcc_assert (DECL_P (new_vard));
6708 tree rvar = NULL_TREE, *rvarp = NULL, rvar2 = NULL_TREE;
6709 if (is_simd
6710 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6711 && OMP_CLAUSE_REDUCTION_INSCAN (c))
6712 rvarp = &rvar;
6713 if (is_simd
6714 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
6715 ivar, lvar, rvarp,
6716 &rvar2))
6718 if (new_vard != new_var)
6720 SET_DECL_VALUE_EXPR (new_vard,
6721 build_fold_addr_expr (lvar));
6722 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
6725 tree ref = build_outer_var_ref (var, ctx);
6727 if (rvarp)
6729 if (ctx->for_simd_scan_phase)
6730 break;
6731 gimplify_assign (ivar, ref, &llist[0]);
6732 ref = build_outer_var_ref (var, ctx);
6733 gimplify_assign (ref, rvar, &llist[3]);
6734 break;
6737 gimplify_assign (unshare_expr (ivar), x, &llist[0]);
6739 if (sctx.is_simt)
6741 if (!simt_lane)
6742 simt_lane = create_tmp_var (unsigned_type_node);
6743 x = build_call_expr_internal_loc
6744 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_BFLY,
6745 TREE_TYPE (ivar), 2, ivar, simt_lane);
6746 /* Make sure x is evaluated unconditionally. */
6747 tree bfly_var = create_tmp_var (TREE_TYPE (ivar));
6748 gimplify_assign (bfly_var, x, &llist[2]);
6749 x = build2 (code, TREE_TYPE (ivar), ivar, bfly_var);
6750 gimplify_assign (ivar, x, &llist[2]);
6752 tree ivar2 = ivar;
6753 tree ref2 = ref;
6754 if (is_truth_op)
6756 tree zero = build_zero_cst (TREE_TYPE (ivar));
6757 ivar2 = fold_build2_loc (clause_loc, NE_EXPR,
6758 boolean_type_node, ivar,
6759 zero);
6760 ref2 = fold_build2_loc (clause_loc, NE_EXPR,
6761 boolean_type_node, ref,
6762 zero);
6764 x = build2 (code, TREE_TYPE (ref), ref2, ivar2);
6765 if (is_truth_op)
6766 x = fold_convert (TREE_TYPE (ref), x);
6767 ref = build_outer_var_ref (var, ctx);
6768 gimplify_assign (ref, x, &llist[1]);
6771 else
6773 lower_private_allocate (var, new_var, allocator,
6774 allocate_ptr, ilist, ctx,
6775 false, NULL_TREE);
6776 if (omp_privatize_by_reference (var) && is_simd)
6777 handle_simd_reference (clause_loc, new_vard, ilist);
6778 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6779 && OMP_CLAUSE_REDUCTION_INSCAN (c))
6780 break;
6781 gimplify_assign (new_var, x, ilist);
6782 if (is_simd)
6784 tree ref = build_outer_var_ref (var, ctx);
6785 tree new_var2 = new_var;
6786 tree ref2 = ref;
6787 if (is_truth_op)
6789 tree zero = build_zero_cst (TREE_TYPE (new_var));
6790 new_var2
6791 = fold_build2_loc (clause_loc, NE_EXPR,
6792 boolean_type_node, new_var,
6793 zero);
6794 ref2 = fold_build2_loc (clause_loc, NE_EXPR,
6795 boolean_type_node, ref,
6796 zero);
6798 x = build2 (code, TREE_TYPE (ref2), ref2, new_var2);
6799 if (is_truth_op)
6800 x = fold_convert (TREE_TYPE (new_var), x);
6801 ref = build_outer_var_ref (var, ctx);
6802 gimplify_assign (ref, x, dlist);
6804 if (allocator)
6805 goto do_dtor;
6808 break;
6810 default:
6811 gcc_unreachable ();
6815 if (tskred_avar)
6817 tree clobber = build_clobber (TREE_TYPE (tskred_avar));
6818 gimple_seq_add_stmt (ilist, gimple_build_assign (tskred_avar, clobber));
6821 if (known_eq (sctx.max_vf, 1U))
6823 sctx.is_simt = false;
6824 if (ctx->lastprivate_conditional_map)
6826 if (gimple_omp_for_combined_into_p (ctx->stmt))
6828 /* Signal to lower_omp_1 that it should use parent context. */
6829 ctx->combined_into_simd_safelen1 = true;
6830 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
6831 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6832 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
6834 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
6835 omp_context *outer = ctx->outer;
6836 if (gimple_code (outer->stmt) == GIMPLE_OMP_SCAN)
6837 outer = outer->outer;
6838 tree *v = ctx->lastprivate_conditional_map->get (o);
6839 tree po = lookup_decl (OMP_CLAUSE_DECL (c), outer);
6840 tree *pv = outer->lastprivate_conditional_map->get (po);
6841 *v = *pv;
6844 else
6846 /* When not vectorized, treat lastprivate(conditional:) like
6847 normal lastprivate, as there will be just one simd lane
6848 writing the privatized variable. */
6849 delete ctx->lastprivate_conditional_map;
6850 ctx->lastprivate_conditional_map = NULL;
6855 if (nonconst_simd_if)
6857 if (sctx.lane == NULL_TREE)
6859 sctx.idx = create_tmp_var (unsigned_type_node);
6860 sctx.lane = create_tmp_var (unsigned_type_node);
6862 /* FIXME: For now. */
6863 sctx.is_simt = false;
6866 if (sctx.lane || sctx.is_simt)
6868 uid = create_tmp_var (ptr_type_node, "simduid");
6869 /* Don't want uninit warnings on simduid, it is always uninitialized,
6870 but we use it not for the value, but for the DECL_UID only. */
6871 suppress_warning (uid, OPT_Wuninitialized);
6872 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SIMDUID_);
6873 OMP_CLAUSE__SIMDUID__DECL (c) = uid;
6874 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
6875 gimple_omp_for_set_clauses (ctx->stmt, c);
6877 /* Emit calls denoting privatized variables and initializing a pointer to
6878 structure that holds private variables as fields after ompdevlow pass. */
6879 if (sctx.is_simt)
6881 sctx.simt_eargs[0] = uid;
6882 gimple *g
6883 = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, sctx.simt_eargs);
6884 gimple_call_set_lhs (g, uid);
6885 gimple_seq_add_stmt (ilist, g);
6886 sctx.simt_eargs.release ();
6888 simtrec = create_tmp_var (ptr_type_node, ".omp_simt");
6889 g = gimple_build_call_internal (IFN_GOMP_SIMT_ENTER_ALLOC, 1, uid);
6890 gimple_call_set_lhs (g, simtrec);
6891 gimple_seq_add_stmt (ilist, g);
6893 if (sctx.lane)
6895 gimple *g = gimple_build_call_internal (IFN_GOMP_SIMD_LANE,
6896 2 + (nonconst_simd_if != NULL),
6897 uid, integer_zero_node,
6898 nonconst_simd_if);
6899 gimple_call_set_lhs (g, sctx.lane);
6900 gimple_stmt_iterator gsi = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
6901 gsi_insert_before_without_update (&gsi, g, GSI_SAME_STMT);
6902 g = gimple_build_assign (sctx.lane, INTEGER_CST,
6903 build_int_cst (unsigned_type_node, 0));
6904 gimple_seq_add_stmt (ilist, g);
6905 if (sctx.lastlane)
6907 g = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
6908 2, uid, sctx.lane);
6909 gimple_call_set_lhs (g, sctx.lastlane);
6910 gimple_seq_add_stmt (dlist, g);
6911 gimple_seq_add_seq (dlist, llist[3]);
6913 /* Emit reductions across SIMT lanes in log_2(simt_vf) steps. */
6914 if (llist[2])
6916 tree simt_vf = create_tmp_var (unsigned_type_node);
6917 g = gimple_build_call_internal (IFN_GOMP_SIMT_VF, 0);
6918 gimple_call_set_lhs (g, simt_vf);
6919 gimple_seq_add_stmt (dlist, g);
6921 tree t = build_int_cst (unsigned_type_node, 1);
6922 g = gimple_build_assign (simt_lane, INTEGER_CST, t);
6923 gimple_seq_add_stmt (dlist, g);
6925 t = build_int_cst (unsigned_type_node, 0);
6926 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
6927 gimple_seq_add_stmt (dlist, g);
6929 tree body = create_artificial_label (UNKNOWN_LOCATION);
6930 tree header = create_artificial_label (UNKNOWN_LOCATION);
6931 tree end = create_artificial_label (UNKNOWN_LOCATION);
6932 gimple_seq_add_stmt (dlist, gimple_build_goto (header));
6933 gimple_seq_add_stmt (dlist, gimple_build_label (body));
6935 gimple_seq_add_seq (dlist, llist[2]);
6937 g = gimple_build_assign (simt_lane, LSHIFT_EXPR, simt_lane, integer_one_node);
6938 gimple_seq_add_stmt (dlist, g);
6940 gimple_seq_add_stmt (dlist, gimple_build_label (header));
6941 g = gimple_build_cond (LT_EXPR, simt_lane, simt_vf, body, end);
6942 gimple_seq_add_stmt (dlist, g);
6944 gimple_seq_add_stmt (dlist, gimple_build_label (end));
6946 for (int i = 0; i < 2; i++)
6947 if (llist[i])
6949 tree vf = create_tmp_var (unsigned_type_node);
6950 g = gimple_build_call_internal (IFN_GOMP_SIMD_VF, 1, uid);
6951 gimple_call_set_lhs (g, vf);
6952 gimple_seq *seq = i == 0 ? ilist : dlist;
6953 gimple_seq_add_stmt (seq, g);
6954 tree t = build_int_cst (unsigned_type_node, 0);
6955 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
6956 gimple_seq_add_stmt (seq, g);
6957 tree body = create_artificial_label (UNKNOWN_LOCATION);
6958 tree header = create_artificial_label (UNKNOWN_LOCATION);
6959 tree end = create_artificial_label (UNKNOWN_LOCATION);
6960 gimple_seq_add_stmt (seq, gimple_build_goto (header));
6961 gimple_seq_add_stmt (seq, gimple_build_label (body));
6962 gimple_seq_add_seq (seq, llist[i]);
6963 t = build_int_cst (unsigned_type_node, 1);
6964 g = gimple_build_assign (sctx.idx, PLUS_EXPR, sctx.idx, t);
6965 gimple_seq_add_stmt (seq, g);
6966 gimple_seq_add_stmt (seq, gimple_build_label (header));
6967 g = gimple_build_cond (LT_EXPR, sctx.idx, vf, body, end);
6968 gimple_seq_add_stmt (seq, g);
6969 gimple_seq_add_stmt (seq, gimple_build_label (end));
6972 if (sctx.is_simt)
6974 gimple_seq_add_seq (dlist, sctx.simt_dlist);
6975 gimple *g
6976 = gimple_build_call_internal (IFN_GOMP_SIMT_EXIT, 1, simtrec);
6977 gimple_seq_add_stmt (dlist, g);
6980 /* The copyin sequence is not to be executed by the main thread, since
6981 that would result in self-copies. Perhaps not visible to scalars,
6982 but it certainly is to C++ operator=. */
6983 if (copyin_seq)
6985 x = build_call_expr (builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM),
6987 x = build2 (NE_EXPR, boolean_type_node, x,
6988 build_int_cst (TREE_TYPE (x), 0));
6989 x = build3 (COND_EXPR, void_type_node, x, copyin_seq, NULL);
6990 gimplify_and_add (x, ilist);
6993 /* If any copyin variable is passed by reference, we must ensure the
6994 master thread doesn't modify it before it is copied over in all
6995 threads. Similarly for variables in both firstprivate and
6996 lastprivate clauses we need to ensure the lastprivate copying
6997 happens after firstprivate copying in all threads. And similarly
6998 for UDRs if initializer expression refers to omp_orig. */
6999 if (copyin_by_ref || lastprivate_firstprivate
7000 || (reduction_omp_orig_ref
7001 && !ctx->scan_inclusive
7002 && !ctx->scan_exclusive))
7004 /* Don't add any barrier for #pragma omp simd or
7005 #pragma omp distribute. */
7006 if (!is_task_ctx (ctx)
7007 && (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
7008 || gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_FOR))
7009 gimple_seq_add_stmt (ilist, omp_build_barrier (NULL_TREE));
7012 /* If max_vf is non-zero, then we can use only a vectorization factor
7013 up to the max_vf we chose. So stick it into the safelen clause. */
7014 if (maybe_ne (sctx.max_vf, 0U))
7016 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
7017 OMP_CLAUSE_SAFELEN);
7018 poly_uint64 safe_len;
7019 if (c == NULL_TREE
7020 || (poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
7021 && maybe_gt (safe_len, sctx.max_vf)))
7023 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN);
7024 OMP_CLAUSE_SAFELEN_EXPR (c) = build_int_cst (integer_type_node,
7025 sctx.max_vf);
7026 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
7027 gimple_omp_for_set_clauses (ctx->stmt, c);
7032 /* Create temporary variables for lastprivate(conditional:) implementation
7033 in context CTX with CLAUSES. */
7035 static void
7036 lower_lastprivate_conditional_clauses (tree *clauses, omp_context *ctx)
/* Lazily-created state shared by all conditional lastprivate clauses on
   this construct: the iteration-counter type, the condition buffer
   pointer (non-simd only) and the per-construct iteration counter.  */
7038 tree iter_type = NULL_TREE;
7039 tree cond_ptr = NULL_TREE;
7040 tree iter_var = NULL_TREE;
7041 bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
7042 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD);
/* NEXT tracks the clause-list position after the last _condtemp_ we
   matched, so each conditional lastprivate pairs positionally with its
   own _condtemp_ clause in the simd path below.  */
7043 tree next = *clauses;
/* Walk the clause list and handle each lastprivate(conditional:).  */
7044 for (tree c = *clauses; c; c = OMP_CLAUSE_CHAIN (c))
7045 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7046 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
7048 if (is_simd)
/* For simd a matching _CONDTEMP_ clause is expected to be on the
   list already (hence the assert); pair it with this clause.  */
7050 tree cc = omp_find_clause (next, OMP_CLAUSE__CONDTEMP_);
7051 gcc_assert (cc);
7052 if (iter_type == NULL_TREE)
/* First conditional clause seen: take the counter type from the
   _condtemp_ decl, create the per-construct iteration counter and
   advertise it through a new _CONDTEMP_ clause with the ITER flag.  */
7054 iter_type = TREE_TYPE (OMP_CLAUSE_DECL (cc));
7055 iter_var = create_tmp_var_raw (iter_type);
7056 DECL_CONTEXT (iter_var) = current_function_decl;
7057 DECL_SEEN_IN_BIND_EXPR_P (iter_var) = 1;
7058 DECL_CHAIN (iter_var) = ctx->block_vars;
7059 ctx->block_vars = iter_var;
7060 tree c3
7061 = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
7062 OMP_CLAUSE__CONDTEMP__ITER (c3) = 1;
7063 OMP_CLAUSE_DECL (c3) = iter_var;
7064 OMP_CLAUSE_CHAIN (c3) = *clauses;
7065 *clauses = c3;
/* Map from the privatized decl to its counter temporary; consulted
   later by lower_lastprivate_clauses.  */
7066 ctx->lastprivate_conditional_map = new hash_map<tree, tree>;
7068 next = OMP_CLAUSE_CHAIN (cc);
7069 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
7070 tree v = lookup_decl (OMP_CLAUSE_DECL (cc), ctx);
7071 ctx->lastprivate_conditional_map->put (o, v);
7072 continue;
/* Non-simd path: on the first conditional clause set up the counter
   type, the condition buffer pointer and the iteration counter.  */
7074 if (iter_type == NULL)
/* Worksharing loop: use an unsigned variant of the loop iterator
   type; sections are simply counted with unsigned int.  */
7076 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR)
7078 struct omp_for_data fd;
7079 omp_extract_for_data (as_a <gomp_for *> (ctx->stmt), &fd,
7080 NULL);
7081 iter_type = unsigned_type_for (fd.iter_type);
7083 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
7084 iter_type = unsigned_type_node;
/* Reuse a pre-existing _CONDTEMP_ clause, remapping its decl to the
   outer context, or create a fresh one holding COND_PTR.  */
7085 tree c2 = omp_find_clause (*clauses, OMP_CLAUSE__CONDTEMP_);
7086 if (c2)
7088 cond_ptr
7089 = lookup_decl_in_outer_ctx (OMP_CLAUSE_DECL (c2), ctx);
7090 OMP_CLAUSE_DECL (c2) = cond_ptr;
7092 else
7094 cond_ptr = create_tmp_var_raw (build_pointer_type (iter_type));
7095 DECL_CONTEXT (cond_ptr) = current_function_decl;
7096 DECL_SEEN_IN_BIND_EXPR_P (cond_ptr) = 1;
7097 DECL_CHAIN (cond_ptr) = ctx->block_vars;
7098 ctx->block_vars = cond_ptr;
7099 c2 = build_omp_clause (UNKNOWN_LOCATION,
7100 OMP_CLAUSE__CONDTEMP_);
7101 OMP_CLAUSE_DECL (c2) = cond_ptr;
7102 OMP_CLAUSE_CHAIN (c2) = *clauses;
7103 *clauses = c2;
/* The iteration counter itself, chained right after C2 as a second
   _CONDTEMP_ clause carrying the ITER flag.  */
7105 iter_var = create_tmp_var_raw (iter_type);
7106 DECL_CONTEXT (iter_var) = current_function_decl;
7107 DECL_SEEN_IN_BIND_EXPR_P (iter_var) = 1;
7108 DECL_CHAIN (iter_var) = ctx->block_vars;
7109 ctx->block_vars = iter_var;
7110 tree c3
7111 = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
7112 OMP_CLAUSE__CONDTEMP__ITER (c3) = 1;
7113 OMP_CLAUSE_DECL (c3) = iter_var;
7114 OMP_CLAUSE_CHAIN (c3) = OMP_CLAUSE_CHAIN (c2);
7115 OMP_CLAUSE_CHAIN (c2) = c3;
7116 ctx->lastprivate_conditional_map = new hash_map<tree, tree>;
/* Per-variable temporary of ITER_TYPE; lower_lastprivate_clauses
   zero-initializes it and compares/stores it against the condition
   buffer to decide which lane/iteration wrote last.  */
7118 tree v = create_tmp_var_raw (iter_type);
7119 DECL_CONTEXT (v) = current_function_decl;
7120 DECL_SEEN_IN_BIND_EXPR_P (v) = 1;
7121 DECL_CHAIN (v) = ctx->block_vars;
7122 ctx->block_vars = v;
7123 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
7124 ctx->lastprivate_conditional_map->put (o, v);
7129 /* Generate code to implement the LASTPRIVATE clauses. This is used for
7130 both parallel and workshare constructs. PREDICATE may be NULL if it's
7131 always true. BODY_P is the sequence to insert early initialization
7132 if needed, STMT_LIST is where the non-conditional lastprivate handling
7133 goes into and CSTMT_LIST is a sequence that needs to be run in a critical
7134 section. */
7136 static void
7137 lower_lastprivate_clauses (tree clauses, tree predicate, gimple_seq *body_p,
7138 gimple_seq *stmt_list, gimple_seq *cstmt_list,
7139 omp_context *ctx)
7141 tree x, c, label = NULL, orig_clauses = clauses;
7142 bool par_clauses = false;
7143 tree simduid = NULL, lastlane = NULL, simtcond = NULL, simtlast = NULL;
7144 unsigned HOST_WIDE_INT conditional_off = 0;
7145 gimple_seq post_stmt_list = NULL;
7147 /* Early exit if there are no lastprivate or linear clauses. */
7148 for (; clauses ; clauses = OMP_CLAUSE_CHAIN (clauses))
7149 if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LASTPRIVATE
7150 || (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LINEAR
7151 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (clauses)))
7152 break;
7153 if (clauses == NULL)
7155 /* If this was a workshare clause, see if it had been combined
7156 with its parallel. In that case, look for the clauses on the
7157 parallel statement itself. */
7158 if (is_parallel_ctx (ctx))
7159 return;
7161 ctx = ctx->outer;
7162 if (ctx == NULL || !is_parallel_ctx (ctx))
7163 return;
7165 clauses = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
7166 OMP_CLAUSE_LASTPRIVATE);
7167 if (clauses == NULL)
7168 return;
7169 par_clauses = true;
7172 bool maybe_simt = false;
7173 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
7174 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
7176 maybe_simt = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMT_);
7177 simduid = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMDUID_);
7178 if (simduid)
7179 simduid = OMP_CLAUSE__SIMDUID__DECL (simduid);
7182 if (predicate)
7184 gcond *stmt;
7185 tree label_true, arm1, arm2;
7186 enum tree_code pred_code = TREE_CODE (predicate);
7188 label = create_artificial_label (UNKNOWN_LOCATION);
7189 label_true = create_artificial_label (UNKNOWN_LOCATION);
7190 if (TREE_CODE_CLASS (pred_code) == tcc_comparison)
7192 arm1 = TREE_OPERAND (predicate, 0);
7193 arm2 = TREE_OPERAND (predicate, 1);
7194 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
7195 gimplify_expr (&arm2, stmt_list, NULL, is_gimple_val, fb_rvalue);
7197 else
7199 arm1 = predicate;
7200 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
7201 arm2 = boolean_false_node;
7202 pred_code = NE_EXPR;
7204 if (maybe_simt)
7206 c = build2 (pred_code, boolean_type_node, arm1, arm2);
7207 c = fold_convert (integer_type_node, c);
7208 simtcond = create_tmp_var (integer_type_node);
7209 gimplify_assign (simtcond, c, stmt_list);
7210 gcall *g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY,
7211 1, simtcond);
7212 c = create_tmp_var (integer_type_node);
7213 gimple_call_set_lhs (g, c);
7214 gimple_seq_add_stmt (stmt_list, g);
7215 stmt = gimple_build_cond (NE_EXPR, c, integer_zero_node,
7216 label_true, label);
7218 else
7219 stmt = gimple_build_cond (pred_code, arm1, arm2, label_true, label);
7220 gimple_seq_add_stmt (stmt_list, stmt);
7221 gimple_seq_add_stmt (stmt_list, gimple_build_label (label_true));
7224 tree cond_ptr = NULL_TREE;
7225 for (c = clauses; c ;)
7227 tree var, new_var;
7228 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
7229 gimple_seq *this_stmt_list = stmt_list;
7230 tree lab2 = NULL_TREE;
7232 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7233 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
7234 && ctx->lastprivate_conditional_map
7235 && !ctx->combined_into_simd_safelen1)
7237 gcc_assert (body_p);
7238 if (simduid)
7239 goto next;
7240 if (cond_ptr == NULL_TREE)
7242 cond_ptr = omp_find_clause (orig_clauses, OMP_CLAUSE__CONDTEMP_);
7243 cond_ptr = OMP_CLAUSE_DECL (cond_ptr);
7245 tree type = TREE_TYPE (TREE_TYPE (cond_ptr));
7246 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
7247 tree v = *ctx->lastprivate_conditional_map->get (o);
7248 gimplify_assign (v, build_zero_cst (type), body_p);
7249 this_stmt_list = cstmt_list;
7250 tree mem;
7251 if (POINTER_TYPE_P (TREE_TYPE (cond_ptr)))
7253 mem = build2 (MEM_REF, type, cond_ptr,
7254 build_int_cst (TREE_TYPE (cond_ptr),
7255 conditional_off));
7256 conditional_off += tree_to_uhwi (TYPE_SIZE_UNIT (type));
7258 else
7259 mem = build4 (ARRAY_REF, type, cond_ptr,
7260 size_int (conditional_off++), NULL_TREE, NULL_TREE);
7261 tree mem2 = copy_node (mem);
7262 gimple_seq seq = NULL;
7263 mem = force_gimple_operand (mem, &seq, true, NULL_TREE);
7264 gimple_seq_add_seq (this_stmt_list, seq);
7265 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
7266 lab2 = create_artificial_label (UNKNOWN_LOCATION);
7267 gimple *g = gimple_build_cond (GT_EXPR, v, mem, lab1, lab2);
7268 gimple_seq_add_stmt (this_stmt_list, g);
7269 gimple_seq_add_stmt (this_stmt_list, gimple_build_label (lab1));
7270 gimplify_assign (mem2, v, this_stmt_list);
7272 else if (predicate
7273 && ctx->combined_into_simd_safelen1
7274 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7275 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
7276 && ctx->lastprivate_conditional_map)
7277 this_stmt_list = &post_stmt_list;
7279 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7280 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
7281 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
7283 var = OMP_CLAUSE_DECL (c);
7284 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7285 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
7286 && is_taskloop_ctx (ctx))
7288 gcc_checking_assert (ctx->outer && is_task_ctx (ctx->outer));
7289 new_var = lookup_decl (var, ctx->outer);
7291 else
7293 new_var = lookup_decl (var, ctx);
7294 /* Avoid uninitialized warnings for lastprivate and
7295 for linear iterators. */
7296 if (predicate
7297 && (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7298 || OMP_CLAUSE_LINEAR_NO_COPYIN (c)))
7299 suppress_warning (new_var, OPT_Wuninitialized);
7302 if (!maybe_simt && simduid && DECL_HAS_VALUE_EXPR_P (new_var))
7304 tree val = DECL_VALUE_EXPR (new_var);
7305 if (TREE_CODE (val) == ARRAY_REF
7306 && VAR_P (TREE_OPERAND (val, 0))
7307 && lookup_attribute ("omp simd array",
7308 DECL_ATTRIBUTES (TREE_OPERAND (val,
7309 0))))
7311 if (lastlane == NULL)
7313 lastlane = create_tmp_var (unsigned_type_node);
7314 gcall *g
7315 = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
7316 2, simduid,
7317 TREE_OPERAND (val, 1));
7318 gimple_call_set_lhs (g, lastlane);
7319 gimple_seq_add_stmt (this_stmt_list, g);
7321 new_var = build4 (ARRAY_REF, TREE_TYPE (val),
7322 TREE_OPERAND (val, 0), lastlane,
7323 NULL_TREE, NULL_TREE);
7324 TREE_THIS_NOTRAP (new_var) = 1;
7327 else if (maybe_simt)
7329 tree val = (DECL_HAS_VALUE_EXPR_P (new_var)
7330 ? DECL_VALUE_EXPR (new_var)
7331 : new_var);
7332 if (simtlast == NULL)
7334 simtlast = create_tmp_var (unsigned_type_node);
7335 gcall *g = gimple_build_call_internal
7336 (IFN_GOMP_SIMT_LAST_LANE, 1, simtcond);
7337 gimple_call_set_lhs (g, simtlast);
7338 gimple_seq_add_stmt (this_stmt_list, g);
7340 x = build_call_expr_internal_loc
7341 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_IDX,
7342 TREE_TYPE (val), 2, val, simtlast);
7343 new_var = unshare_expr (new_var);
7344 gimplify_assign (new_var, x, this_stmt_list);
7345 new_var = unshare_expr (new_var);
7348 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7349 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
7351 lower_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
7352 gimple_seq_add_seq (this_stmt_list,
7353 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
7354 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) = NULL;
7356 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
7357 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
7359 lower_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
7360 gimple_seq_add_seq (this_stmt_list,
7361 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
7362 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) = NULL;
7365 x = NULL_TREE;
7366 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7367 && OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c)
7368 && is_taskloop_ctx (ctx))
7370 tree ovar = maybe_lookup_decl_in_outer_ctx (var,
7371 ctx->outer->outer);
7372 if (is_global_var (ovar))
7373 x = ovar;
7375 if (!x)
7376 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_LASTPRIVATE);
7377 if (omp_privatize_by_reference (var))
7378 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
7379 x = lang_hooks.decls.omp_clause_assign_op (c, x, new_var);
7380 gimplify_and_add (x, this_stmt_list);
7382 if (lab2)
7383 gimple_seq_add_stmt (this_stmt_list, gimple_build_label (lab2));
7386 next:
7387 c = OMP_CLAUSE_CHAIN (c);
7388 if (c == NULL && !par_clauses)
7390 /* If this was a workshare clause, see if it had been combined
7391 with its parallel. In that case, continue looking for the
7392 clauses also on the parallel statement itself. */
7393 if (is_parallel_ctx (ctx))
7394 break;
7396 ctx = ctx->outer;
7397 if (ctx == NULL || !is_parallel_ctx (ctx))
7398 break;
7400 c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
7401 OMP_CLAUSE_LASTPRIVATE);
7402 par_clauses = true;
7406 if (label)
7407 gimple_seq_add_stmt (stmt_list, gimple_build_label (label));
7408 gimple_seq_add_seq (stmt_list, post_stmt_list);
7411 /* Lower the OpenACC reductions of CLAUSES for compute axis LEVEL
7412 (which might be a placeholder). INNER is true if this is an inner
7413 axis of a multi-axis loop. FORK and JOIN are (optional) fork and
7414 join markers. Generate the before-loop forking sequence in
7415 FORK_SEQ and the after-loop joining sequence to JOIN_SEQ. The
7416 general form of these sequences is
7418 GOACC_REDUCTION_SETUP
7419 GOACC_FORK
7420 GOACC_REDUCTION_INIT
7422 GOACC_REDUCTION_FINI
7423 GOACC_JOIN
7424 GOACC_REDUCTION_TEARDOWN. */
static void
lower_oacc_reductions (location_t loc, tree clauses, tree level, bool inner,
		       gcall *fork, gcall *private_marker, gcall *join,
		       gimple_seq *fork_seq, gimple_seq *join_seq,
		       omp_context *ctx)
{
  /* The four sub-sequences built per clause below; they are stitched
     around the FORK/JOIN markers at the end of the function.  */
  gimple_seq before_fork = NULL;
  gimple_seq after_fork = NULL;
  gimple_seq before_join = NULL;
  gimple_seq after_join = NULL;
  tree init_code = NULL_TREE, fini_code = NULL_TREE,
    setup_code = NULL_TREE, teardown_code = NULL_TREE;
  /* Running byte offset of this reduction in the reduction buffer.  */
  unsigned offset = 0;

  for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
      {
	/* No 'reduction' clauses on OpenACC 'kernels'.  */
	gcc_checking_assert (!is_oacc_kernels (ctx));
	/* Likewise, on OpenACC 'kernels' decomposed parts.  */
	gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx));

	tree orig = OMP_CLAUSE_DECL (c);
	tree var = maybe_lookup_decl (orig, ctx);
	tree ref_to_res = NULL_TREE;
	tree incoming, outgoing, v1, v2, v3;
	bool is_private = false;

	/* Canonicalize the reduction operation: '-' combines partial
	   results by addition, and the short-circuit logical ops by
	   their bitwise counterparts.  */
	enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
	if (rcode == MINUS_EXPR)
	  rcode = PLUS_EXPR;
	else if (rcode == TRUTH_ANDIF_EXPR)
	  rcode = BIT_AND_EXPR;
	else if (rcode == TRUTH_ORIF_EXPR)
	  rcode = BIT_IOR_EXPR;
	/* The operation is passed to the IFN_GOACC_REDUCTION calls as an
	   integer constant argument.  */
	tree op = build_int_cst (unsigned_type_node, rcode);

	if (!var)
	  var = orig;

	incoming = outgoing = var;

	if (!inner)
	  {
	    /* See if an outer construct also reduces this variable.  */
	    omp_context *outer = ctx;

	    while (omp_context *probe = outer->outer)
	      {
		enum gimple_code type = gimple_code (probe->stmt);
		tree cls;

		switch (type)
		  {
		  case GIMPLE_OMP_FOR:
		    cls = gimple_omp_for_clauses (probe->stmt);
		    break;

		  case GIMPLE_OMP_TARGET:
		    /* No 'reduction' clauses inside OpenACC 'kernels'
		       regions.  */
		    gcc_checking_assert (!is_oacc_kernels (probe));

		    if (!is_gimple_omp_offloaded (probe->stmt))
		      goto do_lookup;

		    cls = gimple_omp_target_clauses (probe->stmt);
		    break;

		  default:
		    goto do_lookup;
		  }

		outer = probe;
		for (; cls;  cls = OMP_CLAUSE_CHAIN (cls))
		  if (OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_REDUCTION
		      && orig == OMP_CLAUSE_DECL (cls))
		    {
		      incoming = outgoing = lookup_decl (orig, probe);
		      goto has_outer_reduction;
		    }
		  else if ((OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_FIRSTPRIVATE
			    || OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_PRIVATE)
			   && orig == OMP_CLAUSE_DECL (cls))
		    {
		      is_private = true;
		      goto do_lookup;
		    }
	      }

	  do_lookup:
	    /* This is the outermost construct with this reduction,
	       see if there's a mapping for it.  */
	    if (gimple_code (outer->stmt) == GIMPLE_OMP_TARGET
		&& maybe_lookup_field (orig, outer) && !is_private)
	      {
		ref_to_res = build_receiver_ref (orig, false, outer);
		if (omp_privatize_by_reference (orig))
		  ref_to_res = build_simple_mem_ref (ref_to_res);

		tree type = TREE_TYPE (var);
		if (POINTER_TYPE_P (type))
		  type = TREE_TYPE (type);

		outgoing = var;
		incoming = omp_reduction_init_op (loc, rcode, type);
	      }
	    else
	      {
		/* Try to look at enclosing contexts for reduction var,
		   use original if no mapping found.  */
		tree t = NULL_TREE;
		omp_context *c = ctx->outer;
		while (c && !t)
		  {
		    t = maybe_lookup_decl (orig, c);
		    c = c->outer;
		  }
		incoming = outgoing = (t ? t : orig);
	      }

	  has_outer_reduction:;
	  }

	if (!ref_to_res)
	  ref_to_res = integer_zero_node;

	if (omp_privatize_by_reference (orig))
	  {
	    tree type = TREE_TYPE (var);
	    const char *id = IDENTIFIER_POINTER (DECL_NAME (var));

	    if (!inner)
	      {
		tree x = create_tmp_var (TREE_TYPE (type), id);
		gimplify_assign (var, build_fold_addr_expr (x), fork_seq);
	      }

	    /* V1..V3 are pointer temporaries, initialized from VAR and
	       dereferenced below; one per IFN_GOACC_REDUCTION call.  */
	    v1 = create_tmp_var (type, id);
	    v2 = create_tmp_var (type, id);
	    v3 = create_tmp_var (type, id);

	    gimplify_assign (v1, var, fork_seq);
	    gimplify_assign (v2, var, fork_seq);
	    gimplify_assign (v3, var, fork_seq);

	    var = build_simple_mem_ref (var);
	    v1 = build_simple_mem_ref (v1);
	    v2 = build_simple_mem_ref (v2);
	    v3 = build_simple_mem_ref (v3);
	    outgoing = build_simple_mem_ref (outgoing);

	    if (!TREE_CONSTANT (incoming))
	      incoming = build_simple_mem_ref (incoming);
	  }
	else
	  /* Not by reference: all three calls operate on VAR directly.  */
	  v1 = v2 = v3 = var;

	/* Determine position in reduction buffer, which may be used
	   by target.  The parser has ensured that this is not a
	   variable-sized type.  */
	fixed_size_mode mode
	  = as_a <fixed_size_mode> (TYPE_MODE (TREE_TYPE (var)));
	unsigned align = GET_MODE_ALIGNMENT (mode) /  BITS_PER_UNIT;
	offset = (offset + align - 1) & ~(align - 1);
	tree off = build_int_cst (sizetype, offset);
	offset += GET_MODE_SIZE (mode);

	/* The IFN_GOACC_REDUCTION selector constants are shared by all
	   clauses; build them lazily on the first reduction seen.  */
	if (!init_code)
	  {
	    init_code = build_int_cst (integer_type_node,
				       IFN_GOACC_REDUCTION_INIT);
	    fini_code = build_int_cst (integer_type_node,
				       IFN_GOACC_REDUCTION_FINI);
	    setup_code = build_int_cst (integer_type_node,
					IFN_GOACC_REDUCTION_SETUP);
	    teardown_code = build_int_cst (integer_type_node,
					   IFN_GOACC_REDUCTION_TEARDOWN);
	  }

	/* Build the four IFN_GOACC_REDUCTION calls; each call's result
	   is assigned to the corresponding temporary below.  */
	tree setup_call
	  = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
					  TREE_TYPE (var), 6, setup_code,
					  unshare_expr (ref_to_res),
					  incoming, level, op, off);
	tree init_call
	  = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
					  TREE_TYPE (var), 6, init_code,
					  unshare_expr (ref_to_res),
					  v1, level, op, off);
	tree fini_call
	  = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
					  TREE_TYPE (var), 6, fini_code,
					  unshare_expr (ref_to_res),
					  v2, level, op, off);
	tree teardown_call
	  = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
					  TREE_TYPE (var), 6, teardown_code,
					  ref_to_res, v3, level, op, off);

	gimplify_assign (v1, setup_call, &before_fork);
	gimplify_assign (v2, init_call, &after_fork);
	gimplify_assign (v3, fini_call, &before_join);
	gimplify_assign (outgoing, teardown_call, &after_join);
      }

  /* Now stitch things together.  */
  gimple_seq_add_seq (fork_seq, before_fork);
  if (private_marker)
    gimple_seq_add_stmt (fork_seq, private_marker);
  if (fork)
    gimple_seq_add_stmt (fork_seq, fork);
  gimple_seq_add_seq (fork_seq, after_fork);

  gimple_seq_add_seq (join_seq, before_join);
  if (join)
    gimple_seq_add_stmt (join_seq, join);
  gimple_seq_add_seq (join_seq, after_join);
}
7646 /* Generate code to implement the REDUCTION clauses, append it
7647 to STMT_SEQP. CLIST if non-NULL is a pointer to a sequence
7648 that should be emitted also inside of the critical section,
7649 in that case clear *CLIST afterwards, otherwise leave it as is
7650 and let the caller emit it itself. */
static void
lower_reduction_clauses (tree clauses, gimple_seq *stmt_seqp,
			 gimple_seq *clist, omp_context *ctx)
{
  gimple_seq sub_seq = NULL;
  gimple *stmt;
  tree x, c;
  int count = 0;

  /* OpenACC loop reductions are handled elsewhere.  */
  if (is_gimple_omp_oacc (ctx->stmt))
    return;

  /* SIMD reductions are handled in lower_rec_input_clauses.  */
  if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
      && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
    return;

  /* inscan reductions are handled elsewhere.  */
  if (ctx->scan_inclusive || ctx->scan_exclusive)
    return;

  /* First see if there is exactly one reduction clause.  Use OMP_ATOMIC
     update in that case, otherwise use a lock.  */
  for (c = clauses; c && count < 2; c = OMP_CLAUSE_CHAIN (c))
    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	&& !OMP_CLAUSE_REDUCTION_TASK (c))
      {
	if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
	    || TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
	  {
	    /* Never use OMP_ATOMIC for array reductions or UDRs.  */
	    count = -1;
	    break;
	  }
	count++;
      }

  if (count == 0)
    return;

  for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
    {
      tree var, ref, new_var, orig_var;
      enum tree_code code;
      location_t clause_loc = OMP_CLAUSE_LOCATION (c);

      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
	  || OMP_CLAUSE_REDUCTION_TASK (c))
	continue;

      enum omp_clause_code ccode = OMP_CLAUSE_REDUCTION;
      orig_var = var = OMP_CLAUSE_DECL (c);
      /* Array section reductions are represented as a MEM_REF; strip it
	 down to the base decl.  */
      if (TREE_CODE (var) == MEM_REF)
	{
	  var = TREE_OPERAND (var, 0);
	  if (TREE_CODE (var) == POINTER_PLUS_EXPR)
	    var = TREE_OPERAND (var, 0);
	  if (TREE_CODE (var) == ADDR_EXPR)
	    var = TREE_OPERAND (var, 0);
	  else
	    {
	      /* If this is a pointer or referenced based array
		 section, the var could be private in the outer
		 context e.g. on orphaned loop construct.  Pretend this
		 is private variable's outer reference.  */
	      ccode = OMP_CLAUSE_PRIVATE;
	      if (TREE_CODE (var) == INDIRECT_REF)
		var = TREE_OPERAND (var, 0);
	    }
	  orig_var = var;
	  if (is_variable_sized (var))
	    {
	      gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
	      var = DECL_VALUE_EXPR (var);
	      gcc_assert (TREE_CODE (var) == INDIRECT_REF);
	      var = TREE_OPERAND (var, 0);
	      gcc_assert (DECL_P (var));
	    }
	}
      new_var = lookup_decl (var, ctx);
      if (var == OMP_CLAUSE_DECL (c)
	  && omp_privatize_by_reference (var))
	new_var = build_simple_mem_ref_loc (clause_loc, new_var);
      ref = build_outer_var_ref (var, ctx, ccode);
      code = OMP_CLAUSE_REDUCTION_CODE (c);

      /* reduction(-:var) sums up the partial results, so it acts
	 identically to reduction(+:var).  */
      if (code == MINUS_EXPR)
	code = PLUS_EXPR;

      bool is_truth_op = (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR);
      if (count == 1)
	{
	  /* Single scalar reduction: merge with an atomic update
	     *addr = *addr OP new_var.  */
	  tree addr = build_fold_addr_expr_loc (clause_loc, ref);

	  addr = save_expr (addr);
	  ref = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (addr)), addr);
	  tree new_var2 = new_var;
	  tree ref2 = ref;
	  if (is_truth_op)
	    {
	      /* Normalize both operands to boolean before combining.  */
	      tree zero = build_zero_cst (TREE_TYPE (new_var));
	      new_var2 = fold_build2_loc (clause_loc, NE_EXPR,
					  boolean_type_node, new_var, zero);
	      ref2 = fold_build2_loc (clause_loc, NE_EXPR, boolean_type_node,
				      ref, zero);
	    }
	  x = fold_build2_loc (clause_loc, code, TREE_TYPE (new_var2), ref2,
			       new_var2);
	  if (is_truth_op)
	    x = fold_convert (TREE_TYPE (new_var), x);
	  x = build2 (OMP_ATOMIC, void_type_node, addr, x);
	  OMP_ATOMIC_MEMORY_ORDER (x) = OMP_MEMORY_ORDER_RELAXED;
	  gimplify_and_add (x, stmt_seqp);
	  return;
	}
      else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
	{
	  /* Array section reduction: emit a loop (BODY..END labels
	     below) that merges each private element into the
	     corresponding outer element.  */
	  tree d = OMP_CLAUSE_DECL (c);
	  tree type = TREE_TYPE (d);
	  tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
	  tree i = create_tmp_var (TREE_TYPE (v));
	  tree ptype = build_pointer_type (TREE_TYPE (type));
	  tree bias = TREE_OPERAND (d, 1);
	  d = TREE_OPERAND (d, 0);
	  if (TREE_CODE (d) == POINTER_PLUS_EXPR)
	    {
	      tree b = TREE_OPERAND (d, 1);
	      b = maybe_lookup_decl (b, ctx);
	      if (b == NULL)
		{
		  b = TREE_OPERAND (d, 1);
		  b = maybe_lookup_decl_in_outer_ctx (b, ctx);
		}
	      if (integer_zerop (bias))
		bias = b;
	      else
		{
		  bias = fold_convert_loc (clause_loc, TREE_TYPE (b), bias);
		  bias = fold_build2_loc (clause_loc, PLUS_EXPR,
					  TREE_TYPE (b), b, bias);
		}
	      d = TREE_OPERAND (d, 0);
	    }
	  /* For ref build_outer_var_ref already performs this, so
	     only new_var needs a dereference.  */
	  if (TREE_CODE (d) == INDIRECT_REF)
	    {
	      new_var = build_simple_mem_ref_loc (clause_loc, new_var);
	      gcc_assert (omp_privatize_by_reference (var)
			  && var == orig_var);
	    }
	  else if (TREE_CODE (d) == ADDR_EXPR)
	    {
	      if (orig_var == var)
		{
		  new_var = build_fold_addr_expr (new_var);
		  ref = build_fold_addr_expr (ref);
		}
	    }
	  else
	    {
	      gcc_assert (orig_var == var);
	      if (omp_privatize_by_reference (var))
		ref = build_fold_addr_expr (ref);
	    }
	  if (DECL_P (v))
	    {
	      tree t = maybe_lookup_decl (v, ctx);
	      if (t)
		v = t;
	      else
		v = maybe_lookup_decl_in_outer_ctx (v, ctx);
	      gimplify_expr (&v, stmt_seqp, NULL, is_gimple_val, fb_rvalue);
	    }
	  if (!integer_zerop (bias))
	    {
	      bias = fold_convert_loc (clause_loc, sizetype, bias);
	      new_var = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
					 TREE_TYPE (new_var), new_var,
					 unshare_expr (bias));
	      ref = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
				     TREE_TYPE (ref), ref, bias);
	    }
	  new_var = fold_convert_loc (clause_loc, ptype, new_var);
	  ref = fold_convert_loc (clause_loc, ptype, ref);
	  tree m = create_tmp_var (ptype);
	  gimplify_assign (m, new_var, stmt_seqp);
	  new_var = m;
	  m = create_tmp_var (ptype);
	  gimplify_assign (m, ref, stmt_seqp);
	  ref = m;
	  gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), stmt_seqp);
	  tree body = create_artificial_label (UNKNOWN_LOCATION);
	  tree end = create_artificial_label (UNKNOWN_LOCATION);
	  gimple_seq_add_stmt (&sub_seq, gimple_build_label (body));
	  tree priv = build_simple_mem_ref_loc (clause_loc, new_var);
	  tree out = build_simple_mem_ref_loc (clause_loc, ref);
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	    {
	      /* User-defined reduction: splice in the lowered
		 GIMPLE_MERGE sequence with the placeholders bound to
		 the outer (OUT) and private (PRIV) elements.  */
	      tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
	      tree decl_placeholder
		= OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
	      SET_DECL_VALUE_EXPR (placeholder, out);
	      DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
	      SET_DECL_VALUE_EXPR (decl_placeholder, priv);
	      DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
	      lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
	      gimple_seq_add_seq (&sub_seq,
				  OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
	      OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
	      OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
	      OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
	    }
	  else
	    {
	      /* Plain element merge: out = out OP priv.  */
	      tree out2 = out;
	      tree priv2 = priv;
	      if (is_truth_op)
		{
		  tree zero = build_zero_cst (TREE_TYPE (out));
		  out2 = fold_build2_loc (clause_loc, NE_EXPR,
					  boolean_type_node, out, zero);
		  priv2 = fold_build2_loc (clause_loc, NE_EXPR,
					   boolean_type_node, priv, zero);
		}
	      x = build2 (code, TREE_TYPE (out2), out2, priv2);
	      if (is_truth_op)
		x = fold_convert (TREE_TYPE (out), x);
	      out = unshare_expr (out);
	      gimplify_assign (out, x, &sub_seq);
	    }
	  /* Advance both pointers, bump the index and loop back while
	     I <= V (the maximum index).  */
	  gimple *g = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
					   TYPE_SIZE_UNIT (TREE_TYPE (type)));
	  gimple_seq_add_stmt (&sub_seq, g);
	  g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
				   TYPE_SIZE_UNIT (TREE_TYPE (type)));
	  gimple_seq_add_stmt (&sub_seq, g);
	  g = gimple_build_assign (i, PLUS_EXPR, i,
				   build_int_cst (TREE_TYPE (i), 1));
	  gimple_seq_add_stmt (&sub_seq, g);
	  g = gimple_build_cond (LE_EXPR, i, v, body, end);
	  gimple_seq_add_stmt (&sub_seq, g);
	  gimple_seq_add_stmt (&sub_seq, gimple_build_label (end));
	}
      else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	{
	  /* User-defined reduction on a scalar: splice in the lowered
	     GIMPLE_MERGE sequence with the placeholder bound to REF.  */
	  tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);

	  if (omp_privatize_by_reference (var)
	      && !useless_type_conversion_p (TREE_TYPE (placeholder),
					     TREE_TYPE (ref)))
	    ref = build_fold_addr_expr_loc (clause_loc, ref);
	  SET_DECL_VALUE_EXPR (placeholder, ref);
	  DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
	  lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
	  gimple_seq_add_seq (&sub_seq, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
	  OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
	  OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
	}
      else
	{
	  /* Plain scalar reduction: ref = ref OP new_var.  */
	  tree new_var2 = new_var;
	  tree ref2 = ref;
	  if (is_truth_op)
	    {
	      tree zero = build_zero_cst (TREE_TYPE (new_var));
	      new_var2 = fold_build2_loc (clause_loc, NE_EXPR,
					  boolean_type_node, new_var, zero);
	      ref2 = fold_build2_loc (clause_loc, NE_EXPR, boolean_type_node,
				      ref, zero);
	    }
	  x = build2 (code, TREE_TYPE (ref), ref2, new_var2);
	  if (is_truth_op)
	    x = fold_convert (TREE_TYPE (new_var), x);
	  ref = build_outer_var_ref (var, ctx);
	  gimplify_assign (ref, x, &sub_seq);
	}
    }

  /* Multiple reductions: emit all merges inside a single
     GOMP_atomic_start/GOMP_atomic_end critical section.  */
  stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START),
			    0);
  gimple_seq_add_stmt (stmt_seqp, stmt);

  gimple_seq_add_seq (stmt_seqp, sub_seq);

  if (clist)
    {
      gimple_seq_add_seq (stmt_seqp, *clist);
      *clist = NULL;
    }

  stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END),
			    0);
  gimple_seq_add_stmt (stmt_seqp, stmt);
}
7952 /* Generate code to implement the COPYPRIVATE clauses. */
7954 static void
7955 lower_copyprivate_clauses (tree clauses, gimple_seq *slist, gimple_seq *rlist,
7956 omp_context *ctx)
7958 tree c;
7960 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7962 tree var, new_var, ref, x;
7963 bool by_ref;
7964 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
7966 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYPRIVATE)
7967 continue;
7969 var = OMP_CLAUSE_DECL (c);
7970 by_ref = use_pointer_for_field (var, NULL);
7972 ref = build_sender_ref (var, ctx);
7973 x = new_var = lookup_decl_in_outer_ctx (var, ctx);
7974 if (by_ref)
7976 x = build_fold_addr_expr_loc (clause_loc, new_var);
7977 x = fold_convert_loc (clause_loc, TREE_TYPE (ref), x);
7979 gimplify_assign (ref, x, slist);
7981 ref = build_receiver_ref (var, false, ctx);
7982 if (by_ref)
7984 ref = fold_convert_loc (clause_loc,
7985 build_pointer_type (TREE_TYPE (new_var)),
7986 ref);
7987 ref = build_fold_indirect_ref_loc (clause_loc, ref);
7989 if (omp_privatize_by_reference (var))
7991 ref = fold_convert_loc (clause_loc, TREE_TYPE (new_var), ref);
7992 ref = build_simple_mem_ref_loc (clause_loc, ref);
7993 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
7995 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, ref);
7996 gimplify_and_add (x, rlist);
8001 /* Generate code to implement the clauses, FIRSTPRIVATE, COPYIN, LASTPRIVATE,
8002 and REDUCTION from the sender (aka parent) side. */
static void
lower_send_clauses (tree clauses, gimple_seq *ilist, gimple_seq *olist,
		    omp_context *ctx)
{
  tree c, t;
  int ignored_looptemp = 0;
  bool is_taskloop = false;

  /* For taskloop, ignore first two _looptemp_ clauses, those are initialized
     by GOMP_taskloop.  */
  if (is_task_ctx (ctx) && gimple_omp_task_taskloop_p (ctx->stmt))
    {
      ignored_looptemp = 2;
      is_taskloop = true;
    }

  for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
    {
      tree val, ref, x, var;
      bool by_ref, do_in = false, do_out = false;
      location_t clause_loc = OMP_CLAUSE_LOCATION (c);

      /* Decide whether this clause kind needs any sender-side handling
	 at all; 'continue' skips the clause entirely.  */
      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_PRIVATE:
	  if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
	    break;
	  continue;
	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_LASTPRIVATE:
	case OMP_CLAUSE_IN_REDUCTION:
	case OMP_CLAUSE__REDUCTEMP_:
	  break;
	case OMP_CLAUSE_REDUCTION:
	  if (is_task_ctx (ctx) || OMP_CLAUSE_REDUCTION_TASK (c))
	    continue;
	  break;
	case OMP_CLAUSE_SHARED:
	  if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	    break;
	  continue;
	case OMP_CLAUSE__LOOPTEMP_:
	  if (ignored_looptemp)
	    {
	      ignored_looptemp--;
	      continue;
	    }
	  break;
	default:
	  continue;
	}

      val = OMP_CLAUSE_DECL (c);
      /* For (in_)reduction array sections, strip the MEM_REF down to
	 the base decl.  */
      if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	   || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION)
	  && TREE_CODE (val) == MEM_REF)
	{
	  val = TREE_OPERAND (val, 0);
	  if (TREE_CODE (val) == POINTER_PLUS_EXPR)
	    val = TREE_OPERAND (val, 0);
	  if (TREE_CODE (val) == INDIRECT_REF
	      || TREE_CODE (val) == ADDR_EXPR)
	    val = TREE_OPERAND (val, 0);
	  if (is_variable_sized (val))
	    continue;
	}

      /* For OMP_CLAUSE_SHARED_FIRSTPRIVATE, look beyond the
	 outer taskloop region.  */
      omp_context *ctx_for_o = ctx;
      if (is_taskloop
	  && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
	  && OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	ctx_for_o = ctx->outer;

      var = lookup_decl_in_outer_ctx (val, ctx_for_o);

      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYIN
	  && is_global_var (var)
	  && (val == OMP_CLAUSE_DECL (c)
	      || !is_task_ctx (ctx)
	      || (TREE_CODE (TREE_TYPE (val)) != POINTER_TYPE
		  && (TREE_CODE (TREE_TYPE (val)) != REFERENCE_TYPE
		      || (TREE_CODE (TREE_TYPE (TREE_TYPE (val)))
			  != POINTER_TYPE)))))
	continue;

      t = omp_member_access_dummy_var (var);
      if (t)
	{
	  var = DECL_VALUE_EXPR (var);
	  tree o = maybe_lookup_decl_in_outer_ctx (t, ctx_for_o);
	  if (o != t)
	    var = unshare_and_remap (var, t, o);
	  else
	    var = unshare_expr (var);
	}

      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
	{
	  /* Handle taskloop firstprivate/lastprivate, where the
	     lastprivate on GIMPLE_OMP_TASK is represented as
	     OMP_CLAUSE_SHARED_FIRSTPRIVATE.  */
	  tree f = lookup_sfield ((splay_tree_key) &DECL_UID (val), ctx);
	  x = omp_build_component_ref (ctx->sender_decl, f);
	  if (use_pointer_for_field (val, ctx))
	    var = build_fold_addr_expr (var);
	  gimplify_assign (x, var, ilist);
	  DECL_ABSTRACT_ORIGIN (f) = NULL;
	  continue;
	}

      if (((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
	    && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IN_REDUCTION)
	   || val == OMP_CLAUSE_DECL (c))
	  && is_variable_sized (val))
	continue;
      by_ref = use_pointer_for_field (val, NULL);

      /* DO_IN: copy the value into the sender record before the region;
	 DO_OUT: copy it back out afterwards.  */
      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_FIRSTPRIVATE:
	  if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c)
	      && !by_ref
	      && is_task_ctx (ctx))
	    suppress_warning (var);
	  do_in = true;
	  break;

	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE__LOOPTEMP_:
	case OMP_CLAUSE__REDUCTEMP_:
	  do_in = true;
	  break;

	case OMP_CLAUSE_LASTPRIVATE:
	  if (by_ref || omp_privatize_by_reference (val))
	    {
	      if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
		continue;
	      do_in = true;
	    }
	  else
	    {
	      do_out = true;
	      if (lang_hooks.decls.omp_private_outer_ref (val))
		do_in = true;
	    }
	  break;

	case OMP_CLAUSE_REDUCTION:
	case OMP_CLAUSE_IN_REDUCTION:
	  do_in = true;
	  if (val == OMP_CLAUSE_DECL (c))
	    {
	      if (is_task_ctx (ctx))
		by_ref = use_pointer_for_field (val, ctx);
	      else
		do_out = !(by_ref || omp_privatize_by_reference (val));
	    }
	  else
	    by_ref = TREE_CODE (TREE_TYPE (val)) == ARRAY_TYPE;
	  break;

	default:
	  gcc_unreachable ();
	}

      if (do_in)
	{
	  /* Store the value (or its address, when BY_REF) into the
	     sender record field.  */
	  ref = build_sender_ref (val, ctx);
	  x = by_ref ? build_fold_addr_expr_loc (clause_loc, var) : var;
	  gimplify_assign (ref, x, ilist);
	  if (is_task_ctx (ctx))
	    DECL_ABSTRACT_ORIGIN (TREE_OPERAND (ref, 1)) = NULL;
	}

      if (do_out)
	{
	  /* Copy the result back out of the sender record.  */
	  ref = build_sender_ref (val, ctx);
	  gimplify_assign (var, ref, olist);
	}
    }
}
8191 /* Generate code to implement SHARED from the sender (aka parent)
8192 side. This is trickier, since GIMPLE_OMP_PARALLEL_CLAUSES doesn't
8193 list things that got automatically shared. */
static void
lower_send_shared_vars (gimple_seq *ilist, gimple_seq *olist, omp_context *ctx)
{
  tree var, ovar, nvar, t, f, x, record_type;

  if (ctx->record_type == NULL)
    return;

  /* Walk every field of the sender record; each field's
     DECL_ABSTRACT_ORIGIN points back at the original shared
     variable.  */
  record_type = ctx->srecord_type ? ctx->srecord_type : ctx->record_type;
  for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
    {
      ovar = DECL_ABSTRACT_ORIGIN (f);
      if (!ovar || TREE_CODE (ovar) == FIELD_DECL)
	continue;

      /* Skip variables with no value expression in this context, and
	 variables listed in the allocate map.  */
      nvar = maybe_lookup_decl (ovar, ctx);
      if (!nvar
	  || !DECL_HAS_VALUE_EXPR_P (nvar)
	  || (ctx->allocate_map
	      && ctx->allocate_map->get (ovar)))
	continue;

      /* If CTX is a nested parallel directive.  Find the immediately
	 enclosing parallel or workshare construct that contains a
	 mapping for OVAR.  */
      var = lookup_decl_in_outer_ctx (ovar, ctx);

      t = omp_member_access_dummy_var (var);
      if (t)
	{
	  var = DECL_VALUE_EXPR (var);
	  tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
	  if (o != t)
	    var = unshare_and_remap (var, t, o);
	  else
	    var = unshare_expr (var);
	}

      if (use_pointer_for_field (ovar, ctx))
	{
	  /* Field holds a pointer: send the address (or an empty
	     constructor for the _condtemp_ array).  */
	  x = build_sender_ref (ovar, ctx);
	  if (TREE_CODE (TREE_TYPE (f)) == ARRAY_TYPE
	      && TREE_TYPE (f) == TREE_TYPE (ovar))
	    {
	      gcc_assert (is_parallel_ctx (ctx)
			  && DECL_ARTIFICIAL (ovar));
	      /* _condtemp_ clause.  */
	      var = build_constructor (TREE_TYPE (x), NULL);
	    }
	  else
	    var = build_fold_addr_expr (var);
	  gimplify_assign (x, var, ilist);
	}
      else
	{
	  /* Field holds the value directly: copy in before the region,
	     and copy back out after, unless storing back is unsafe.  */
	  x = build_sender_ref (ovar, ctx);
	  gimplify_assign (x, var, ilist);

	  if (!TREE_READONLY (var)
	      /* We don't need to receive a new reference to a result
	         or parm decl.  In fact we may not store to it as we will
		 invalidate any pending RSO and generate wrong gimple
		 during inlining.  */
	      && !((TREE_CODE (var) == RESULT_DECL
		    || TREE_CODE (var) == PARM_DECL)
		   && DECL_BY_REFERENCE (var)))
	    {
	      x = build_sender_ref (ovar, ctx);
	      gimplify_assign (var, x, olist);
	    }
	}
    }
}
/* Emit an OpenACC head marker call, encapsulating the partitioning and
8270 other information that must be processed by the target compiler.
8271 Return the maximum number of dimensions the associated loop might
8272 be partitioned over. */
static unsigned
lower_oacc_head_mark (location_t loc, tree ddvar, tree clauses,
		      gimple_seq *seq, omp_context *ctx)
{
  /* LEVELS counts explicitly requested partitioning levels; TAG
     accumulates OLF_* flags describing the partitioning.  */
  unsigned levels = 0;
  unsigned tag = 0;
  tree gang_static = NULL_TREE;
  auto_vec<tree, 5> args;

  args.quick_push (build_int_cst
		   (integer_type_node, IFN_UNIQUE_OACC_HEAD_MARK));
  args.quick_push (ddvar);
  for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_GANG:
	  tag |= OLF_DIM_GANG;
	  gang_static = OMP_CLAUSE_GANG_STATIC_EXPR (c);
	  /* static:* is represented by -1, and we can ignore it, as
	     scheduling is always static.  */
	  if (gang_static && integer_minus_onep (gang_static))
	    gang_static = NULL_TREE;
	  levels++;
	  break;

	case OMP_CLAUSE_WORKER:
	  tag |= OLF_DIM_WORKER;
	  levels++;
	  break;

	case OMP_CLAUSE_VECTOR:
	  tag |= OLF_DIM_VECTOR;
	  levels++;
	  break;

	case OMP_CLAUSE_SEQ:
	  tag |= OLF_SEQ;
	  break;

	case OMP_CLAUSE_AUTO:
	  tag |= OLF_AUTO;
	  break;

	case OMP_CLAUSE_INDEPENDENT:
	  tag |= OLF_INDEPENDENT;
	  break;

	case OMP_CLAUSE_TILE:
	  tag |= OLF_TILE;
	  break;

	case OMP_CLAUSE_REDUCTION:
	  tag |= OLF_REDUCTION;
	  break;

	default:
	  continue;
	}
    }

  if (gang_static)
    {
      /* A DECL-valued static argument must be looked up in the outer
	 context before being passed along.  */
      if (DECL_P (gang_static))
	gang_static = build_outer_var_ref (gang_static, ctx);
      tag |= OLF_GANG_STATIC;
    }

  /* Sanity-check the enclosing offload region kind.  */
  omp_context *tgt = enclosing_target_ctx (ctx);
  if (!tgt || is_oacc_parallel_or_serial (tgt))
    ;
  else if (is_oacc_kernels (tgt))
    /* Not using this loops handling inside OpenACC 'kernels' regions.  */
    gcc_unreachable ();
  else if (is_oacc_kernels_decomposed_part (tgt))
    ;
  else
    gcc_unreachable ();

  /* In a parallel region, loops are implicitly INDEPENDENT.  */
  if (!tgt || is_oacc_parallel_or_serial (tgt))
    tag |= OLF_INDEPENDENT;

  /* Loops inside OpenACC 'kernels' decomposed parts' regions are expected to
     have an explicit 'seq' or 'independent' clause, and no 'auto' clause.  */
  if (tgt && is_oacc_kernels_decomposed_part (tgt))
    {
      gcc_assert (tag & (OLF_SEQ | OLF_INDEPENDENT));
      gcc_assert (!(tag & OLF_AUTO));
    }

  if (tag & OLF_TILE)
    /* Tiling could use all 3 levels.  */
    levels = 3;
  else
    {
      /* A loop lacking SEQ, GANG, WORKER and/or VECTOR could be AUTO.
	 Ensure at least one level, or 2 for possible auto
	 partitioning */
      bool maybe_auto = !(tag & (((GOMP_DIM_MASK (GOMP_DIM_MAX) - 1)
				  << OLF_DIM_BASE) | OLF_SEQ));

      if (levels < 1u + maybe_auto)
	levels = 1u + maybe_auto;
    }

  /* Emit the IFN_UNIQUE head-marker call carrying LEVELS, TAG and the
     optional gang-static argument.  */
  args.quick_push (build_int_cst (integer_type_node, levels));
  args.quick_push (build_int_cst (integer_type_node, tag));
  if (gang_static)
    args.quick_push (gang_static);

  gcall *call = gimple_build_call_internal_vec (IFN_UNIQUE, args);
  gimple_set_location (call, loc);
  gimple_set_lhs (call, ddvar);
  gimple_seq_add_stmt (seq, call);

  return levels;
}
8393 /* Emit an OpenACC loop head or tail marker to SEQ.  TOFOLLOW, if
8394 non-NULL, is the partitioning level of the enclosed region. */
8396 static void
8397 lower_oacc_loop_marker (location_t loc, tree ddvar, bool head,
8398 tree tofollow, gimple_seq *seq)
/* HEAD selects between the head-marker and tail-marker variants of the
   IFN_UNIQUE internal call.  */
8400 int marker_kind = (head ? IFN_UNIQUE_OACC_HEAD_MARK
8401 : IFN_UNIQUE_OACC_TAIL_MARK);
8402 tree marker = build_int_cst (integer_type_node, marker_kind);
/* TOFOLLOW is optional; the marker call takes 2 or 3 arguments
   depending on whether it is present.  */
8403 int nargs = 2 + (tofollow != NULL_TREE);
8404 gcall *call = gimple_build_call_internal (IFN_UNIQUE, nargs,
8405 marker, ddvar, tofollow);
8406 gimple_set_location (call, loc);
/* DDVAR is both an argument and the LHS, chaining the data-dependence
   variable through the sequence of IFN_UNIQUE calls.  */
8407 gimple_set_lhs (call, ddvar);
8408 gimple_seq_add_stmt (seq, call);
8411 /* Generate the before and after OpenACC loop sequences. CLAUSES are
8412 the loop clauses, from which we extract reductions. Initialize
8413 HEAD and TAIL. */
8415 static void
8416 lower_oacc_head_tail (location_t loc, tree clauses, gcall *private_marker,
8417 gimple_seq *head, gimple_seq *tail, omp_context *ctx)
/* INNER becomes true after the first (outermost) partitioning level has
   been emitted.  */
8419 bool inner = false;
/* DDVAR is the data-dependence variable threaded through all the
   IFN_UNIQUE calls; it starts at zero.  */
8420 tree ddvar = create_tmp_var (integer_type_node, ".data_dep");
8421 gimple_seq_add_stmt (head, gimple_build_assign (ddvar, integer_zero_node));
/* Emit the head mark and learn how many partitioning levels there are.  */
8423 unsigned count = lower_oacc_head_mark (loc, ddvar, clauses, head, ctx);
8425 if (private_marker)
/* Hook the caller-provided private marker into the DDVAR chain.  */
8427 gimple_set_location (private_marker, loc);
8428 gimple_call_set_lhs (private_marker, ddvar);
8429 gimple_call_set_arg (private_marker, 1, ddvar);
8432 tree fork_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_FORK);
8433 tree join_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_JOIN);
8435 gcc_assert (count);
/* Emit one fork/join pair per partitioning level, from outermost
   inwards.  COUNT counts down remaining levels, DONE counts up levels
   already emitted.  */
8436 for (unsigned done = 1; count; count--, done++)
8438 gimple_seq fork_seq = NULL;
8439 gimple_seq join_seq = NULL;
/* PLACE of -1 leaves the partitioning level to be filled in later.  */
8441 tree place = build_int_cst (integer_type_node, -1);
8442 gcall *fork = gimple_build_call_internal (IFN_UNIQUE, 3,
8443 fork_kind, ddvar, place);
8444 gimple_set_location (fork, loc);
8445 gimple_set_lhs (fork, ddvar);
8447 gcall *join = gimple_build_call_internal (IFN_UNIQUE, 3,
8448 join_kind, ddvar, place);
8449 gimple_set_location (join, loc);
8450 gimple_set_lhs (join, ddvar);
8452 /* Mark the beginning of this level sequence. */
8453 if (inner)
8454 lower_oacc_loop_marker (loc, ddvar, true,
8455 build_int_cst (integer_type_node, count),
8456 &fork_seq);
8457 lower_oacc_loop_marker (loc, ddvar, false,
8458 build_int_cst (integer_type_node, done),
8459 &join_seq);
/* Reductions for this level; the private marker only applies at the
   innermost level (count == 1).  */
8461 lower_oacc_reductions (loc, clauses, place, inner,
8462 fork, (count == 1) ? private_marker : NULL,
8463 join, &fork_seq, &join_seq, ctx);
8465 /* Append this level to head. */
8466 gimple_seq_add_seq (head, fork_seq)
8467 /* Prepend it to tail. */
8468 gimple_seq_add_seq (&join_seq, *tail);
8469 *tail = join_seq;
8471 inner = true;
8474 /* Mark the end of the sequence. */
8475 lower_oacc_loop_marker (loc, ddvar, true, NULL_TREE, head);
8476 lower_oacc_loop_marker (loc, ddvar, false, NULL_TREE, tail);
8479 /* If exceptions are enabled, wrap the statements in BODY in a MUST_NOT_THROW
8480 catch handler and return it. This prevents programs from violating the
8481 structured block semantics with throws. */
8483 static gimple_seq
8484 maybe_catch_exception (gimple_seq body)
8486 gimple *g;
8487 tree decl;
/* Nothing to do if EH is disabled for this compilation.  */
8489 if (!flag_exceptions)
8490 return body;
/* Prefer the frontend's cleanup action (e.g. std::terminate for C++);
   fall back to a plain trap.  */
8492 if (lang_hooks.eh_protect_cleanup_actions != NULL)
8493 decl = lang_hooks.eh_protect_cleanup_actions ();
8494 else
8495 decl = builtin_decl_explicit (BUILT_IN_TRAP);
/* Wrap BODY in try { BODY } catch (MUST_NOT_THROW) so an escaping
   exception invokes DECL instead of leaving the structured block.  */
8497 g = gimple_build_eh_must_not_throw (decl);
8498 g = gimple_build_try (body, gimple_seq_alloc_with_stmt (g),
8499 GIMPLE_TRY_CATCH);
8501 return gimple_seq_alloc_with_stmt (g);
8505 /* Routines to lower OMP directives into OMP-GIMPLE. */
8507 /* If ctx is a worksharing context inside of a cancellable parallel
8508 region and it isn't nowait, add lhs to its GIMPLE_OMP_RETURN
8509 and conditional branch to parallel's cancel_label to handle
8510 cancellation in the implicit barrier. */
8512 static void
8513 maybe_add_implicit_barrier_cancel (omp_context *ctx, gimple *omp_return,
8514 gimple_seq *body)
8516 gcc_assert (gimple_code (omp_return) == GIMPLE_OMP_RETURN);
/* A nowait construct has no implicit barrier, so there is nothing to
   cancel.  */
8517 if (gimple_omp_return_nowait_p (omp_return))
8518 return;
/* Walk outwards looking for an enclosing cancellable parallel region.  */
8519 for (omp_context *outer = ctx->outer; outer; outer = outer->outer)
8520 if (gimple_code (outer->stmt) == GIMPLE_OMP_PARALLEL
8521 && outer->cancellable)
/* Give the GIMPLE_OMP_RETURN a boolean LHS (the barrier's return
   value) and branch to the parallel's cancel label when it is
   true, i.e. when the region was cancelled.  */
8523 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_CANCEL);
8524 tree c_bool_type = TREE_TYPE (TREE_TYPE (fndecl));
8525 tree lhs = create_tmp_var (c_bool_type);
8526 gimple_omp_return_set_lhs (omp_return, lhs);
8527 tree fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
8528 gimple *g = gimple_build_cond (NE_EXPR, lhs,
8529 fold_convert (c_bool_type,
8530 boolean_false_node),
8531 outer->cancel_label, fallthru_label);
8532 gimple_seq_add_stmt (body, g);
8533 gimple_seq_add_stmt (body, gimple_build_label (fallthru_label));
/* Taskgroup and scope contexts are transparent for this search; any
   other construct in between stops it.  */
8535 else if (gimple_code (outer->stmt) != GIMPLE_OMP_TASKGROUP
8536 && gimple_code (outer->stmt) != GIMPLE_OMP_SCOPE)
8537 return;
8540 /* Find the first task_reduction or reduction clause or return NULL
8541 if there are none. */
8543 static inline tree
8544 omp_task_reductions_find_first (tree clauses, enum tree_code code,
8545 enum omp_clause_code ccode)
8547 while (1)
8549 clauses = omp_find_clause (clauses, ccode);
8550 if (clauses == NULL_TREE)
8551 return NULL_TREE;
/* Any task_reduction clause qualifies; a plain reduction clause only
   qualifies on taskloop or when it carries the task modifier.  */
8552 if (ccode != OMP_CLAUSE_REDUCTION
8553 || code == OMP_TASKLOOP
8554 || OMP_CLAUSE_REDUCTION_TASK (clauses))
8555 return clauses;
8556 clauses = OMP_CLAUSE_CHAIN (clauses);
8560 static void lower_omp_task_reductions (omp_context *, enum tree_code, tree,
8561 gimple_seq *, gimple_seq *);
8563 /* Lower the OpenMP sections directive in the current statement in GSI_P.
8564 CTX is the enclosing OMP context for the current statement. */
8566 static void
8567 lower_omp_sections (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8569 tree block, control;
8570 gimple_stmt_iterator tgsi;
8571 gomp_sections *stmt;
8572 gimple *t;
8573 gbind *new_stmt, *bind;
8574 gimple_seq ilist, dlist, olist, tred_dlist = NULL, clist = NULL, new_body;
8576 stmt = as_a <gomp_sections *> (gsi_stmt (*gsi_p));
8578 push_gimplify_context ();
8580 dlist = NULL;
8581 ilist = NULL;
/* If there are task reductions, add a _reductemp_ clause holding a
   pointer-sized temporary and lower the task-reduction bookkeeping
   into ILIST (setup) and TRED_DLIST (teardown).  */
8583 tree rclauses
8584 = omp_task_reductions_find_first (gimple_omp_sections_clauses (stmt),
8585 OMP_SECTIONS, OMP_CLAUSE_REDUCTION);
8586 tree rtmp = NULL_TREE;
8587 if (rclauses)
8589 tree type = build_pointer_type (pointer_sized_int_node);
8590 tree temp = create_tmp_var (type);
8591 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
8592 OMP_CLAUSE_DECL (c) = temp;
8593 OMP_CLAUSE_CHAIN (c) = gimple_omp_sections_clauses (stmt);
8594 gimple_omp_sections_set_clauses (stmt, c);
8595 lower_omp_task_reductions (ctx, OMP_SECTIONS,
8596 gimple_omp_sections_clauses (stmt),
8597 &ilist, &tred_dlist);
8598 rclauses = c;
8599 rtmp = make_ssa_name (type);
8600 gimple_seq_add_stmt (&ilist, gimple_build_assign (rtmp, temp));
8603 tree *clauses_ptr = gimple_omp_sections_clauses_ptr (stmt);
8604 lower_lastprivate_conditional_clauses (clauses_ptr, ctx);
/* Lower privatization/firstprivate etc. into ILIST (entry) and DLIST
   (exit).  */
8606 lower_rec_input_clauses (gimple_omp_sections_clauses (stmt),
8607 &ilist, &dlist, ctx, NULL);
/* CONTROL is the variable the sections switch dispatches on.  */
8609 control = create_tmp_var (unsigned_type_node, ".section");
8610 gimple_omp_sections_set_control (stmt, control);
/* Lower each GIMPLE_OMP_SECTION in the body in its own context.  */
8612 new_body = gimple_omp_body (stmt);
8613 gimple_omp_set_body (stmt, NULL);
8614 tgsi = gsi_start (new_body);
8615 for (; !gsi_end_p (tgsi); gsi_next (&tgsi))
8617 omp_context *sctx;
8618 gimple *sec_start;
8620 sec_start = gsi_stmt (tgsi);
8621 sctx = maybe_lookup_ctx (sec_start);
8622 gcc_assert (sctx);
8624 lower_omp (gimple_omp_body_ptr (sec_start), sctx);
8625 gsi_insert_seq_after (&tgsi, gimple_omp_body (sec_start),
8626 GSI_CONTINUE_LINKING);
8627 gimple_omp_set_body (sec_start, NULL);
/* The last section also executes the lastprivate assignments.  */
8629 if (gsi_one_before_end_p (tgsi))
8631 gimple_seq l = NULL;
8632 lower_lastprivate_clauses (gimple_omp_sections_clauses (stmt), NULL,
8633 &ilist, &l, &clist, ctx);
8634 gsi_insert_seq_after (&tgsi, l, GSI_CONTINUE_LINKING);
8635 gimple_omp_section_set_last (sec_start);
8638 gsi_insert_after (&tgsi, gimple_build_omp_return (false),
8639 GSI_CONTINUE_LINKING);
8642 block = make_node (BLOCK);
8643 bind = gimple_build_bind (NULL, new_body, block);
8645 olist = NULL;
8646 lower_reduction_clauses (gimple_omp_sections_clauses (stmt), &olist,
8647 &clist, ctx);
/* Conditional-lastprivate stores in CLIST must run atomically; bracket
   them with GOMP_atomic_start/end.  */
8648 if (clist)
8650 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START);
8651 gcall *g = gimple_build_call (fndecl, 0);
8652 gimple_seq_add_stmt (&olist, g);
8653 gimple_seq_add_seq (&olist, clist);
8654 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END);
8655 g = gimple_build_call (fndecl, 0);
8656 gimple_seq_add_stmt (&olist, g);
/* Replace the sections statement with an outer bind that will hold the
   fully lowered body.  */
8659 block = make_node (BLOCK);
8660 new_stmt = gimple_build_bind (NULL, NULL, block);
8661 gsi_replace (gsi_p, new_stmt, true);
8663 pop_gimplify_context (new_stmt);
8664 gimple_bind_append_vars (new_stmt, ctx->block_vars);
8665 BLOCK_VARS (block) = gimple_bind_vars (bind);
8666 if (BLOCK_VARS (block))
8667 TREE_USED (block) = 1;
/* Assemble: setup, the sections stmt, the dispatch switch, the body,
   the continue, reductions, optional cancel label, destructors.  */
8669 new_body = NULL;
8670 gimple_seq_add_seq (&new_body, ilist);
8671 gimple_seq_add_stmt (&new_body, stmt);
8672 gimple_seq_add_stmt (&new_body, gimple_build_omp_sections_switch ());
8673 gimple_seq_add_stmt (&new_body, bind);
8675 t = gimple_build_omp_continue (control, control);
8676 gimple_seq_add_stmt (&new_body, t);
8678 gimple_seq_add_seq (&new_body, olist);
8679 if (ctx->cancellable)
8680 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
8681 gimple_seq_add_seq (&new_body, dlist);
8683 new_body = maybe_catch_exception (new_body);
8685 bool nowait = omp_find_clause (gimple_omp_sections_clauses (stmt),
8686 OMP_CLAUSE_NOWAIT) != NULL_TREE;
8687 t = gimple_build_omp_return (nowait);
8688 gimple_seq_add_stmt (&new_body, t);
8689 gimple_seq_add_seq (&new_body, tred_dlist);
8690 maybe_add_implicit_barrier_cancel (ctx, t, &new_body);
/* Now that the task-reduction temporary has an SSA name, point the
   _reductemp_ clause at it.  */
8692 if (rclauses)
8693 OMP_CLAUSE_DECL (rclauses) = rtmp;
8695 gimple_bind_set_body (new_stmt, new_body);
8699 /* A subroutine of lower_omp_single. Expand the simple form of
8700 a GIMPLE_OMP_SINGLE, without a copyprivate clause:
8702 if (GOMP_single_start ())
8703 BODY;
8704 [ GOMP_barrier (); ] -> unless 'nowait' is present.
8706 FIXME. It may be better to delay expanding the logic of this until
8707 pass_expand_omp. The expanded logic may make the job more difficult
8708 to a synchronization analysis pass. */
8710 static void
8711 lower_omp_single_simple (gomp_single *single_stmt, gimple_seq *pre_p)
8713 location_t loc = gimple_location (single_stmt);
8714 tree tlabel = create_artificial_label (loc);
8715 tree flabel = create_artificial_label (loc);
8716 gimple *call, *cond;
8717 tree lhs, decl;
/* lhs = GOMP_single_start ();  true in exactly one thread.  */
8719 decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_START);
8720 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (decl)));
8721 call = gimple_build_call (decl, 0);
8722 gimple_call_set_lhs (call, lhs);
8723 gimple_seq_add_stmt (pre_p, call);
/* if (lhs) goto tlabel; else goto flabel; — only the winning thread
   executes the body.  */
8725 cond = gimple_build_cond (EQ_EXPR, lhs,
8726 fold_convert_loc (loc, TREE_TYPE (lhs),
8727 boolean_true_node),
8728 tlabel, flabel);
8729 gimple_seq_add_stmt (pre_p, cond);
8730 gimple_seq_add_stmt (pre_p, gimple_build_label (tlabel));
8731 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
8732 gimple_seq_add_stmt (pre_p, gimple_build_label (flabel));
8736 /* A subroutine of lower_omp_single. Expand the simple form of
8737 a GIMPLE_OMP_SINGLE, with a copyprivate clause:
8739 #pragma omp single copyprivate (a, b, c)
8741 Create a new structure to hold copies of 'a', 'b' and 'c' and emit:
8744 if ((copyout_p = GOMP_single_copy_start ()) == NULL)
8746 BODY;
8747 copyout.a = a;
8748 copyout.b = b;
8749 copyout.c = c;
8750 GOMP_single_copy_end (&copyout);
8752 else
8754 a = copyout_p->a;
8755 b = copyout_p->b;
8756 c = copyout_p->c;
8758 GOMP_barrier ();
8761 FIXME. It may be better to delay expanding the logic of this until
8762 pass_expand_omp. The expanded logic may make the job more difficult
8763 to a synchronization analysis pass. */
8765 static void
8766 lower_omp_single_copy (gomp_single *single_stmt, gimple_seq *pre_p,
8767 omp_context *ctx)
8769 tree ptr_type, t, l0, l1, l2, bfn_decl;
8770 gimple_seq copyin_seq;
8771 location_t loc = gimple_location (single_stmt);
/* SENDER_DECL is the copyout struct the executing thread fills in;
   RECEIVER_DECL is the pointer the other threads read it through.  */
8773 ctx->sender_decl = create_tmp_var (ctx->record_type, ".omp_copy_o");
8775 ptr_type = build_pointer_type (ctx->record_type);
8776 ctx->receiver_decl = create_tmp_var (ptr_type, ".omp_copy_i");
/* l0: body + copyout (executing thread); l1: copyin (other threads);
   l2: join point.  */
8778 l0 = create_artificial_label (loc);
8779 l1 = create_artificial_label (loc);
8780 l2 = create_artificial_label (loc);
/* receiver = GOMP_single_copy_start (); NULL in the executing thread.  */
8782 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_START);
8783 t = build_call_expr_loc (loc, bfn_decl, 0);
8784 t = fold_convert_loc (loc, ptr_type, t);
8785 gimplify_assign (ctx->receiver_decl, t, pre_p);
8787 t = build2 (EQ_EXPR, boolean_type_node, ctx->receiver_decl,
8788 build_int_cst (ptr_type, 0));
8789 t = build3 (COND_EXPR, void_type_node, t,
8790 build_and_jump (&l0), build_and_jump (&l1));
8791 gimplify_and_add (t, pre_p);
8793 gimple_seq_add_stmt (pre_p, gimple_build_label (l0));
8795 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
/* Lower the copyprivate clauses; the copyout stores go straight into
   PRE_P, the copyin loads are collected in COPYIN_SEQ for the l1 arm.  */
8797 copyin_seq = NULL;
8798 lower_copyprivate_clauses (gimple_omp_single_clauses (single_stmt), pre_p,
8799 &copyin_seq, ctx);
/* GOMP_single_copy_end (&copyout) publishes the data to the waiters.  */
8801 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
8802 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_END);
8803 t = build_call_expr_loc (loc, bfn_decl, 1, t);
8804 gimplify_and_add (t, pre_p);
8806 t = build_and_jump (&l2);
8807 gimplify_and_add (t, pre_p);
8809 gimple_seq_add_stmt (pre_p, gimple_build_label (l1));
8811 gimple_seq_add_seq (pre_p, copyin_seq);
8813 gimple_seq_add_stmt (pre_p, gimple_build_label (l2));
8817 /* Expand code for an OpenMP single directive. */
8819 static void
8820 lower_omp_single (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8822 tree block;
8823 gomp_single *single_stmt = as_a <gomp_single *> (gsi_stmt (*gsi_p));
8824 gbind *bind;
8825 gimple_seq bind_body, bind_body_tail = NULL, dlist;
8827 push_gimplify_context ();
/* Replace the single statement with a bind that will hold the lowered
   form.  */
8829 block = make_node (BLOCK);
8830 bind = gimple_build_bind (NULL, NULL, block);
8831 gsi_replace (gsi_p, bind, true);
8832 bind_body = NULL;
8833 dlist = NULL;
8834 lower_rec_input_clauses (gimple_omp_single_clauses (single_stmt),
8835 &bind_body, &dlist, ctx, NULL);
8836 lower_omp (gimple_omp_body_ptr (single_stmt), ctx);
8838 gimple_seq_add_stmt (&bind_body, single_stmt);
/* A record type is only created when there are copyprivate clauses;
   pick the matching expansion.  */
8840 if (ctx->record_type)
8841 lower_omp_single_copy (single_stmt, &bind_body, ctx);
8842 else
8843 lower_omp_single_simple (single_stmt, &bind_body);
8845 gimple_omp_set_body (single_stmt, NULL);
8847 gimple_seq_add_seq (&bind_body, dlist);
8849 bind_body = maybe_catch_exception (bind_body);
8851 bool nowait = omp_find_clause (gimple_omp_single_clauses (single_stmt),
8852 OMP_CLAUSE_NOWAIT) != NULL_TREE;
8853 gimple *g = gimple_build_omp_return (nowait);
8854 gimple_seq_add_stmt (&bind_body_tail, g);
8855 maybe_add_implicit_barrier_cancel (ctx, g, &bind_body_tail);
/* Clobber the copyout struct once it can no longer be read, to end its
   lifetime.  */
8856 if (ctx->record_type)
8858 gimple_stmt_iterator gsi = gsi_start (bind_body_tail);
8859 tree clobber = build_clobber (ctx->record_type);
8860 gsi_insert_after (&gsi, gimple_build_assign (ctx->sender_decl,
8861 clobber), GSI_SAME_STMT);
8863 gimple_seq_add_seq (&bind_body, bind_body_tail);
8864 gimple_bind_set_body (bind, bind_body);
8866 pop_gimplify_context (bind);
8868 gimple_bind_append_vars (bind, ctx->block_vars);
8869 BLOCK_VARS (block) = ctx->block_vars;
8870 if (BLOCK_VARS (block))
8871 TREE_USED (block) = 1;
8875 /* Lower code for an OMP scope directive. */
8877 static void
8878 lower_omp_scope (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8880 tree block;
8881 gimple *scope_stmt = gsi_stmt (*gsi_p);
8882 gbind *bind;
8883 gimple_seq bind_body, bind_body_tail = NULL, dlist;
8884 gimple_seq tred_dlist = NULL;
8886 push_gimplify_context ();
/* Replace the scope statement with a bind that will hold the lowered
   form.  */
8888 block = make_node (BLOCK);
8889 bind = gimple_build_bind (NULL, NULL, block);
8890 gsi_replace (gsi_p, bind, true);
8891 bind_body = NULL;
8892 dlist = NULL;
/* Task reductions: add a _reductemp_ clause and emit the runtime
   registration/teardown, plus the GOMP_scope_start call.  */
8894 tree rclauses
8895 = omp_task_reductions_find_first (gimple_omp_scope_clauses (scope_stmt),
8896 OMP_SCOPE, OMP_CLAUSE_REDUCTION);
8897 if (rclauses)
8899 tree type = build_pointer_type (pointer_sized_int_node);
8900 tree temp = create_tmp_var (type);
8901 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
8902 OMP_CLAUSE_DECL (c) = temp;
8903 OMP_CLAUSE_CHAIN (c) = gimple_omp_scope_clauses (scope_stmt);
8904 gimple_omp_scope_set_clauses (scope_stmt, c);
8905 lower_omp_task_reductions (ctx, OMP_SCOPE,
8906 gimple_omp_scope_clauses (scope_stmt),
8907 &bind_body, &tred_dlist);
8908 rclauses = c;
8909 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_SCOPE_START);
8910 gimple *stmt = gimple_build_call (fndecl, 1, temp);
8911 gimple_seq_add_stmt (&bind_body, stmt);
8914 lower_rec_input_clauses (gimple_omp_scope_clauses (scope_stmt),
8915 &bind_body, &dlist, ctx, NULL);
8916 lower_omp (gimple_omp_body_ptr (scope_stmt), ctx);
8918 gimple_seq_add_stmt (&bind_body, scope_stmt);
8920 gimple_seq_add_seq (&bind_body, gimple_omp_body (scope_stmt));
8922 gimple_omp_set_body (scope_stmt, NULL);
8924 gimple_seq clist = NULL;
8925 lower_reduction_clauses (gimple_omp_scope_clauses (scope_stmt),
8926 &bind_body, &clist, ctx);
/* Reduction merges collected in CLIST must run atomically; bracket them
   with GOMP_atomic_start/end.  */
8927 if (clist)
8929 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START);
8930 gcall *g = gimple_build_call (fndecl, 0);
8931 gimple_seq_add_stmt (&bind_body, g);
8932 gimple_seq_add_seq (&bind_body, clist);
8933 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END);
8934 g = gimple_build_call (fndecl, 0);
8935 gimple_seq_add_stmt (&bind_body, g);
8938 gimple_seq_add_seq (&bind_body, dlist);
8940 bind_body = maybe_catch_exception (bind_body);
8942 bool nowait = omp_find_clause (gimple_omp_scope_clauses (scope_stmt),
8943 OMP_CLAUSE_NOWAIT) != NULL_TREE;
8944 gimple *g = gimple_build_omp_return (nowait);
8945 gimple_seq_add_stmt (&bind_body_tail, g);
8946 gimple_seq_add_seq (&bind_body_tail, tred_dlist);
8947 maybe_add_implicit_barrier_cancel (ctx, g, &bind_body_tail);
/* End the lifetime of the sender record, if any, with a clobber.  */
8948 if (ctx->record_type)
8950 gimple_stmt_iterator gsi = gsi_start (bind_body_tail);
8951 tree clobber = build_clobber (ctx->record_type);
8952 gsi_insert_after (&gsi, gimple_build_assign (ctx->sender_decl,
8953 clobber), GSI_SAME_STMT);
8955 gimple_seq_add_seq (&bind_body, bind_body_tail);
8957 gimple_bind_set_body (bind, bind_body);
8959 pop_gimplify_context (bind);
8961 gimple_bind_append_vars (bind, ctx->block_vars);
8962 BLOCK_VARS (block) = ctx->block_vars;
8963 if (BLOCK_VARS (block))
8964 TREE_USED (block) = 1;
8966 /* Expand code for an OpenMP master or masked directive. */
8968 static void
8969 lower_omp_master (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8971 tree block, lab = NULL, x, bfn_decl;
8972 gimple *stmt = gsi_stmt (*gsi_p);
8973 gbind *bind;
8974 location_t loc = gimple_location (stmt);
8975 gimple_seq tseq;
/* For master the filter is thread 0; for masked it comes from the
   FILTER clause (defaulting to 0 when absent).  */
8976 tree filter = integer_zero_node;
8978 push_gimplify_context ();
8980 if (gimple_code (stmt) == GIMPLE_OMP_MASKED)
8982 filter = omp_find_clause (gimple_omp_masked_clauses (stmt),
8983 OMP_CLAUSE_FILTER);
8984 if (filter)
8985 filter = fold_convert (integer_type_node,
8986 OMP_CLAUSE_FILTER_EXPR (filter));
8987 else
8988 filter = integer_zero_node;
8990 block = make_node (BLOCK);
8991 bind = gimple_build_bind (NULL, NULL, block);
8992 gsi_replace (gsi_p, bind, true);
8993 gimple_bind_add_stmt (bind, stmt);
/* if (omp_get_thread_num () != filter) skip the body.  */
8995 bfn_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
8996 x = build_call_expr_loc (loc, bfn_decl, 0);
8997 x = build2 (EQ_EXPR, boolean_type_node, x, filter);
8998 x = build3 (COND_EXPR, void_type_node, x, NULL, build_and_jump (&lab));
8999 tseq = NULL;
9000 gimplify_and_add (x, &tseq);
9001 gimple_bind_add_seq (bind, tseq);
9003 lower_omp (gimple_omp_body_ptr (stmt), ctx);
9004 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
9005 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
9006 gimple_omp_set_body (stmt, NULL);
9008 gimple_bind_add_stmt (bind, gimple_build_label (lab));
/* No implicit barrier at the end of master/masked.  */
9010 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
9012 pop_gimplify_context (bind);
9014 gimple_bind_append_vars (bind, ctx->block_vars);
9015 BLOCK_VARS (block) = ctx->block_vars;
9018 /* Helper function for lower_omp_task_reductions.  For a specific PASS,
9019 find the next clause that should be processed, or return false
9020 if all have been processed already. */
9022 static inline bool
9023 omp_task_reduction_iterate (int pass, enum tree_code code,
9024 enum omp_clause_code ccode, tree *c, tree *decl,
9025 tree *type, tree *next)
9027 for (; *c; *c = omp_find_clause (OMP_CLAUSE_CHAIN (*c), ccode))
/* Skip plain (non-task) reduction clauses except on taskloop, same
   filter as omp_task_reductions_find_first.  */
9029 if (ccode == OMP_CLAUSE_REDUCTION
9030 && code != OMP_TASKLOOP
9031 && !OMP_CLAUSE_REDUCTION_TASK (*c))
9032 continue;
9033 *decl = OMP_CLAUSE_DECL (*c);
9034 *type = TREE_TYPE (*decl);
/* MEM_REF decls (array sections) are handled in pass 1 only.  */
9035 if (TREE_CODE (*decl) == MEM_REF)
9037 if (pass != 1)
9038 continue;
9040 else
/* Otherwise, pass 0 handles constant-size types and pass 1 the
   variable-size ones.  */
9042 if (omp_privatize_by_reference (*decl))
9043 *type = TREE_TYPE (*type);
9044 if (pass != (!TREE_CONSTANT (TYPE_SIZE_UNIT (*type))))
9045 continue;
9047 *next = omp_find_clause (OMP_CLAUSE_CHAIN (*c), ccode);
9048 return true;
/* No more clauses for this pass.  */
9050 *decl = NULL_TREE;
9051 *type = NULL_TREE;
9052 *next = NULL_TREE;
9053 return false;
9056 /* Lower task_reduction and reduction clauses (the latter unless CODE is
9057 OMP_TASKGROUP only with task modifier). Register mapping of those in
9058 START sequence and reducing them and unregister them in the END sequence. */
9060 static void
9061 lower_omp_task_reductions (omp_context *ctx, enum tree_code code, tree clauses,
9062 gimple_seq *start, gimple_seq *end)
9064 enum omp_clause_code ccode
9065 = (code == OMP_TASKGROUP
9066 ? OMP_CLAUSE_TASK_REDUCTION : OMP_CLAUSE_REDUCTION);
9067 tree cancellable = NULL_TREE;
9068 clauses = omp_task_reductions_find_first (clauses, code, ccode);
9069 if (clauses == NULL_TREE)
9070 return;
9071 if (code == OMP_FOR || code == OMP_SECTIONS || code == OMP_SCOPE)
9073 for (omp_context *outer = ctx->outer; outer; outer = outer->outer)
9074 if (gimple_code (outer->stmt) == GIMPLE_OMP_PARALLEL
9075 && outer->cancellable)
9077 cancellable = error_mark_node;
9078 break;
9080 else if (gimple_code (outer->stmt) != GIMPLE_OMP_TASKGROUP
9081 && gimple_code (outer->stmt) != GIMPLE_OMP_SCOPE)
9082 break;
9084 tree record_type = lang_hooks.types.make_type (RECORD_TYPE);
9085 tree *last = &TYPE_FIELDS (record_type);
9086 unsigned cnt = 0;
9087 if (cancellable)
9089 tree field = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
9090 ptr_type_node);
9091 tree ifield = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
9092 integer_type_node);
9093 *last = field;
9094 DECL_CHAIN (field) = ifield;
9095 last = &DECL_CHAIN (ifield);
9096 DECL_CONTEXT (field) = record_type;
9097 if (TYPE_ALIGN (record_type) < DECL_ALIGN (field))
9098 SET_TYPE_ALIGN (record_type, DECL_ALIGN (field));
9099 DECL_CONTEXT (ifield) = record_type;
9100 if (TYPE_ALIGN (record_type) < DECL_ALIGN (ifield))
9101 SET_TYPE_ALIGN (record_type, DECL_ALIGN (ifield));
9103 for (int pass = 0; pass < 2; pass++)
9105 tree decl, type, next;
9106 for (tree c = clauses;
9107 omp_task_reduction_iterate (pass, code, ccode,
9108 &c, &decl, &type, &next); c = next)
9110 ++cnt;
9111 tree new_type = type;
9112 if (ctx->outer)
9113 new_type = remap_type (type, &ctx->outer->cb);
9114 tree field
9115 = build_decl (OMP_CLAUSE_LOCATION (c), FIELD_DECL,
9116 DECL_P (decl) ? DECL_NAME (decl) : NULL_TREE,
9117 new_type);
9118 if (DECL_P (decl) && type == TREE_TYPE (decl))
9120 SET_DECL_ALIGN (field, DECL_ALIGN (decl));
9121 DECL_USER_ALIGN (field) = DECL_USER_ALIGN (decl);
9122 TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (decl);
9124 else
9125 SET_DECL_ALIGN (field, TYPE_ALIGN (type));
9126 DECL_CONTEXT (field) = record_type;
9127 if (TYPE_ALIGN (record_type) < DECL_ALIGN (field))
9128 SET_TYPE_ALIGN (record_type, DECL_ALIGN (field));
9129 *last = field;
9130 last = &DECL_CHAIN (field);
9131 tree bfield
9132 = build_decl (OMP_CLAUSE_LOCATION (c), FIELD_DECL, NULL_TREE,
9133 boolean_type_node);
9134 DECL_CONTEXT (bfield) = record_type;
9135 if (TYPE_ALIGN (record_type) < DECL_ALIGN (bfield))
9136 SET_TYPE_ALIGN (record_type, DECL_ALIGN (bfield));
9137 *last = bfield;
9138 last = &DECL_CHAIN (bfield);
9141 *last = NULL_TREE;
9142 layout_type (record_type);
9144 /* Build up an array which registers with the runtime all the reductions
9145 and deregisters them at the end. Format documented in libgomp/task.c. */
9146 tree atype = build_array_type_nelts (pointer_sized_int_node, 7 + cnt * 3);
9147 tree avar = create_tmp_var_raw (atype);
9148 gimple_add_tmp_var (avar);
9149 TREE_ADDRESSABLE (avar) = 1;
9150 tree r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_zero_node,
9151 NULL_TREE, NULL_TREE);
9152 tree t = build_int_cst (pointer_sized_int_node, cnt);
9153 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
9154 gimple_seq seq = NULL;
9155 tree sz = fold_convert (pointer_sized_int_node,
9156 TYPE_SIZE_UNIT (record_type));
9157 int cachesz = 64;
9158 sz = fold_build2 (PLUS_EXPR, pointer_sized_int_node, sz,
9159 build_int_cst (pointer_sized_int_node, cachesz - 1));
9160 sz = fold_build2 (BIT_AND_EXPR, pointer_sized_int_node, sz,
9161 build_int_cst (pointer_sized_int_node, ~(cachesz - 1)));
9162 ctx->task_reductions.create (1 + cnt);
9163 ctx->task_reduction_map = new hash_map<tree, unsigned>;
9164 ctx->task_reductions.quick_push (TREE_CODE (sz) == INTEGER_CST
9165 ? sz : NULL_TREE);
9166 sz = force_gimple_operand (sz, &seq, true, NULL_TREE);
9167 gimple_seq_add_seq (start, seq);
9168 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_one_node,
9169 NULL_TREE, NULL_TREE);
9170 gimple_seq_add_stmt (start, gimple_build_assign (r, sz));
9171 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (2),
9172 NULL_TREE, NULL_TREE);
9173 t = build_int_cst (pointer_sized_int_node,
9174 MAX (TYPE_ALIGN_UNIT (record_type), (unsigned) cachesz));
9175 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
9176 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (3),
9177 NULL_TREE, NULL_TREE);
9178 t = build_int_cst (pointer_sized_int_node, -1);
9179 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
9180 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (4),
9181 NULL_TREE, NULL_TREE);
9182 t = build_int_cst (pointer_sized_int_node, 0);
9183 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
9185 /* In end, build a loop that iterates from 0 to < omp_get_num_threads ()
9186 and for each task reduction checks a bool right after the private variable
9187 within that thread's chunk; if the bool is clear, it hasn't been
9188 initialized and thus isn't going to be reduced nor destructed, otherwise
9189 reduce and destruct it. */
9190 tree idx = create_tmp_var (size_type_node);
9191 gimple_seq_add_stmt (end, gimple_build_assign (idx, size_zero_node));
9192 tree num_thr_sz = create_tmp_var (size_type_node);
9193 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
9194 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
9195 tree lab3 = NULL_TREE, lab7 = NULL_TREE;
9196 gimple *g;
9197 if (code == OMP_FOR || code == OMP_SECTIONS || code == OMP_SCOPE)
9199 /* For worksharing constructs or scope, only perform it in the master
9200 thread, with the exception of cancelled implicit barriers - then only
9201 handle the current thread. */
9202 tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
9203 t = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
9204 tree thr_num = create_tmp_var (integer_type_node);
9205 g = gimple_build_call (t, 0);
9206 gimple_call_set_lhs (g, thr_num);
9207 gimple_seq_add_stmt (end, g);
9208 if (cancellable)
9210 tree c;
9211 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
9212 tree lab6 = create_artificial_label (UNKNOWN_LOCATION);
9213 lab3 = create_artificial_label (UNKNOWN_LOCATION);
9214 if (code == OMP_FOR)
9215 c = gimple_omp_for_clauses (ctx->stmt);
9216 else if (code == OMP_SECTIONS)
9217 c = gimple_omp_sections_clauses (ctx->stmt);
9218 else /* if (code == OMP_SCOPE) */
9219 c = gimple_omp_scope_clauses (ctx->stmt);
9220 c = OMP_CLAUSE_DECL (omp_find_clause (c, OMP_CLAUSE__REDUCTEMP_));
9221 cancellable = c;
9222 g = gimple_build_cond (NE_EXPR, c, build_zero_cst (TREE_TYPE (c)),
9223 lab5, lab6);
9224 gimple_seq_add_stmt (end, g);
9225 gimple_seq_add_stmt (end, gimple_build_label (lab5));
9226 g = gimple_build_assign (idx, NOP_EXPR, thr_num);
9227 gimple_seq_add_stmt (end, g);
9228 g = gimple_build_assign (num_thr_sz, PLUS_EXPR, idx,
9229 build_one_cst (TREE_TYPE (idx)));
9230 gimple_seq_add_stmt (end, g);
9231 gimple_seq_add_stmt (end, gimple_build_goto (lab3));
9232 gimple_seq_add_stmt (end, gimple_build_label (lab6));
9234 g = gimple_build_cond (NE_EXPR, thr_num, integer_zero_node, lab2, lab4);
9235 gimple_seq_add_stmt (end, g);
9236 gimple_seq_add_stmt (end, gimple_build_label (lab4));
9238 if (code != OMP_PARALLEL)
9240 t = builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS);
9241 tree num_thr = create_tmp_var (integer_type_node);
9242 g = gimple_build_call (t, 0);
9243 gimple_call_set_lhs (g, num_thr);
9244 gimple_seq_add_stmt (end, g);
9245 g = gimple_build_assign (num_thr_sz, NOP_EXPR, num_thr);
9246 gimple_seq_add_stmt (end, g);
9247 if (cancellable)
9248 gimple_seq_add_stmt (end, gimple_build_label (lab3));
9250 else
9252 tree c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
9253 OMP_CLAUSE__REDUCTEMP_);
9254 t = fold_convert (pointer_sized_int_node, OMP_CLAUSE_DECL (c));
9255 t = fold_convert (size_type_node, t);
9256 gimplify_assign (num_thr_sz, t, end);
9258 t = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (2),
9259 NULL_TREE, NULL_TREE);
9260 tree data = create_tmp_var (pointer_sized_int_node);
9261 gimple_seq_add_stmt (end, gimple_build_assign (data, t));
9262 if (code == OMP_TASKLOOP)
9264 lab7 = create_artificial_label (UNKNOWN_LOCATION);
9265 g = gimple_build_cond (NE_EXPR, data,
9266 build_zero_cst (pointer_sized_int_node),
9267 lab1, lab7);
9268 gimple_seq_add_stmt (end, g);
9270 gimple_seq_add_stmt (end, gimple_build_label (lab1));
9271 tree ptr;
9272 if (TREE_CODE (TYPE_SIZE_UNIT (record_type)) == INTEGER_CST)
9273 ptr = create_tmp_var (build_pointer_type (record_type));
9274 else
9275 ptr = create_tmp_var (ptr_type_node);
9276 gimple_seq_add_stmt (end, gimple_build_assign (ptr, NOP_EXPR, data));
9278 tree field = TYPE_FIELDS (record_type);
9279 cnt = 0;
9280 if (cancellable)
9281 field = DECL_CHAIN (DECL_CHAIN (field));
9282 for (int pass = 0; pass < 2; pass++)
9284 tree decl, type, next;
9285 for (tree c = clauses;
9286 omp_task_reduction_iterate (pass, code, ccode,
9287 &c, &decl, &type, &next); c = next)
9289 tree var = decl, ref;
9290 if (TREE_CODE (decl) == MEM_REF)
9292 var = TREE_OPERAND (var, 0);
9293 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
9294 var = TREE_OPERAND (var, 0);
9295 tree v = var;
9296 if (TREE_CODE (var) == ADDR_EXPR)
9297 var = TREE_OPERAND (var, 0);
9298 else if (TREE_CODE (var) == INDIRECT_REF)
9299 var = TREE_OPERAND (var, 0);
9300 tree orig_var = var;
9301 if (is_variable_sized (var))
9303 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
9304 var = DECL_VALUE_EXPR (var);
9305 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
9306 var = TREE_OPERAND (var, 0);
9307 gcc_assert (DECL_P (var));
9309 t = ref = maybe_lookup_decl_in_outer_ctx (var, ctx);
9310 if (orig_var != var)
9311 gcc_assert (TREE_CODE (v) == ADDR_EXPR);
9312 else if (TREE_CODE (v) == ADDR_EXPR)
9313 t = build_fold_addr_expr (t);
9314 else if (TREE_CODE (v) == INDIRECT_REF)
9315 t = build_fold_indirect_ref (t);
9316 if (TREE_CODE (TREE_OPERAND (decl, 0)) == POINTER_PLUS_EXPR)
9318 tree b = TREE_OPERAND (TREE_OPERAND (decl, 0), 1);
9319 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
9320 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, b);
9322 if (!integer_zerop (TREE_OPERAND (decl, 1)))
9323 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t,
9324 fold_convert (size_type_node,
9325 TREE_OPERAND (decl, 1)));
9327 else
9329 t = ref = maybe_lookup_decl_in_outer_ctx (var, ctx);
9330 if (!omp_privatize_by_reference (decl))
9331 t = build_fold_addr_expr (t);
9333 t = fold_convert (pointer_sized_int_node, t);
9334 seq = NULL;
9335 t = force_gimple_operand (t, &seq, true, NULL_TREE);
9336 gimple_seq_add_seq (start, seq);
9337 r = build4 (ARRAY_REF, pointer_sized_int_node, avar,
9338 size_int (7 + cnt * 3), NULL_TREE, NULL_TREE);
9339 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
9340 t = unshare_expr (byte_position (field));
9341 t = fold_convert (pointer_sized_int_node, t);
9342 ctx->task_reduction_map->put (c, cnt);
9343 ctx->task_reductions.quick_push (TREE_CODE (t) == INTEGER_CST
9344 ? t : NULL_TREE);
9345 seq = NULL;
9346 t = force_gimple_operand (t, &seq, true, NULL_TREE);
9347 gimple_seq_add_seq (start, seq);
9348 r = build4 (ARRAY_REF, pointer_sized_int_node, avar,
9349 size_int (7 + cnt * 3 + 1), NULL_TREE, NULL_TREE);
9350 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
9352 tree bfield = DECL_CHAIN (field);
9353 tree cond;
9354 if (code == OMP_PARALLEL
9355 || code == OMP_FOR
9356 || code == OMP_SECTIONS
9357 || code == OMP_SCOPE)
9358 /* In parallel, worksharing or scope all threads unconditionally
9359 initialize all their task reduction private variables. */
9360 cond = boolean_true_node;
9361 else if (TREE_TYPE (ptr) == ptr_type_node)
9363 cond = build2 (POINTER_PLUS_EXPR, ptr_type_node, ptr,
9364 unshare_expr (byte_position (bfield)));
9365 seq = NULL;
9366 cond = force_gimple_operand (cond, &seq, true, NULL_TREE);
9367 gimple_seq_add_seq (end, seq);
9368 tree pbool = build_pointer_type (TREE_TYPE (bfield));
9369 cond = build2 (MEM_REF, TREE_TYPE (bfield), cond,
9370 build_int_cst (pbool, 0));
9372 else
9373 cond = build3 (COMPONENT_REF, TREE_TYPE (bfield),
9374 build_simple_mem_ref (ptr), bfield, NULL_TREE);
9375 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
9376 tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
9377 tree condv = create_tmp_var (boolean_type_node);
9378 gimple_seq_add_stmt (end, gimple_build_assign (condv, cond));
9379 g = gimple_build_cond (NE_EXPR, condv, boolean_false_node,
9380 lab3, lab4);
9381 gimple_seq_add_stmt (end, g);
9382 gimple_seq_add_stmt (end, gimple_build_label (lab3));
9383 if (cancellable && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) == NULL_TREE)
9385 /* If this reduction doesn't need destruction and parallel
9386 has been cancelled, there is nothing to do for this
9387 reduction, so jump around the merge operation. */
9388 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
9389 g = gimple_build_cond (NE_EXPR, cancellable,
9390 build_zero_cst (TREE_TYPE (cancellable)),
9391 lab4, lab5);
9392 gimple_seq_add_stmt (end, g);
9393 gimple_seq_add_stmt (end, gimple_build_label (lab5));
9396 tree new_var;
9397 if (TREE_TYPE (ptr) == ptr_type_node)
9399 new_var = build2 (POINTER_PLUS_EXPR, ptr_type_node, ptr,
9400 unshare_expr (byte_position (field)));
9401 seq = NULL;
9402 new_var = force_gimple_operand (new_var, &seq, true, NULL_TREE);
9403 gimple_seq_add_seq (end, seq);
9404 tree pbool = build_pointer_type (TREE_TYPE (field));
9405 new_var = build2 (MEM_REF, TREE_TYPE (field), new_var,
9406 build_int_cst (pbool, 0));
9408 else
9409 new_var = build3 (COMPONENT_REF, TREE_TYPE (field),
9410 build_simple_mem_ref (ptr), field, NULL_TREE);
9412 enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
9413 if (TREE_CODE (decl) != MEM_REF
9414 && omp_privatize_by_reference (decl))
9415 ref = build_simple_mem_ref (ref);
9416 /* reduction(-:var) sums up the partial results, so it acts
9417 identically to reduction(+:var). */
9418 if (rcode == MINUS_EXPR)
9419 rcode = PLUS_EXPR;
9420 if (TREE_CODE (decl) == MEM_REF)
9422 tree type = TREE_TYPE (new_var);
9423 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
9424 tree i = create_tmp_var (TREE_TYPE (v));
9425 tree ptype = build_pointer_type (TREE_TYPE (type));
9426 if (DECL_P (v))
9428 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
9429 tree vv = create_tmp_var (TREE_TYPE (v));
9430 gimplify_assign (vv, v, start);
9431 v = vv;
9433 ref = build4 (ARRAY_REF, pointer_sized_int_node, avar,
9434 size_int (7 + cnt * 3), NULL_TREE, NULL_TREE);
9435 new_var = build_fold_addr_expr (new_var);
9436 new_var = fold_convert (ptype, new_var);
9437 ref = fold_convert (ptype, ref);
9438 tree m = create_tmp_var (ptype);
9439 gimplify_assign (m, new_var, end);
9440 new_var = m;
9441 m = create_tmp_var (ptype);
9442 gimplify_assign (m, ref, end);
9443 ref = m;
9444 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), end);
9445 tree body = create_artificial_label (UNKNOWN_LOCATION);
9446 tree endl = create_artificial_label (UNKNOWN_LOCATION);
9447 gimple_seq_add_stmt (end, gimple_build_label (body));
9448 tree priv = build_simple_mem_ref (new_var);
9449 tree out = build_simple_mem_ref (ref);
9450 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
9452 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
9453 tree decl_placeholder
9454 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
9455 tree lab6 = NULL_TREE;
9456 if (cancellable)
9458 /* If this reduction needs destruction and parallel
9459 has been cancelled, jump around the merge operation
9460 to the destruction. */
9461 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
9462 lab6 = create_artificial_label (UNKNOWN_LOCATION);
9463 tree zero = build_zero_cst (TREE_TYPE (cancellable));
9464 g = gimple_build_cond (NE_EXPR, cancellable, zero,
9465 lab6, lab5);
9466 gimple_seq_add_stmt (end, g);
9467 gimple_seq_add_stmt (end, gimple_build_label (lab5));
9469 SET_DECL_VALUE_EXPR (placeholder, out);
9470 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
9471 SET_DECL_VALUE_EXPR (decl_placeholder, priv);
9472 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
9473 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
9474 gimple_seq_add_seq (end,
9475 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
9476 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
9477 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
9479 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
9480 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
9482 if (cancellable)
9483 gimple_seq_add_stmt (end, gimple_build_label (lab6));
9484 tree x = lang_hooks.decls.omp_clause_dtor (c, priv);
9485 if (x)
9487 gimple_seq tseq = NULL;
9488 gimplify_stmt (&x, &tseq);
9489 gimple_seq_add_seq (end, tseq);
9492 else
9494 tree x = build2 (rcode, TREE_TYPE (out), out, priv);
9495 out = unshare_expr (out);
9496 gimplify_assign (out, x, end);
9498 gimple *g
9499 = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
9500 TYPE_SIZE_UNIT (TREE_TYPE (type)));
9501 gimple_seq_add_stmt (end, g);
9502 g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
9503 TYPE_SIZE_UNIT (TREE_TYPE (type)));
9504 gimple_seq_add_stmt (end, g);
9505 g = gimple_build_assign (i, PLUS_EXPR, i,
9506 build_int_cst (TREE_TYPE (i), 1));
9507 gimple_seq_add_stmt (end, g);
9508 g = gimple_build_cond (LE_EXPR, i, v, body, endl);
9509 gimple_seq_add_stmt (end, g);
9510 gimple_seq_add_stmt (end, gimple_build_label (endl));
9512 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
9514 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
9515 tree oldv = NULL_TREE;
9516 tree lab6 = NULL_TREE;
9517 if (cancellable)
9519 /* If this reduction needs destruction and parallel
9520 has been cancelled, jump around the merge operation
9521 to the destruction. */
9522 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
9523 lab6 = create_artificial_label (UNKNOWN_LOCATION);
9524 tree zero = build_zero_cst (TREE_TYPE (cancellable));
9525 g = gimple_build_cond (NE_EXPR, cancellable, zero,
9526 lab6, lab5);
9527 gimple_seq_add_stmt (end, g);
9528 gimple_seq_add_stmt (end, gimple_build_label (lab5));
9530 if (omp_privatize_by_reference (decl)
9531 && !useless_type_conversion_p (TREE_TYPE (placeholder),
9532 TREE_TYPE (ref)))
9533 ref = build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c), ref);
9534 ref = build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c), ref);
9535 tree refv = create_tmp_var (TREE_TYPE (ref));
9536 gimplify_assign (refv, ref, end);
9537 ref = build_simple_mem_ref_loc (OMP_CLAUSE_LOCATION (c), refv);
9538 SET_DECL_VALUE_EXPR (placeholder, ref);
9539 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
9540 tree d = maybe_lookup_decl (decl, ctx);
9541 gcc_assert (d);
9542 if (DECL_HAS_VALUE_EXPR_P (d))
9543 oldv = DECL_VALUE_EXPR (d);
9544 if (omp_privatize_by_reference (var))
9546 tree v = fold_convert (TREE_TYPE (d),
9547 build_fold_addr_expr (new_var));
9548 SET_DECL_VALUE_EXPR (d, v);
9550 else
9551 SET_DECL_VALUE_EXPR (d, new_var);
9552 DECL_HAS_VALUE_EXPR_P (d) = 1;
9553 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
9554 if (oldv)
9555 SET_DECL_VALUE_EXPR (d, oldv);
9556 else
9558 SET_DECL_VALUE_EXPR (d, NULL_TREE);
9559 DECL_HAS_VALUE_EXPR_P (d) = 0;
9561 gimple_seq_add_seq (end, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
9562 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
9563 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
9564 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
9565 if (cancellable)
9566 gimple_seq_add_stmt (end, gimple_build_label (lab6));
9567 tree x = lang_hooks.decls.omp_clause_dtor (c, new_var);
9568 if (x)
9570 gimple_seq tseq = NULL;
9571 gimplify_stmt (&x, &tseq);
9572 gimple_seq_add_seq (end, tseq);
9575 else
9577 tree x = build2 (rcode, TREE_TYPE (ref), ref, new_var);
9578 ref = unshare_expr (ref);
9579 gimplify_assign (ref, x, end);
9581 gimple_seq_add_stmt (end, gimple_build_label (lab4));
9582 ++cnt;
9583 field = DECL_CHAIN (bfield);
9587 if (code == OMP_TASKGROUP)
9589 t = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_REGISTER);
9590 g = gimple_build_call (t, 1, build_fold_addr_expr (avar));
9591 gimple_seq_add_stmt (start, g);
9593 else
9595 tree c;
9596 if (code == OMP_FOR)
9597 c = gimple_omp_for_clauses (ctx->stmt);
9598 else if (code == OMP_SECTIONS)
9599 c = gimple_omp_sections_clauses (ctx->stmt);
9600 else if (code == OMP_SCOPE)
9601 c = gimple_omp_scope_clauses (ctx->stmt);
9602 else
9603 c = gimple_omp_taskreg_clauses (ctx->stmt);
9604 c = omp_find_clause (c, OMP_CLAUSE__REDUCTEMP_);
9605 t = fold_convert (TREE_TYPE (OMP_CLAUSE_DECL (c)),
9606 build_fold_addr_expr (avar));
9607 gimplify_assign (OMP_CLAUSE_DECL (c), t, start);
9610 gimple_seq_add_stmt (end, gimple_build_assign (data, PLUS_EXPR, data, sz));
9611 gimple_seq_add_stmt (end, gimple_build_assign (idx, PLUS_EXPR, idx,
9612 size_one_node));
9613 g = gimple_build_cond (NE_EXPR, idx, num_thr_sz, lab1, lab2);
9614 gimple_seq_add_stmt (end, g);
9615 gimple_seq_add_stmt (end, gimple_build_label (lab2));
9616 if (code == OMP_FOR || code == OMP_SECTIONS || code == OMP_SCOPE)
9618 enum built_in_function bfn
9619 = BUILT_IN_GOMP_WORKSHARE_TASK_REDUCTION_UNREGISTER;
9620 t = builtin_decl_explicit (bfn);
9621 tree c_bool_type = TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (t)));
9622 tree arg;
9623 if (cancellable)
9625 arg = create_tmp_var (c_bool_type);
9626 gimple_seq_add_stmt (end, gimple_build_assign (arg, NOP_EXPR,
9627 cancellable));
9629 else
9630 arg = build_int_cst (c_bool_type, 0);
9631 g = gimple_build_call (t, 1, arg);
9633 else
9635 t = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_UNREGISTER);
9636 g = gimple_build_call (t, 1, build_fold_addr_expr (avar));
9638 gimple_seq_add_stmt (end, g);
9639 if (lab7)
9640 gimple_seq_add_stmt (end, gimple_build_label (lab7));
9641 t = build_constructor (atype, NULL);
9642 TREE_THIS_VOLATILE (t) = 1;
9643 gimple_seq_add_stmt (end, gimple_build_assign (avar, t));
9646 /* Expand code for an OpenMP taskgroup directive.  */
/* NOTE(review): this is a blob-view rendering; the leading numbers are the
   original file's line numbers, and brace-only / continuation lines were
   elided by the viewer (e.g. the trailing "0);" argument of the
   gimple_build_call below) -- compare against the pristine omp-low.cc.  */
9648 static void
9649 lower_omp_taskgroup (gimple_stmt_iterator *gsi_p, omp_context *ctx)
/* Lower GIMPLE_OMP_TASKGROUP at *GSI_P: wrap the statement in a new
   GIMPLE_BIND, emit a call to GOMP_taskgroup_start before the body, lower
   any task reductions and the body itself, then append the OMP return and
   the deferred sequence DSEQ produced by lower_omp_task_reductions.  */
9651 gimple *stmt = gsi_stmt (*gsi_p);
9652 gcall *x;
9653 gbind *bind;
9654 gimple_seq dseq = NULL;
9655 tree block = make_node (BLOCK);
/* Replace the taskgroup stmt with a bind that will contain it plus the
   runtime calls emitted around it.  */
9657 bind = gimple_build_bind (NULL, NULL, block);
9658 gsi_replace (gsi_p, bind, true);
9659 gimple_bind_add_stmt (bind, stmt);
9661 push_gimplify_context ();
9663 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_START),
9665 gimple_bind_add_stmt (bind, x);
/* Task reduction setup code goes into the bind body; teardown code is
   collected in DSEQ and appended after the OMP return below.  */
9667 lower_omp_task_reductions (ctx, OMP_TASKGROUP,
9668 gimple_omp_taskgroup_clauses (stmt),
9669 gimple_bind_body_ptr (bind), &dseq);
9671 lower_omp (gimple_omp_body_ptr (stmt), ctx);
9672 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
9673 gimple_omp_set_body (stmt, NULL);
9675 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
9676 gimple_bind_add_seq (bind, dseq);
9678 pop_gimplify_context (bind);
9680 gimple_bind_append_vars (bind, ctx->block_vars);
9681 BLOCK_VARS (block) = ctx->block_vars;
9685 /* Fold the OMP_ORDERED_CLAUSES for the OMP_ORDERED in STMT if possible. */
/* NOTE(review): blob-view rendering -- leading numbers are original line
   numbers and brace-only lines were elided by the viewer.  */
9687 static void
9688 lower_omp_ordered_clauses (gimple_stmt_iterator *gsi_p, gomp_ordered *ord_stmt,
9689 omp_context *ctx)
/* Bail unless ORD_STMT is nested directly in a GIMPLE_OMP_FOR with an
   ordered(n) clause; fd/loops describe that enclosing loop nest.  */
9691 struct omp_for_data fd;
9692 if (!ctx->outer || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR)
9693 return;
9695 unsigned int len = gimple_omp_for_collapse (ctx->outer->stmt);
9696 struct omp_for_data_loop *loops = XALLOCAVEC (struct omp_for_data_loop, len);
9697 omp_extract_for_data (as_a <gomp_for *> (ctx->outer->stmt), &fd, loops);
9698 if (!fd.ordered)
9699 return;
9701 tree *list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
9702 tree c = gimple_omp_ordered_clauses (ord_stmt);
9703 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
9704 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
9706 /* Merge depend clauses from multiple adjacent
9707 #pragma omp ordered depend(sink:...) constructs
9708 into one #pragma omp ordered depend(sink:...), so that
9709 we can optimize them together. */
9710 gimple_stmt_iterator gsi = *gsi_p;
9711 gsi_next (&gsi);
9712 while (!gsi_end_p (gsi))
9714 gimple *stmt = gsi_stmt (gsi);
/* Skip debug stmts and nops between adjacent ordered constructs.  */
9715 if (is_gimple_debug (stmt)
9716 || gimple_code (stmt) == GIMPLE_NOP)
9718 gsi_next (&gsi);
9719 continue;
9721 if (gimple_code (stmt) != GIMPLE_OMP_ORDERED)
9722 break;
9723 gomp_ordered *ord_stmt2 = as_a <gomp_ordered *> (stmt);
9724 c = gimple_omp_ordered_clauses (ord_stmt2);
9725 if (c == NULL_TREE
9726 || OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND
9727 || OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
9728 break;
/* Splice the following construct's clause list onto ours and delete
   the now-redundant ordered statement.  */
9729 while (*list_p)
9730 list_p = &OMP_CLAUSE_CHAIN (*list_p);
9731 *list_p = c;
9732 gsi_remove (&gsi, true);
9736 /* Canonicalize sink dependence clauses into one folded clause if
9737 possible.
9739 The basic algorithm is to create a sink vector whose first
9740 element is the GCD of all the first elements, and whose remaining
9741 elements are the minimum of the subsequent columns.
9743 We ignore dependence vectors whose first element is zero because
9744 such dependencies are known to be executed by the same thread.
9746 We take into account the direction of the loop, so a minimum
9747 becomes a maximum if the loop is iterating forwards. We also
9748 ignore sink clauses where the loop direction is unknown, or where
9749 the offsets are clearly invalid because they are not a multiple
9750 of the loop increment.
9752 For example:
9754 #pragma omp for ordered(2)
9755 for (i=0; i < N; ++i)
9756 for (j=0; j < M; ++j)
9758 #pragma omp ordered \
9759 depend(sink:i-8,j-2) \
9760 depend(sink:i,j-1) \ // Completely ignored because i+0.
9761 depend(sink:i-4,j-3) \
9762 depend(sink:i-6,j-4)
9763 #pragma omp ordered depend(source)
9766 Folded clause is:
9768 depend(sink:-gcd(8,4,6),-min(2,3,4))
9769 -or-
9770 depend(sink:-2,-2)
9773 /* FIXME: Computing GCD's where the first element is zero is
9774 non-trivial in the presence of collapsed loops. Do this later. */
9775 if (fd.collapse > 1)
9776 return;
/* folded_deps layout: [0] = GCD of first-dimension offsets,
   [1..len-1] = folded offsets of the remaining dimensions,
   [len..2*len-2] = scratch copy of the current clause's offsets.  */
9778 wide_int *folded_deps = XALLOCAVEC (wide_int, 2 * len - 1);
9780 /* wide_int is not a POD so it must be default-constructed. */
9781 for (unsigned i = 0; i != 2 * len - 1; ++i)
9782 new (static_cast<void*>(folded_deps + i)) wide_int ();
9784 tree folded_dep = NULL_TREE;
9785 /* TRUE if the first dimension's offset is negative. */
9786 bool neg_offset_p = false;
9788 list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
9789 unsigned int i;
9790 while ((c = *list_p) != NULL)
9792 bool remove = false;
9794 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND);
9795 if (OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
9796 goto next_ordered_clause;
9798 tree vec;
9799 for (vec = OMP_CLAUSE_DECL (c), i = 0;
9800 vec && TREE_CODE (vec) == TREE_LIST;
9801 vec = TREE_CHAIN (vec), ++i)
9803 gcc_assert (i < len);
9805 /* omp_extract_for_data has canonicalized the condition. */
9806 gcc_assert (fd.loops[i].cond_code == LT_EXPR
9807 || fd.loops[i].cond_code == GT_EXPR);
9808 bool forward = fd.loops[i].cond_code == LT_EXPR;
9809 bool maybe_lexically_later = true;
9811 /* While the committee makes up its mind, bail if we have any
9812 non-constant steps. */
9813 if (TREE_CODE (fd.loops[i].step) != INTEGER_CST)
9814 goto lower_omp_ordered_ret;
9816 tree itype = TREE_TYPE (TREE_VALUE (vec));
9817 if (POINTER_TYPE_P (itype))
9818 itype = sizetype;
9819 wide_int offset = wide_int::from (wi::to_wide (TREE_PURPOSE (vec)),
9820 TYPE_PRECISION (itype),
9821 TYPE_SIGN (itype));
9823 /* Ignore invalid offsets that are not multiples of the step. */
9824 if (!wi::multiple_of_p (wi::abs (offset),
9825 wi::abs (wi::to_wide (fd.loops[i].step)),
9826 UNSIGNED))
9828 warning_at (OMP_CLAUSE_LOCATION (c), 0,
9829 "ignoring sink clause with offset that is not "
9830 "a multiple of the loop step");
9831 remove = true;
9832 goto next_ordered_clause;
9835 /* Calculate the first dimension. The first dimension of
9836 the folded dependency vector is the GCD of the first
9837 elements, while ignoring any first elements whose offset
9838 is 0. */
9839 if (i == 0)
9841 /* Ignore dependence vectors whose first dimension is 0. */
9842 if (offset == 0)
9844 remove = true;
9845 goto next_ordered_clause;
9847 else
9849 if (!TYPE_UNSIGNED (itype) && (forward ^ wi::neg_p (offset)))
9851 error_at (OMP_CLAUSE_LOCATION (c),
9852 "first offset must be in opposite direction "
9853 "of loop iterations");
9854 goto lower_omp_ordered_ret;
9856 if (forward)
9857 offset = -offset;
9858 neg_offset_p = forward;
9859 /* Initialize the first time around. */
9860 if (folded_dep == NULL_TREE)
9862 folded_dep = c;
9863 folded_deps[0] = offset;
9865 else
9866 folded_deps[0] = wi::gcd (folded_deps[0],
9867 offset, UNSIGNED);
9870 /* Calculate minimum for the remaining dimensions. */
9871 else
9873 folded_deps[len + i - 1] = offset;
9874 if (folded_dep == c)
9875 folded_deps[i] = offset;
9876 else if (maybe_lexically_later
9877 && !wi::eq_p (folded_deps[i], offset))
/* This clause is lexically later than the fold so far in this
   dimension; adopt it as the new representative and copy its
   offsets saved in the scratch half of folded_deps.  */
9879 if (forward ^ wi::gts_p (folded_deps[i], offset))
9881 unsigned int j;
9882 folded_dep = c;
9883 for (j = 1; j <= i; j++)
9884 folded_deps[j] = folded_deps[len + j - 1];
9886 else
9887 maybe_lexically_later = false;
9891 gcc_assert (i == len);
/* The clause has been folded; remove it from the list.  */
9893 remove = true;
9895 next_ordered_clause:
9896 if (remove)
9897 *list_p = OMP_CLAUSE_CHAIN (c);
9898 else
9899 list_p = &OMP_CLAUSE_CHAIN (c);
9902 if (folded_dep)
9904 if (neg_offset_p)
9905 folded_deps[0] = -folded_deps[0];
9907 tree itype = TREE_TYPE (TREE_VALUE (OMP_CLAUSE_DECL (folded_dep)));
9908 if (POINTER_TYPE_P (itype))
9909 itype = sizetype;
9911 TREE_PURPOSE (OMP_CLAUSE_DECL (folded_dep))
9912 = wide_int_to_tree (itype, folded_deps[0]);
/* Re-chain the folded clause at the head of the ordered stmt's list.  */
9913 OMP_CLAUSE_CHAIN (folded_dep) = gimple_omp_ordered_clauses (ord_stmt);
9914 *gimple_omp_ordered_clauses_ptr (ord_stmt) = folded_dep;
9917 lower_omp_ordered_ret:
9919 /* Ordered without clauses is #pragma omp threads, while we want
9920 a nop instead if we remove all clauses. */
9921 if (gimple_omp_ordered_clauses (ord_stmt) == NULL_TREE)
9922 gsi_replace (gsi_p, gimple_build_nop (), true);
9926 /* Expand code for an OpenMP ordered directive. */
/* NOTE(review): blob-view rendering -- leading numbers are original line
   numbers; brace-only and some continuation lines (e.g. the "0);" call
   arguments) were elided by the viewer.  */
9928 static void
9929 lower_omp_ordered (gimple_stmt_iterator *gsi_p, omp_context *ctx)
/* Lower GIMPLE_OMP_ORDERED: depend-form constructs are left for expansion;
   otherwise the body is wrapped in a bind bracketed either by the
   IFN_GOMP_SIMD_ORDERED_START/END internal fns (simd clause) or by calls
   to GOMP_ordered_start/end, with an extra SIMT per-lane serialization
   loop when the region may be offloaded to a SIMT target.  */
9931 tree block;
9932 gimple *stmt = gsi_stmt (*gsi_p), *g;
9933 gomp_ordered *ord_stmt = as_a <gomp_ordered *> (stmt);
9934 gcall *x;
9935 gbind *bind;
9936 bool simd = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
9937 OMP_CLAUSE_SIMD);
9938 /* FIXME: this should check presence of OMP_CLAUSE__SIMT_ on the enclosing
9939 loop. */
9940 bool maybe_simt
9941 = simd && omp_maybe_offloaded_ctx (ctx) && omp_max_simt_vf () > 1;
9942 bool threads = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
9943 OMP_CLAUSE_THREADS);
9945 if (omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
9946 OMP_CLAUSE_DEPEND)
9948 /* FIXME: This is needs to be moved to the expansion to verify various
9949 conditions only testable on cfg with dominators computed, and also
9950 all the depend clauses to be merged still might need to be available
9951 for the runtime checks. */
9952 if (0)
9953 lower_omp_ordered_clauses (gsi_p, ord_stmt, ctx);
9954 return;
9957 push_gimplify_context ();
9959 block = make_node (BLOCK);
9960 bind = gimple_build_bind (NULL, NULL, block);
9961 gsi_replace (gsi_p, bind, true);
9962 gimple_bind_add_stmt (bind, stmt);
9964 if (simd)
9966 x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_START, 1,
9967 build_int_cst (NULL_TREE, threads))
9968 cfun->has_simduid_loops = true;
9970 else
9971 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_START),
9973 gimple_bind_add_stmt (bind, x);
9975 tree counter = NULL_TREE, test = NULL_TREE, body = NULL_TREE;
9976 if (maybe_simt)
/* SIMT: each lane runs the ordered body in turn.  counter holds this
   lane's index; the body is re-executed under a predicate until every
   lane's counter has gone negative (vote-any loop below).  */
9978 counter = create_tmp_var (integer_type_node);
9979 g = gimple_build_call_internal (IFN_GOMP_SIMT_LANE, 0);
9980 gimple_call_set_lhs (g, counter);
9981 gimple_bind_add_stmt (bind, g);
9983 body = create_artificial_label (UNKNOWN_LOCATION);
9984 test = create_artificial_label (UNKNOWN_LOCATION);
9985 gimple_bind_add_stmt (bind, gimple_build_label (body));
9987 tree simt_pred = create_tmp_var (integer_type_node);
9988 g = gimple_build_call_internal (IFN_GOMP_SIMT_ORDERED_PRED, 1, counter);
9989 gimple_call_set_lhs (g, simt_pred);
9990 gimple_bind_add_stmt (bind, g);
9992 tree t = create_artificial_label (UNKNOWN_LOCATION);
9993 g = gimple_build_cond (EQ_EXPR, simt_pred, integer_zero_node, t, test);
9994 gimple_bind_add_stmt (bind, g);
9996 gimple_bind_add_stmt (bind, gimple_build_label (t));
9998 lower_omp (gimple_omp_body_ptr (stmt), ctx);
9999 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
10000 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
10001 gimple_omp_set_body (stmt, NULL);
10003 if (maybe_simt)
10005 gimple_bind_add_stmt (bind, gimple_build_label (test));
10006 g = gimple_build_assign (counter, MINUS_EXPR, counter, integer_one_node);
10007 gimple_bind_add_stmt (bind, g);
/* Loop back to BODY while any lane still has counter >= 0.  */
10009 tree c = build2 (GE_EXPR, boolean_type_node, counter, integer_zero_node);
10010 tree nonneg = create_tmp_var (integer_type_node);
10011 gimple_seq tseq = NULL;
10012 gimplify_assign (nonneg, fold_convert (integer_type_node, c), &tseq);
10013 gimple_bind_add_seq (bind, tseq);
10015 g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY, 1, nonneg);
10016 gimple_call_set_lhs (g, nonneg);
10017 gimple_bind_add_stmt (bind, g);
10019 tree end = create_artificial_label (UNKNOWN_LOCATION);
10020 g = gimple_build_cond (NE_EXPR, nonneg, integer_zero_node, body, end);
10021 gimple_bind_add_stmt (bind, g);
10023 gimple_bind_add_stmt (bind, gimple_build_label (end));
10025 if (simd)
10026 x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_END, 1,
10027 build_int_cst (NULL_TREE, threads))
10028 else
10029 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_END),
10031 gimple_bind_add_stmt (bind, x);
10033 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
10035 pop_gimplify_context (bind);
10037 gimple_bind_append_vars (bind, ctx->block_vars);
10038 BLOCK_VARS (block) = gimple_bind_vars (bind);
10042 /* Expand code for an OpenMP scan directive and the structured block
10043 before the scan directive. */
/* NOTE(review): blob-view rendering -- leading numbers are original line
   numbers and brace-only lines were elided by the viewer.  */
10045 static void
10046 lower_omp_scan (gimple_stmt_iterator *gsi_p, omp_context *ctx)
/* OCTX is the enclosing worksharing/simd loop context that carries the
   inscan reduction clauses; input_phase distinguishes the structured
   block before the scan directive from the one after it.  */
10048 gimple *stmt = gsi_stmt (*gsi_p);
10049 bool has_clauses
10050 = gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt)) != NULL;
10051 tree lane = NULL_TREE;
10052 gimple_seq before = NULL;
10053 omp_context *octx = ctx->outer;
10054 gcc_assert (octx);
10055 if (octx->scan_exclusive && !has_clauses)
10057 gimple_stmt_iterator gsi2 = *gsi_p;
10058 gsi_next (&gsi2);
10059 gimple *stmt2 = gsi_stmt (gsi2);
10060 /* For exclusive scan, swap GIMPLE_OMP_SCAN without clauses
10061 with following GIMPLE_OMP_SCAN with clauses, so that input_phase,
10062 the one with exclusive clause(s), comes first. */
10063 if (stmt2
10064 && gimple_code (stmt2) == GIMPLE_OMP_SCAN
10065 && gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt2)) != NULL)
10067 gsi_remove (gsi_p, false);
10068 gsi_insert_after (gsi_p, stmt, GSI_SAME_STMT);
10069 ctx = maybe_lookup_ctx (stmt2);
10070 gcc_assert (ctx);
10071 lower_omp_scan (gsi_p, ctx);
10072 return;
10076 bool input_phase = has_clauses ^ octx->scan_inclusive;
10077 bool is_simd = (gimple_code (octx->stmt) == GIMPLE_OMP_FOR
10078 && gimple_omp_for_kind (octx->stmt) == GF_OMP_FOR_KIND_SIMD);
10079 bool is_for = (gimple_code (octx->stmt) == GIMPLE_OMP_FOR
10080 && gimple_omp_for_kind (octx->stmt) == GF_OMP_FOR_KIND_FOR
10081 && !gimple_omp_for_combined_p (octx->stmt));
10082 bool is_for_simd = is_simd && gimple_omp_for_combined_into_p (octx->stmt);
10083 if (is_for_simd && octx->for_simd_scan_phase)
10084 is_simd = false;
10085 if (is_simd)
10086 if (tree c = omp_find_clause (gimple_omp_for_clauses (octx->stmt),
10087 OMP_CLAUSE__SIMDUID_))
/* SIMD: obtain the per-lane index from IFN_GOMP_SIMD_LANE; the third
   argument (1/2/3) encodes which scan phase this lane access is for.  */
10089 tree uid = OMP_CLAUSE__SIMDUID__DECL (c);
10090 lane = create_tmp_var (unsigned_type_node);
10091 tree t = build_int_cst (integer_type_node,
10092 input_phase ? 1
10093 : octx->scan_inclusive ? 2 : 3);
10094 gimple *g
10095 = gimple_build_call_internal (IFN_GOMP_SIMD_LANE, 2, uid, t);
10096 gimple_call_set_lhs (g, lane);
10097 gimple_seq_add_stmt (&before, g);
10100 if (is_simd || is_for)
10102 for (tree c = gimple_omp_for_clauses (octx->stmt);
10103 c; c = OMP_CLAUSE_CHAIN (c))
10104 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
10105 && OMP_CLAUSE_REDUCTION_INSCAN (c))
10107 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
10108 tree var = OMP_CLAUSE_DECL (c);
10109 tree new_var = lookup_decl (var, octx);
10110 tree val = new_var;
/* var2 = per-lane/outer accumulation slot, var3 = separate identity
   element (if any), var4 = exclusive-scan temporary, lane0 = saved
   original index of the "omp simd array" access.  */
10111 tree var2 = NULL_TREE;
10112 tree var3 = NULL_TREE;
10113 tree var4 = NULL_TREE;
10114 tree lane0 = NULL_TREE;
10115 tree new_vard = new_var;
10116 if (omp_privatize_by_reference (var))
10118 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
10119 val = new_var;
10121 if (DECL_HAS_VALUE_EXPR_P (new_vard))
10123 val = DECL_VALUE_EXPR (new_vard);
10124 if (new_vard != new_var)
10126 gcc_assert (TREE_CODE (val) == ADDR_EXPR);
10127 val = TREE_OPERAND (val, 0);
10129 if (TREE_CODE (val) == ARRAY_REF
10130 && VAR_P (TREE_OPERAND (val, 0)))
10132 tree v = TREE_OPERAND (val, 0);
10133 if (lookup_attribute ("omp simd array",
10134 DECL_ATTRIBUTES (v)))
/* The privatized var lives in an "omp simd array"; re-index it
   with the phase-specific LANE computed above.  */
10136 val = unshare_expr (val);
10137 lane0 = TREE_OPERAND (val, 1);
10138 TREE_OPERAND (val, 1) = lane;
10139 var2 = lookup_decl (v, octx);
10140 if (octx->scan_exclusive)
10141 var4 = lookup_decl (var2, octx);
10142 if (input_phase
10143 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
10144 var3 = maybe_lookup_decl (var4 ? var4 : var2, octx);
10145 if (!input_phase)
10147 var2 = build4 (ARRAY_REF, TREE_TYPE (val),
10148 var2, lane, NULL_TREE, NULL_TREE);
10149 TREE_THIS_NOTRAP (var2) = 1;
10150 if (octx->scan_exclusive)
10152 var4 = build4 (ARRAY_REF, TREE_TYPE (val),
10153 var4, lane, NULL_TREE,
10154 NULL_TREE);
10155 TREE_THIS_NOTRAP (var4) = 1;
10158 else
10159 var2 = val;
10162 gcc_assert (var2);
10164 else
10166 var2 = build_outer_var_ref (var, octx);
10167 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
10169 var3 = maybe_lookup_decl (new_vard, octx);
10170 if (var3 == new_vard || var3 == NULL_TREE)
10171 var3 = NULL_TREE;
10172 else if (is_simd && octx->scan_exclusive && !input_phase)
10174 var4 = maybe_lookup_decl (var3, octx);
10175 if (var4 == var3 || var4 == NULL_TREE)
10177 if (TREE_ADDRESSABLE (TREE_TYPE (new_var)))
10179 var4 = var3;
10180 var3 = NULL_TREE;
10182 else
10183 var4 = NULL_TREE;
10187 if (is_simd
10188 && octx->scan_exclusive
10189 && !input_phase
10190 && var4 == NULL_TREE)
10191 var4 = create_tmp_var (TREE_TYPE (val));
10193 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
/* User-defined reduction: run the clause's GIMPLE_INIT/MERGE
   sequences with the placeholders redirected via value-exprs.  */
10195 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
10196 if (input_phase)
10198 if (var3)
10200 /* If we've added a separate identity element
10201 variable, copy it over into val. */
10202 tree x = lang_hooks.decls.omp_clause_assign_op (c, val,
10203 var3);
10204 gimplify_and_add (x, &before);
10206 else if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
10208 /* Otherwise, assign to it the identity element. */
10209 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
10210 if (is_for)
10211 tseq = copy_gimple_seq_and_replace_locals (tseq);
10212 tree ref = build_outer_var_ref (var, octx);
10213 tree x = (DECL_HAS_VALUE_EXPR_P (new_vard)
10214 ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
10215 if (x)
10217 if (new_vard != new_var)
10218 val = build_fold_addr_expr_loc (clause_loc, val);
10219 SET_DECL_VALUE_EXPR (new_vard, val);
10221 SET_DECL_VALUE_EXPR (placeholder, ref);
10222 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
10223 lower_omp (&tseq, octx);
10224 if (x)
10225 SET_DECL_VALUE_EXPR (new_vard, x);
10226 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
10227 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
10228 gimple_seq_add_seq (&before, tseq);
10229 if (is_simd)
10230 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
10233 else if (is_simd)
10235 tree x;
10236 if (octx->scan_exclusive)
10238 tree v4 = unshare_expr (var4);
10239 tree v2 = unshare_expr (var2);
10240 x = lang_hooks.decls.omp_clause_assign_op (c, v4, v2);
10241 gimplify_and_add (x, &before);
10243 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
10244 x = (DECL_HAS_VALUE_EXPR_P (new_vard)
10245 ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
10246 tree vexpr = val;
10247 if (x && new_vard != new_var)
10248 vexpr = build_fold_addr_expr_loc (clause_loc, val);
10249 if (x)
10250 SET_DECL_VALUE_EXPR (new_vard, vexpr);
10251 SET_DECL_VALUE_EXPR (placeholder, var2);
10252 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
10253 lower_omp (&tseq, octx);
10254 gimple_seq_add_seq (&before, tseq);
10255 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
10256 if (x)
10257 SET_DECL_VALUE_EXPR (new_vard, x);
10258 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
10259 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
10260 if (octx->scan_inclusive)
10262 x = lang_hooks.decls.omp_clause_assign_op (c, val,
10263 var2);
10264 gimplify_and_add (x, &before);
10266 else if (lane0 == NULL_TREE)
10268 x = lang_hooks.decls.omp_clause_assign_op (c, val,
10269 var4);
10270 gimplify_and_add (x, &before);
10274 else
10276 if (input_phase)
10278 /* input phase. Set val to initializer before
10279 the body. */
10280 tree x = omp_reduction_init (c, TREE_TYPE (new_var));
10281 gimplify_assign (val, x, &before);
10283 else if (is_simd)
10285 /* scan phase. */
10286 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
/* reduction(-:var) sums partial results, i.e. behaves as +.  */
10287 if (code == MINUS_EXPR)
10288 code = PLUS_EXPR;
10290 tree x = build2 (code, TREE_TYPE (var2),
10291 unshare_expr (var2), unshare_expr (val));
10292 if (octx->scan_inclusive)
10294 gimplify_assign (unshare_expr (var2), x, &before);
10295 gimplify_assign (val, var2, &before);
10297 else
10299 gimplify_assign (unshare_expr (var4),
10300 unshare_expr (var2), &before);
10301 gimplify_assign (var2, x, &before);
10302 if (lane0 == NULL_TREE)
10303 gimplify_assign (val, var4, &before);
10307 if (octx->scan_exclusive && !input_phase && lane0)
10309 tree vexpr = unshare_expr (var4);
10310 TREE_OPERAND (vexpr, 1) = lane0;
10311 if (new_vard != new_var)
10312 vexpr = build_fold_addr_expr_loc (clause_loc, vexpr);
10313 SET_DECL_VALUE_EXPR (new_vard, vexpr);
10317 if (is_simd && !is_for_simd)
/* Standalone simd: splice the scan body and the BEFORE sequence in
   place of the scan stmt and replace it with a nop.  */
10319 gsi_insert_seq_after (gsi_p, gimple_omp_body (stmt), GSI_SAME_STMT);
10320 gsi_insert_seq_after (gsi_p, before, GSI_SAME_STMT);
10321 gsi_replace (gsi_p, gimple_build_nop (), true);
10322 return;
10324 lower_omp (gimple_omp_body_ptr (stmt), octx);
10325 if (before)
10327 gimple_stmt_iterator gsi = gsi_start_1 (gimple_omp_body_ptr (stmt));
10328 gsi_insert_seq_before (&gsi, before, GSI_SAME_STMT);
10333 /* Gimplify a GIMPLE_OMP_CRITICAL statement. This is a relatively simple
10334 substitution of a couple of function calls. But in the NAMED case,
10335 requires that languages coordinate a symbol name. It is therefore
10336 best put here in common code. */
10338 static GTY(()) hash_map<tree, tree> *critical_name_mutexes;
10340 static void
10341 lower_omp_critical (gimple_stmt_iterator *gsi_p, omp_context *ctx)
10343 tree block;
10344 tree name, lock, unlock;
10345 gomp_critical *stmt = as_a <gomp_critical *> (gsi_stmt (*gsi_p));
10346 gbind *bind;
10347 location_t loc = gimple_location (stmt);
10348 gimple_seq tbody;
10350 name = gimple_omp_critical_name (stmt);
/* A named critical region locks a per-name mutex symbol shared across
the whole program; an unnamed one uses the single global lock. */
10351 if (name)
10353 tree decl;
/* Lazily create the name -> mutex-decl map on first use; it is GC
allocated (create_ggc) because it holds trees. */
10355 if (!critical_name_mutexes)
10356 critical_name_mutexes = hash_map<tree, tree>::create_ggc (10);
10358 tree *n = critical_name_mutexes->get (name);
10359 if (n == NULL)
10361 char *new_str;
10363 decl = create_tmp_var_raw (ptr_type_node);
/* The symbol is TREE_PUBLIC + DECL_COMMON so that every translation
unit using the same critical name links against one object; the
".gomp_critical_user_" prefix keeps it out of the user namespace. */
10365 new_str = ACONCAT ((".gomp_critical_user_",
10366 IDENTIFIER_POINTER (name), NULL));
10367 DECL_NAME (decl) = get_identifier (new_str);
10368 TREE_PUBLIC (decl) = 1;
10369 TREE_STATIC (decl) = 1;
10370 DECL_COMMON (decl) = 1;
10371 DECL_ARTIFICIAL (decl) = 1;
10372 DECL_IGNORED_P (decl) = 1;
10374 varpool_node::finalize_decl (decl);
10376 critical_name_mutexes->put (name, decl);
10378 else
10379 decl = *n;
10381 /* If '#pragma omp critical' is inside offloaded region or
10382 inside function marked as offloadable, the symbol must be
10383 marked as offloadable too. */
10384 omp_context *octx;
10385 if (cgraph_node::get (current_function_decl)->offloadable)
10386 varpool_node::get_create (decl)->offloadable = 1;
10387 else
10388 for (octx = ctx->outer; octx; octx = octx->outer)
10389 if (is_gimple_omp_offloaded (octx->stmt))
10391 varpool_node::get_create (decl)->offloadable = 1;
10392 break;
/* GOMP_critical_name_start/end take the address of the per-name
mutex object built above. */
10395 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_START);
10396 lock = build_call_expr_loc (loc, lock, 1,
10397 build_fold_addr_expr_loc (loc, decl));
10399 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_END);
10400 unlock = build_call_expr_loc (loc, unlock, 1,
10401 build_fold_addr_expr_loc (loc, decl));
10403 else
/* Unnamed critical: the argument-less global lock entry points. */
10405 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_START);
10406 lock = build_call_expr_loc (loc, lock, 0);
10408 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_END);
10409 unlock = build_call_expr_loc (loc, unlock, 0);
/* Wrap the construct in a GIMPLE_BIND: lock call, then the lowered
body (guarded by an EH region so the lock is released on exceptions),
then the unlock call and an OMP_RETURN. */
10412 push_gimplify_context ();
10414 block = make_node (BLOCK);
10415 bind = gimple_build_bind (NULL, NULL, block);
10416 gsi_replace (gsi_p, bind, true);
10417 gimple_bind_add_stmt (bind, stmt);
10419 tbody = gimple_bind_body (bind);
10420 gimplify_and_add (lock, &tbody);
10421 gimple_bind_set_body (bind, tbody);
10423 lower_omp (gimple_omp_body_ptr (stmt), ctx);
10424 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
10425 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
10426 gimple_omp_set_body (stmt, NULL);
10428 tbody = gimple_bind_body (bind);
10429 gimplify_and_add (unlock, &tbody);
10430 gimple_bind_set_body (bind, tbody);
/* OMP_RETURN with nowait set: CRITICAL has no implied barrier. */
10432 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
10434 pop_gimplify_context (bind);
10435 gimple_bind_append_vars (bind, ctx->block_vars)
10436 BLOCK_VARS (block) = gimple_bind_vars (bind);
10439 /* A subroutine of lower_omp_for. Generate code to emit the predicate
10440 for a lastprivate clause. Given a loop control predicate of (V
10441 cond N2), we gate the clause on (!(V cond N2)). The lowered form
10442 is appended to *DLIST, iterator initialization is appended to
10443 *BODY_P. *CLIST is for lastprivate(conditional:) code that needs
10444 to be emitted in a critical section. */
10446 static void
10447 lower_omp_for_lastprivate (struct omp_for_data *fd, gimple_seq *body_p,
10448 gimple_seq *dlist, gimple_seq *clist,
10449 struct omp_context *ctx)
10451 tree clauses, cond, vinit;
10452 enum tree_code cond_code;
10453 gimple_seq stmts;
/* Invert the loop's continuation test: lastprivate must fire only in
the iteration after which (V cond N2) no longer holds, so LT becomes
GE and everything else becomes LE. */
10455 cond_code = fd->loop.cond_code;
10456 cond_code = cond_code == LT_EXPR ? GE_EXPR : LE_EXPR;
10458 /* When possible, use a strict equality expression. This can let VRP
10459 type optimizations deduce the value and remove a copy. */
10460 if (tree_fits_shwi_p (fd->loop.step))
10462 HOST_WIDE_INT step = tree_to_shwi (fd->loop.step);
10463 if (step == 1 || step == -1)
10464 cond_code = EQ_EXPR;
/* For collapsed loops combined into an outer construct, the real end
value lives in a _looptemp_ clause of the enclosing taskreg (or in
the outer GOMP_FOR's own fd), not in fd->loop.n2 — dig it out. */
10467 tree n2 = fd->loop.n2;
10468 if (fd->collapse > 1
10469 && TREE_CODE (n2) != INTEGER_CST
10470 && gimple_omp_for_combined_into_p (fd->for_stmt))
10472 struct omp_context *taskreg_ctx = NULL;
10473 if (gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
10475 gomp_for *gfor = as_a <gomp_for *> (ctx->outer->stmt);
10476 if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_FOR
10477 || gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_DISTRIBUTE)
10479 if (gimple_omp_for_combined_into_p (gfor))
/* gfor is itself combined into an enclosing parallel;
the _looptemp_ clauses are on that parallel. */
10481 gcc_assert (ctx->outer->outer
10482 && is_parallel_ctx (ctx->outer->outer));
10483 taskreg_ctx = ctx->outer->outer;
10485 else
/* Outer worksharing loop is the outermost construct;
re-extract its for-data to get the true end value. */
10487 struct omp_for_data outer_fd;
10488 omp_extract_for_data (gfor, &outer_fd, NULL);
10489 n2 = fold_convert (TREE_TYPE (n2), outer_fd.loop.n2);
10492 else if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_TASKLOOP)
10493 taskreg_ctx = ctx->outer->outer;
10495 else if (is_taskreg_ctx (ctx->outer))
10496 taskreg_ctx = ctx->outer;
10497 if (taskreg_ctx)
10499 int i;
10500 tree taskreg_clauses
10501 = gimple_omp_taskreg_clauses (taskreg_ctx->stmt);
10502 tree innerc = omp_find_clause (taskreg_clauses,
10503 OMP_CLAUSE__LOOPTEMP_);
10504 gcc_assert (innerc);
/* Skip over the first _looptemp_ clauses: one per collapsed
dimension, plus 4 extra for the signed non-rectangular
double-loop case; the one after those holds the end value. */
10505 int count = fd->collapse;
10506 if (fd->non_rect
10507 && fd->last_nonrect == fd->first_nonrect + 1)
10508 if (tree v = gimple_omp_for_index (fd->for_stmt, fd->last_nonrect))
10509 if (!TYPE_UNSIGNED (TREE_TYPE (v)))
10510 count += 4;
10511 for (i = 0; i < count; i++)
10513 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
10514 OMP_CLAUSE__LOOPTEMP_);
10515 gcc_assert (innerc);
10517 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
10518 OMP_CLAUSE__LOOPTEMP_);
10519 if (innerc)
10520 n2 = fold_convert (TREE_TYPE (n2),
10521 lookup_decl (OMP_CLAUSE_DECL (innerc),
10522 taskreg_ctx));
/* Gate the lastprivate copy-out on the inverted predicate. */
10525 cond = build2 (cond_code, boolean_type_node, fd->loop.v, n2);
10527 clauses = gimple_omp_for_clauses (fd->for_stmt);
10528 stmts = NULL;
10529 lower_lastprivate_clauses (clauses, cond, body_p, &stmts, clist, ctx);
10530 if (!gimple_seq_empty_p (stmts))
/* Prepend the guarded copy-out code to *DLIST. */
10532 gimple_seq_add_seq (&stmts, *dlist);
10533 *dlist = stmts;
10535 /* Optimize: v = 0; is usually cheaper than v = some_other_constant. */
10536 vinit = fd->loop.n1;
10537 if (cond_code == EQ_EXPR
10538 && tree_fits_shwi_p (fd->loop.n2)
10539 && ! integer_zerop (fd->loop.n2))
10540 vinit = build_int_cst (TREE_TYPE (fd->loop.v), 0);
10541 else
10542 vinit = unshare_expr (vinit);
10544 /* Initialize the iterator variable, so that threads that don't execute
10545 any iterations don't execute the lastprivate clauses by accident. */
10546 gimplify_assign (fd->loop.v, vinit, body_p);
10550 /* OpenACC privatization.
10552 Or, in other words, *sharing* at the respective OpenACC level of
10553 parallelism.
10555 From a correctness perspective, a non-addressable variable can't be accessed
10556 outside the current thread, so it can go in a (faster than shared memory)
10557 register -- though that register may need to be broadcast in some
10558 circumstances. A variable can only meaningfully be "shared" across workers
10559 or vector lanes if its address is taken, e.g. by a call to an atomic
10560 builtin.
10562 From an optimisation perspective, the answer might be fuzzier: maybe
10563 sometimes, using shared memory directly would be faster than
10564 broadcasting. */
/* Emit the opening fragment of an OpenACC-privatization diagnostic for
DECL: "variable 'X' " followed by either "in 'foo' clause " (when C is
a clause) or "declared in block " (when C is NULL_TREE).  The caller
appends the verdict text via a following dump_printf.  */
10566 static void
10567 oacc_privatization_begin_diagnose_var (const dump_flags_t l_dump_flags,
10568 const location_t loc, const tree c,
10569 const tree decl)
10571 const dump_user_location_t d_u_loc
10572 = dump_user_location_t::from_location_t (loc);
10573 /* PR100695 "Format decoder, quoting in 'dump_printf' etc." */
/* The %T directive is a tree-pretty-printer extension that -Wformat
does not know about; silence it around the call.  */
10574 #if __GNUC__ >= 10
10575 # pragma GCC diagnostic push
10576 # pragma GCC diagnostic ignored "-Wformat"
10577 #endif
10578 dump_printf_loc (l_dump_flags, d_u_loc,
10579 "variable %<%T%> ", decl);
10580 #if __GNUC__ >= 10
10581 # pragma GCC diagnostic pop
10582 #endif
10583 if (c)
10584 dump_printf (l_dump_flags,
10585 "in %qs clause ",
10586 omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
10587 else
10588 dump_printf (l_dump_flags,
10589 "declared in block ");
/* Return true if DECL is a candidate for adjusting its OpenACC
privatization level.  C is the 'private' clause naming DECL, or
NULL_TREE when DECL comes from a block's declaration chain; the
static/external checks only apply in the block case.  Emits an
explanatory dump line for each rejection and for acceptance.  */
10592 static bool
10593 oacc_privatization_candidate_p (const location_t loc, const tree c,
10594 const tree decl)
10596 dump_flags_t l_dump_flags = get_openacc_privatization_dump_flags ();
10598 /* There is some differentiation depending on block vs. clause. */
10599 bool block = !c;
10601 bool res = true;
/* Reject anything that is not a VAR_DECL.  */
10603 if (res && !VAR_P (decl))
10605 /* A PARM_DECL (appearing in a 'private' clause) is expected to have been
10606 privatized into a new VAR_DECL. */
10607 gcc_checking_assert (TREE_CODE (decl) != PARM_DECL);
10609 res = false;
10611 if (dump_enabled_p ())
10613 oacc_privatization_begin_diagnose_var (l_dump_flags, loc, c, decl);
10614 dump_printf (l_dump_flags,
10615 "potentially has improper OpenACC privatization level: %qs\n",
10616 get_tree_code_name (TREE_CODE (decl)));
/* Block-declared variables with static storage are not per-thread.  */
10620 if (res && block && TREE_STATIC (decl))
10622 res = false;
10624 if (dump_enabled_p ())
10626 oacc_privatization_begin_diagnose_var (l_dump_flags, loc, c, decl);
10627 dump_printf (l_dump_flags,
10628 "isn%'t candidate for adjusting OpenACC privatization level: %s\n",
10629 "static");
/* Likewise for block-declared variables defined elsewhere.  */
10633 if (res && block && DECL_EXTERNAL (decl))
10635 res = false;
10637 if (dump_enabled_p ())
10639 oacc_privatization_begin_diagnose_var (l_dump_flags, loc, c, decl);
10640 dump_printf (l_dump_flags,
10641 "isn%'t candidate for adjusting OpenACC privatization level: %s\n",
10642 "external");
/* A non-addressable variable can't be observed outside its thread, so
there is no privatization level to adjust (see the comment block
above this function).  */
10646 if (res && !TREE_ADDRESSABLE (decl))
10648 res = false;
10650 if (dump_enabled_p ())
10652 oacc_privatization_begin_diagnose_var (l_dump_flags, loc, c, decl);
10653 dump_printf (l_dump_flags,
10654 "isn%'t candidate for adjusting OpenACC privatization level: %s\n",
10655 "not addressable");
10659 if (res)
10661 if (dump_enabled_p ())
10663 oacc_privatization_begin_diagnose_var (l_dump_flags, loc, c, decl);
10664 dump_printf (l_dump_flags,
10665 "is candidate for adjusting OpenACC privatization level\n");
/* With -fdump-...-details, also print the full decl.  */
10669 if (dump_file && (dump_flags & TDF_DETAILS))
10671 print_generic_decl (dump_file, decl, dump_flags);
10672 fprintf (dump_file, "\n");
10675 return res;
10678 /* Scan CLAUSES for candidates for adjusting OpenACC privatization level in
10679 CTX. */
10681 static void
10682 oacc_privatization_scan_clause_chain (omp_context *ctx, tree clauses)
10684 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
/* Only 'private' clauses participate in OpenACC privatization.  */
10685 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE)
10687 tree decl = OMP_CLAUSE_DECL (c);
/* Candidate-check the privatized copy in CTX, not the original.  */
10689 tree new_decl = lookup_decl (decl, ctx);
10691 if (!oacc_privatization_candidate_p (OMP_CLAUSE_LOCATION (c), c,
10692 new_decl))
10693 continue;
/* Each candidate must be recorded at most once per context.  */
10695 gcc_checking_assert
10696 (!ctx->oacc_privatization_candidates.contains (new_decl));
10697 ctx->oacc_privatization_candidates.safe_push (new_decl);
10701 /* Scan DECLS for candidates for adjusting OpenACC privatization level in
10702 CTX. */
10704 static void
10705 oacc_privatization_scan_decl_chain (omp_context *ctx, tree decls)
10707 for (tree decl = decls; decl; decl = DECL_CHAIN (decl))
/* Block-local decls are expected to map to themselves in CTX
(unlike clause decls, which get privatized copies).  */
10709 tree new_decl = lookup_decl (decl, ctx);
10710 gcc_checking_assert (new_decl == decl);
10712 if (!oacc_privatization_candidate_p (gimple_location (ctx->stmt), NULL,
10713 new_decl))
10714 continue;
/* Each candidate must be recorded at most once per context.  */
10716 gcc_checking_assert
10717 (!ctx->oacc_privatization_candidates.contains (new_decl));
10718 ctx->oacc_privatization_candidates.safe_push (new_decl);
10722 /* Callback for walk_gimple_seq. Find #pragma omp scan statement. */
10724 static tree
10725 omp_find_scan (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
10726 struct walk_stmt_info *wi)
10728 gimple *stmt = gsi_stmt (*gsi_p);
10730 *handled_ops_p = true;
10731 switch (gimple_code (stmt))
10733 WALK_SUBSTMTS;
10735 case GIMPLE_OMP_FOR:
/* Descend only into simd loops combined into the worksharing loop;
the scan directive we are looking for lives inside those.  */
10736 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_SIMD
10737 && gimple_omp_for_combined_into_p (stmt))
10738 *handled_ops_p = false;
10739 break;
10741 case GIMPLE_OMP_SCAN:
/* Found it: record its position in the iterator the caller passed
via wi->info, and return non-NULL to terminate the walk.  */
10742 *(gimple_stmt_iterator *) (wi->info) = *gsi_p;
10743 return integer_zero_node;
10744 default:
10745 break;
10747 return NULL;
10750 /* Helper function for lower_omp_for, add transformations for a worksharing
10751 loop with scan directives inside of it.
10752 For worksharing loop not combined with simd, transform:
10753 #pragma omp for reduction(inscan,+:r) private(i)
10754 for (i = 0; i < n; i = i + 1)
10757 update (r);
10759 #pragma omp scan inclusive(r)
10761 use (r);
10765 into two worksharing loops + code to merge results:
10767 num_threads = omp_get_num_threads ();
10768 thread_num = omp_get_thread_num ();
10769 if (thread_num == 0) goto <D.2099>; else goto <D.2100>;
10770 <D.2099>:
10771 var2 = r;
10772 goto <D.2101>;
10773 <D.2100>:
10774 // For UDRs this is UDR init, or if ctors are needed, copy from
10775 // var3 that has been constructed to contain the neutral element.
10776 var2 = 0;
10777 <D.2101>:
10778 ivar = 0;
10779 // The _scantemp_ clauses will arrange for rpriva to be initialized to
10780 // a shared array with num_threads elements and rprivb to a local array
10781 // number of elements equal to the number of (contiguous) iterations the
10782 // current thread will perform. controlb and controlp variables are
10783 // temporaries to handle deallocation of rprivb at the end of second
10784 // GOMP_FOR.
10785 #pragma omp for _scantemp_(rpriva) _scantemp_(rprivb) _scantemp_(controlb) \
10786 _scantemp_(controlp) reduction(inscan,+:r) private(i) nowait
10787 for (i = 0; i < n; i = i + 1)
10790 // For UDRs this is UDR init or copy from var3.
10791 r = 0;
10792 // This is the input phase from user code.
10793 update (r);
10796 // For UDRs this is UDR merge.
10797 var2 = var2 + r;
10798 // Rather than handing it over to the user, save to local thread's
10799 // array.
10800 rprivb[ivar] = var2;
10801 // For exclusive scan, the above two statements are swapped.
10802 ivar = ivar + 1;
10805 // And remember the final value from this thread's into the shared
10806 // rpriva array.
10807 rpriva[(sizetype) thread_num] = var2;
10808 // If more than one thread, compute using Work-Efficient prefix sum
10809 // the inclusive parallel scan of the rpriva array.
10810 if (num_threads > 1) goto <D.2102>; else goto <D.2103>;
10811 <D.2102>:
10812 GOMP_barrier ();
10813 down = 0;
10814 k = 1;
10815 num_threadsu = (unsigned int) num_threads;
10816 thread_numup1 = (unsigned int) thread_num + 1;
10817 <D.2108>:
10818 twok = k << 1;
10819 if (twok > num_threadsu) goto <D.2110>; else goto <D.2111>;
10820 <D.2110>:
10821 down = 4294967295;
10822 k = k >> 1;
10823 if (k == num_threadsu) goto <D.2112>; else goto <D.2111>;
10824 <D.2112>:
10825 k = k >> 1;
10826 <D.2111>:
10827 twok = k << 1;
10828 cplx = .MUL_OVERFLOW (thread_nump1, twok);
10829 mul = REALPART_EXPR <cplx>;
10830 ovf = IMAGPART_EXPR <cplx>;
10831 if (ovf == 0) goto <D.2116>; else goto <D.2117>;
10832 <D.2116>:
10833 andv = k & down;
10834 andvm1 = andv + 4294967295;
10835 l = mul + andvm1;
10836 if (l < num_threadsu) goto <D.2120>; else goto <D.2117>;
10837 <D.2120>:
10838 // For UDRs this is UDR merge, performed using var2 variable as temporary,
10839 // i.e. var2 = rpriva[l - k]; UDR merge (var2, rpriva[l]); rpriva[l] = var2;
10840 rpriva[l] = rpriva[l - k] + rpriva[l];
10841 <D.2117>:
10842 if (down == 0) goto <D.2121>; else goto <D.2122>;
10843 <D.2121>:
10844 k = k << 1;
10845 goto <D.2123>;
10846 <D.2122>:
10847 k = k >> 1;
10848 <D.2123>:
10849 GOMP_barrier ();
10850 if (k != 0) goto <D.2108>; else goto <D.2103>;
10851 <D.2103>:
10852 if (thread_num == 0) goto <D.2124>; else goto <D.2125>;
10853 <D.2124>:
10854 // For UDRs this is UDR init or copy from var3.
10855 var2 = 0;
10856 goto <D.2126>;
10857 <D.2125>:
10858 var2 = rpriva[thread_num - 1];
10859 <D.2126>:
10860 ivar = 0;
10861 #pragma omp for _scantemp_(controlb) _scantemp_(controlp) \
10862 reduction(inscan,+:r) private(i)
10863 for (i = 0; i < n; i = i + 1)
10866 // For UDRs, this is r = var2; UDR merge (r, rprivb[ivar]);
10867 r = var2 + rprivb[ivar];
10870 // This is the scan phase from user code.
10871 use (r);
10872 // Plus a bump of the iterator.
10873 ivar = ivar + 1;
10875 } */
10877 static void
10878 lower_omp_for_scan (gimple_seq *body_p, gimple_seq *dlist, gomp_for *stmt,
10879 struct omp_for_data *fd, omp_context *ctx)
10881 bool is_for_simd = gimple_omp_for_combined_p (stmt);
10882 gcc_assert (ctx->scan_inclusive || ctx->scan_exclusive);
10884 gimple_seq body = gimple_omp_body (stmt);
10885 gimple_stmt_iterator input1_gsi = gsi_none ();
10886 struct walk_stmt_info wi;
10887 memset (&wi, 0, sizeof (wi));
10888 wi.val_only = true;
10889 wi.info = (void *) &input1_gsi;
10890 walk_gimple_seq_mod (&body, omp_find_scan, NULL, &wi);
10891 gcc_assert (!gsi_end_p (input1_gsi));
10893 gimple *input_stmt1 = gsi_stmt (input1_gsi);
10894 gimple_stmt_iterator gsi = input1_gsi;
10895 gsi_next (&gsi);
10896 gimple_stmt_iterator scan1_gsi = gsi;
10897 gimple *scan_stmt1 = gsi_stmt (gsi);
10898 gcc_assert (scan_stmt1 && gimple_code (scan_stmt1) == GIMPLE_OMP_SCAN);
10900 gimple_seq input_body = gimple_omp_body (input_stmt1);
10901 gimple_seq scan_body = gimple_omp_body (scan_stmt1);
10902 gimple_omp_set_body (input_stmt1, NULL);
10903 gimple_omp_set_body (scan_stmt1, NULL);
10904 gimple_omp_set_body (stmt, NULL);
10906 gomp_for *new_stmt = as_a <gomp_for *> (gimple_copy (stmt));
10907 gimple_seq new_body = copy_gimple_seq_and_replace_locals (body);
10908 gimple_omp_set_body (stmt, body);
10909 gimple_omp_set_body (input_stmt1, input_body);
10911 gimple_stmt_iterator input2_gsi = gsi_none ();
10912 memset (&wi, 0, sizeof (wi));
10913 wi.val_only = true;
10914 wi.info = (void *) &input2_gsi;
10915 walk_gimple_seq_mod (&new_body, omp_find_scan, NULL, &wi);
10916 gcc_assert (!gsi_end_p (input2_gsi));
10918 gimple *input_stmt2 = gsi_stmt (input2_gsi);
10919 gsi = input2_gsi;
10920 gsi_next (&gsi);
10921 gimple_stmt_iterator scan2_gsi = gsi;
10922 gimple *scan_stmt2 = gsi_stmt (gsi);
10923 gcc_assert (scan_stmt2 && gimple_code (scan_stmt2) == GIMPLE_OMP_SCAN);
10924 gimple_omp_set_body (scan_stmt2, scan_body);
10926 gimple_stmt_iterator input3_gsi = gsi_none ();
10927 gimple_stmt_iterator scan3_gsi = gsi_none ();
10928 gimple_stmt_iterator input4_gsi = gsi_none ();
10929 gimple_stmt_iterator scan4_gsi = gsi_none ();
10930 gimple *input_stmt3 = NULL, *scan_stmt3 = NULL;
10931 gimple *input_stmt4 = NULL, *scan_stmt4 = NULL;
10932 omp_context *input_simd_ctx = NULL, *scan_simd_ctx = NULL;
10933 if (is_for_simd)
10935 memset (&wi, 0, sizeof (wi));
10936 wi.val_only = true;
10937 wi.info = (void *) &input3_gsi;
10938 walk_gimple_seq_mod (&input_body, omp_find_scan, NULL, &wi);
10939 gcc_assert (!gsi_end_p (input3_gsi));
10941 input_stmt3 = gsi_stmt (input3_gsi);
10942 gsi = input3_gsi;
10943 gsi_next (&gsi);
10944 scan3_gsi = gsi;
10945 scan_stmt3 = gsi_stmt (gsi);
10946 gcc_assert (scan_stmt3 && gimple_code (scan_stmt3) == GIMPLE_OMP_SCAN);
10948 memset (&wi, 0, sizeof (wi));
10949 wi.val_only = true;
10950 wi.info = (void *) &input4_gsi;
10951 walk_gimple_seq_mod (&scan_body, omp_find_scan, NULL, &wi);
10952 gcc_assert (!gsi_end_p (input4_gsi));
10954 input_stmt4 = gsi_stmt (input4_gsi);
10955 gsi = input4_gsi;
10956 gsi_next (&gsi);
10957 scan4_gsi = gsi;
10958 scan_stmt4 = gsi_stmt (gsi);
10959 gcc_assert (scan_stmt4 && gimple_code (scan_stmt4) == GIMPLE_OMP_SCAN);
10961 input_simd_ctx = maybe_lookup_ctx (input_stmt3)->outer;
10962 scan_simd_ctx = maybe_lookup_ctx (input_stmt4)->outer;
10965 tree num_threads = create_tmp_var (integer_type_node);
10966 tree thread_num = create_tmp_var (integer_type_node);
10967 tree nthreads_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS);
10968 tree threadnum_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
10969 gimple *g = gimple_build_call (nthreads_decl, 0);
10970 gimple_call_set_lhs (g, num_threads);
10971 gimple_seq_add_stmt (body_p, g);
10972 g = gimple_build_call (threadnum_decl, 0);
10973 gimple_call_set_lhs (g, thread_num);
10974 gimple_seq_add_stmt (body_p, g);
10976 tree ivar = create_tmp_var (sizetype);
10977 tree new_clauses1 = NULL_TREE, new_clauses2 = NULL_TREE;
10978 tree *cp1 = &new_clauses1, *cp2 = &new_clauses2;
10979 tree k = create_tmp_var (unsigned_type_node);
10980 tree l = create_tmp_var (unsigned_type_node);
10982 gimple_seq clist = NULL, mdlist = NULL;
10983 gimple_seq thr01_list = NULL, thrn1_list = NULL;
10984 gimple_seq thr02_list = NULL, thrn2_list = NULL;
10985 gimple_seq scan1_list = NULL, input2_list = NULL;
10986 gimple_seq last_list = NULL, reduc_list = NULL;
10987 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
10988 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
10989 && OMP_CLAUSE_REDUCTION_INSCAN (c))
10991 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
10992 tree var = OMP_CLAUSE_DECL (c);
10993 tree new_var = lookup_decl (var, ctx);
10994 tree var3 = NULL_TREE;
10995 tree new_vard = new_var;
10996 if (omp_privatize_by_reference (var))
10997 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
10998 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
11000 var3 = maybe_lookup_decl (new_vard, ctx);
11001 if (var3 == new_vard)
11002 var3 = NULL_TREE;
11005 tree ptype = build_pointer_type (TREE_TYPE (new_var));
11006 tree rpriva = create_tmp_var (ptype);
11007 tree nc = build_omp_clause (clause_loc, OMP_CLAUSE__SCANTEMP_);
11008 OMP_CLAUSE_DECL (nc) = rpriva;
11009 *cp1 = nc;
11010 cp1 = &OMP_CLAUSE_CHAIN (nc);
11012 tree rprivb = create_tmp_var (ptype);
11013 nc = build_omp_clause (clause_loc, OMP_CLAUSE__SCANTEMP_);
11014 OMP_CLAUSE_DECL (nc) = rprivb;
11015 OMP_CLAUSE__SCANTEMP__ALLOC (nc) = 1;
11016 *cp1 = nc;
11017 cp1 = &OMP_CLAUSE_CHAIN (nc);
11019 tree var2 = create_tmp_var_raw (TREE_TYPE (new_var));
11020 if (new_vard != new_var)
11021 TREE_ADDRESSABLE (var2) = 1;
11022 gimple_add_tmp_var (var2);
11024 tree x = fold_convert_loc (clause_loc, sizetype, thread_num);
11025 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
11026 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
11027 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
11028 tree rpriva_ref = build_simple_mem_ref_loc (clause_loc, x);
11030 x = fold_build2_loc (clause_loc, PLUS_EXPR, integer_type_node,
11031 thread_num, integer_minus_one_node);
11032 x = fold_convert_loc (clause_loc, sizetype, x);
11033 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
11034 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
11035 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
11036 tree rprivam1_ref = build_simple_mem_ref_loc (clause_loc, x);
11038 x = fold_convert_loc (clause_loc, sizetype, l);
11039 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
11040 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
11041 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
11042 tree rprival_ref = build_simple_mem_ref_loc (clause_loc, x);
11044 x = fold_build2_loc (clause_loc, MINUS_EXPR, unsigned_type_node, l, k);
11045 x = fold_convert_loc (clause_loc, sizetype, x);
11046 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
11047 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
11048 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
11049 tree rprivalmk_ref = build_simple_mem_ref_loc (clause_loc, x);
11051 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, ivar,
11052 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
11053 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rprivb), rprivb, x);
11054 tree rprivb_ref = build_simple_mem_ref_loc (clause_loc, x);
11056 tree var4 = is_for_simd ? new_var : var2;
11057 tree var5 = NULL_TREE, var6 = NULL_TREE;
11058 if (is_for_simd)
11060 var5 = lookup_decl (var, input_simd_ctx);
11061 var6 = lookup_decl (var, scan_simd_ctx);
11062 if (new_vard != new_var)
11064 var5 = build_simple_mem_ref_loc (clause_loc, var5);
11065 var6 = build_simple_mem_ref_loc (clause_loc, var6);
11068 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
11070 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
11071 tree val = var2;
11073 x = lang_hooks.decls.omp_clause_default_ctor
11074 (c, var2, build_outer_var_ref (var, ctx));
11075 if (x)
11076 gimplify_and_add (x, &clist);
11078 x = build_outer_var_ref (var, ctx);
11079 x = lang_hooks.decls.omp_clause_assign_op (c, unshare_expr (var4),
11081 gimplify_and_add (x, &thr01_list);
11083 tree y = (DECL_HAS_VALUE_EXPR_P (new_vard)
11084 ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
11085 if (var3)
11087 x = unshare_expr (var4);
11088 x = lang_hooks.decls.omp_clause_assign_op (c, x, var3);
11089 gimplify_and_add (x, &thrn1_list);
11090 x = unshare_expr (var4);
11091 x = lang_hooks.decls.omp_clause_assign_op (c, x, var3);
11092 gimplify_and_add (x, &thr02_list);
11094 else if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
11096 /* Otherwise, assign to it the identity element. */
11097 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
11098 tseq = copy_gimple_seq_and_replace_locals (tseq);
11099 if (!is_for_simd)
11101 if (new_vard != new_var)
11102 val = build_fold_addr_expr_loc (clause_loc, val);
11103 SET_DECL_VALUE_EXPR (new_vard, val);
11104 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
11106 SET_DECL_VALUE_EXPR (placeholder, error_mark_node);
11107 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
11108 lower_omp (&tseq, ctx);
11109 gimple_seq_add_seq (&thrn1_list, tseq);
11110 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
11111 lower_omp (&tseq, ctx);
11112 gimple_seq_add_seq (&thr02_list, tseq);
11113 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
11114 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
11115 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
11116 if (y)
11117 SET_DECL_VALUE_EXPR (new_vard, y);
11118 else
11120 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
11121 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
11125 x = unshare_expr (var4);
11126 x = lang_hooks.decls.omp_clause_assign_op (c, x, rprivam1_ref);
11127 gimplify_and_add (x, &thrn2_list);
11129 if (is_for_simd)
11131 x = unshare_expr (rprivb_ref);
11132 x = lang_hooks.decls.omp_clause_assign_op (c, x, var5);
11133 gimplify_and_add (x, &scan1_list);
11135 else
11137 if (ctx->scan_exclusive)
11139 x = unshare_expr (rprivb_ref);
11140 x = lang_hooks.decls.omp_clause_assign_op (c, x, var2);
11141 gimplify_and_add (x, &scan1_list);
11144 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
11145 tseq = copy_gimple_seq_and_replace_locals (tseq);
11146 SET_DECL_VALUE_EXPR (placeholder, var2);
11147 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
11148 lower_omp (&tseq, ctx);
11149 gimple_seq_add_seq (&scan1_list, tseq);
11151 if (ctx->scan_inclusive)
11153 x = unshare_expr (rprivb_ref);
11154 x = lang_hooks.decls.omp_clause_assign_op (c, x, var2);
11155 gimplify_and_add (x, &scan1_list);
11159 x = unshare_expr (rpriva_ref);
11160 x = lang_hooks.decls.omp_clause_assign_op (c, x,
11161 unshare_expr (var4));
11162 gimplify_and_add (x, &mdlist);
11164 x = unshare_expr (is_for_simd ? var6 : new_var);
11165 x = lang_hooks.decls.omp_clause_assign_op (c, x, var4);
11166 gimplify_and_add (x, &input2_list);
11168 val = rprivb_ref;
11169 if (new_vard != new_var)
11170 val = build_fold_addr_expr_loc (clause_loc, val);
11172 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
11173 tseq = copy_gimple_seq_and_replace_locals (tseq);
11174 SET_DECL_VALUE_EXPR (new_vard, val);
11175 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
11176 if (is_for_simd)
11178 SET_DECL_VALUE_EXPR (placeholder, var6);
11179 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
11181 else
11182 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
11183 lower_omp (&tseq, ctx);
11184 if (y)
11185 SET_DECL_VALUE_EXPR (new_vard, y);
11186 else
11188 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
11189 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
11191 if (!is_for_simd)
11193 SET_DECL_VALUE_EXPR (placeholder, new_var);
11194 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
11195 lower_omp (&tseq, ctx);
11197 gimple_seq_add_seq (&input2_list, tseq);
11199 x = build_outer_var_ref (var, ctx);
11200 x = lang_hooks.decls.omp_clause_assign_op (c, x, rpriva_ref);
11201 gimplify_and_add (x, &last_list);
11203 x = lang_hooks.decls.omp_clause_assign_op (c, var2, rprivalmk_ref);
11204 gimplify_and_add (x, &reduc_list);
11205 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
11206 tseq = copy_gimple_seq_and_replace_locals (tseq);
11207 val = rprival_ref;
11208 if (new_vard != new_var)
11209 val = build_fold_addr_expr_loc (clause_loc, val);
11210 SET_DECL_VALUE_EXPR (new_vard, val);
11211 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
11212 SET_DECL_VALUE_EXPR (placeholder, var2);
11213 lower_omp (&tseq, ctx);
11214 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
11215 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
11216 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
11217 if (y)
11218 SET_DECL_VALUE_EXPR (new_vard, y);
11219 else
11221 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
11222 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
11224 gimple_seq_add_seq (&reduc_list, tseq);
11225 x = lang_hooks.decls.omp_clause_assign_op (c, rprival_ref, var2);
11226 gimplify_and_add (x, &reduc_list);
11228 x = lang_hooks.decls.omp_clause_dtor (c, var2);
11229 if (x)
11230 gimplify_and_add (x, dlist);
11232 else
11234 x = build_outer_var_ref (var, ctx);
11235 gimplify_assign (unshare_expr (var4), x, &thr01_list);
11237 x = omp_reduction_init (c, TREE_TYPE (new_var));
11238 gimplify_assign (unshare_expr (var4), unshare_expr (x),
11239 &thrn1_list);
11240 gimplify_assign (unshare_expr (var4), x, &thr02_list);
11242 gimplify_assign (unshare_expr (var4), rprivam1_ref, &thrn2_list);
11244 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
11245 if (code == MINUS_EXPR)
11246 code = PLUS_EXPR;
11248 if (is_for_simd)
11249 gimplify_assign (unshare_expr (rprivb_ref), var5, &scan1_list);
11250 else
11252 if (ctx->scan_exclusive)
11253 gimplify_assign (unshare_expr (rprivb_ref), var2,
11254 &scan1_list);
11255 x = build2 (code, TREE_TYPE (new_var), var2, new_var);
11256 gimplify_assign (var2, x, &scan1_list);
11257 if (ctx->scan_inclusive)
11258 gimplify_assign (unshare_expr (rprivb_ref), var2,
11259 &scan1_list);
11262 gimplify_assign (unshare_expr (rpriva_ref), unshare_expr (var4),
11263 &mdlist);
11265 x = build2 (code, TREE_TYPE (new_var), var4, rprivb_ref);
11266 gimplify_assign (is_for_simd ? var6 : new_var, x, &input2_list);
11268 gimplify_assign (build_outer_var_ref (var, ctx), rpriva_ref,
11269 &last_list);
11271 x = build2 (code, TREE_TYPE (new_var), rprivalmk_ref,
11272 unshare_expr (rprival_ref));
11273 gimplify_assign (rprival_ref, x, &reduc_list);
11277 g = gimple_build_assign (ivar, PLUS_EXPR, ivar, size_one_node);
11278 gimple_seq_add_stmt (&scan1_list, g);
11279 g = gimple_build_assign (ivar, PLUS_EXPR, ivar, size_one_node);
11280 gimple_seq_add_stmt (gimple_omp_body_ptr (is_for_simd
11281 ? scan_stmt4 : scan_stmt2), g);
11283 tree controlb = create_tmp_var (boolean_type_node);
11284 tree controlp = create_tmp_var (ptr_type_node);
11285 tree nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
11286 OMP_CLAUSE_DECL (nc) = controlb;
11287 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
11288 *cp1 = nc;
11289 cp1 = &OMP_CLAUSE_CHAIN (nc);
11290 nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
11291 OMP_CLAUSE_DECL (nc) = controlp;
11292 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
11293 *cp1 = nc;
11294 cp1 = &OMP_CLAUSE_CHAIN (nc);
11295 nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
11296 OMP_CLAUSE_DECL (nc) = controlb;
11297 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
11298 *cp2 = nc;
11299 cp2 = &OMP_CLAUSE_CHAIN (nc);
11300 nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
11301 OMP_CLAUSE_DECL (nc) = controlp;
11302 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
11303 *cp2 = nc;
11304 cp2 = &OMP_CLAUSE_CHAIN (nc);
11306 *cp1 = gimple_omp_for_clauses (stmt);
11307 gimple_omp_for_set_clauses (stmt, new_clauses1);
11308 *cp2 = gimple_omp_for_clauses (new_stmt);
11309 gimple_omp_for_set_clauses (new_stmt, new_clauses2);
11311 if (is_for_simd)
11313 gimple_seq_add_seq (gimple_omp_body_ptr (scan_stmt3), scan1_list);
11314 gimple_seq_add_seq (gimple_omp_body_ptr (input_stmt4), input2_list);
11316 gsi_insert_seq_after (&input3_gsi, gimple_omp_body (input_stmt3),
11317 GSI_SAME_STMT);
11318 gsi_remove (&input3_gsi, true);
11319 gsi_insert_seq_after (&scan3_gsi, gimple_omp_body (scan_stmt3),
11320 GSI_SAME_STMT);
11321 gsi_remove (&scan3_gsi, true);
11322 gsi_insert_seq_after (&input4_gsi, gimple_omp_body (input_stmt4),
11323 GSI_SAME_STMT);
11324 gsi_remove (&input4_gsi, true);
11325 gsi_insert_seq_after (&scan4_gsi, gimple_omp_body (scan_stmt4),
11326 GSI_SAME_STMT);
11327 gsi_remove (&scan4_gsi, true);
11329 else
11331 gimple_omp_set_body (scan_stmt1, scan1_list);
11332 gimple_omp_set_body (input_stmt2, input2_list);
11335 gsi_insert_seq_after (&input1_gsi, gimple_omp_body (input_stmt1),
11336 GSI_SAME_STMT);
11337 gsi_remove (&input1_gsi, true);
11338 gsi_insert_seq_after (&scan1_gsi, gimple_omp_body (scan_stmt1),
11339 GSI_SAME_STMT);
11340 gsi_remove (&scan1_gsi, true);
11341 gsi_insert_seq_after (&input2_gsi, gimple_omp_body (input_stmt2),
11342 GSI_SAME_STMT);
11343 gsi_remove (&input2_gsi, true);
11344 gsi_insert_seq_after (&scan2_gsi, gimple_omp_body (scan_stmt2),
11345 GSI_SAME_STMT);
11346 gsi_remove (&scan2_gsi, true);
11348 gimple_seq_add_seq (body_p, clist);
11350 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
11351 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
11352 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
11353 g = gimple_build_cond (EQ_EXPR, thread_num, integer_zero_node, lab1, lab2);
11354 gimple_seq_add_stmt (body_p, g);
11355 g = gimple_build_label (lab1);
11356 gimple_seq_add_stmt (body_p, g);
11357 gimple_seq_add_seq (body_p, thr01_list);
11358 g = gimple_build_goto (lab3);
11359 gimple_seq_add_stmt (body_p, g);
11360 g = gimple_build_label (lab2);
11361 gimple_seq_add_stmt (body_p, g);
11362 gimple_seq_add_seq (body_p, thrn1_list);
11363 g = gimple_build_label (lab3);
11364 gimple_seq_add_stmt (body_p, g);
11366 g = gimple_build_assign (ivar, size_zero_node);
11367 gimple_seq_add_stmt (body_p, g);
11369 gimple_seq_add_stmt (body_p, stmt);
11370 gimple_seq_add_seq (body_p, body);
11371 gimple_seq_add_stmt (body_p, gimple_build_omp_continue (fd->loop.v,
11372 fd->loop.v));
11374 g = gimple_build_omp_return (true);
11375 gimple_seq_add_stmt (body_p, g);
11376 gimple_seq_add_seq (body_p, mdlist);
11378 lab1 = create_artificial_label (UNKNOWN_LOCATION);
11379 lab2 = create_artificial_label (UNKNOWN_LOCATION);
11380 g = gimple_build_cond (GT_EXPR, num_threads, integer_one_node, lab1, lab2);
11381 gimple_seq_add_stmt (body_p, g);
11382 g = gimple_build_label (lab1);
11383 gimple_seq_add_stmt (body_p, g);
11385 g = omp_build_barrier (NULL);
11386 gimple_seq_add_stmt (body_p, g);
11388 tree down = create_tmp_var (unsigned_type_node);
11389 g = gimple_build_assign (down, build_zero_cst (unsigned_type_node));
11390 gimple_seq_add_stmt (body_p, g);
11392 g = gimple_build_assign (k, build_one_cst (unsigned_type_node));
11393 gimple_seq_add_stmt (body_p, g);
11395 tree num_threadsu = create_tmp_var (unsigned_type_node);
11396 g = gimple_build_assign (num_threadsu, NOP_EXPR, num_threads);
11397 gimple_seq_add_stmt (body_p, g);
11399 tree thread_numu = create_tmp_var (unsigned_type_node);
11400 g = gimple_build_assign (thread_numu, NOP_EXPR, thread_num);
11401 gimple_seq_add_stmt (body_p, g);
11403 tree thread_nump1 = create_tmp_var (unsigned_type_node);
11404 g = gimple_build_assign (thread_nump1, PLUS_EXPR, thread_numu,
11405 build_int_cst (unsigned_type_node, 1));
11406 gimple_seq_add_stmt (body_p, g);
11408 lab3 = create_artificial_label (UNKNOWN_LOCATION);
11409 g = gimple_build_label (lab3);
11410 gimple_seq_add_stmt (body_p, g);
11412 tree twok = create_tmp_var (unsigned_type_node);
11413 g = gimple_build_assign (twok, LSHIFT_EXPR, k, integer_one_node);
11414 gimple_seq_add_stmt (body_p, g);
11416 tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
11417 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
11418 tree lab6 = create_artificial_label (UNKNOWN_LOCATION);
11419 g = gimple_build_cond (GT_EXPR, twok, num_threadsu, lab4, lab5);
11420 gimple_seq_add_stmt (body_p, g);
11421 g = gimple_build_label (lab4);
11422 gimple_seq_add_stmt (body_p, g);
11423 g = gimple_build_assign (down, build_all_ones_cst (unsigned_type_node));
11424 gimple_seq_add_stmt (body_p, g);
11425 g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
11426 gimple_seq_add_stmt (body_p, g);
11428 g = gimple_build_cond (EQ_EXPR, k, num_threadsu, lab6, lab5);
11429 gimple_seq_add_stmt (body_p, g);
11430 g = gimple_build_label (lab6);
11431 gimple_seq_add_stmt (body_p, g);
11433 g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
11434 gimple_seq_add_stmt (body_p, g);
11436 g = gimple_build_label (lab5);
11437 gimple_seq_add_stmt (body_p, g);
11439 g = gimple_build_assign (twok, LSHIFT_EXPR, k, integer_one_node);
11440 gimple_seq_add_stmt (body_p, g);
11442 tree cplx = create_tmp_var (build_complex_type (unsigned_type_node, false));
11443 g = gimple_build_call_internal (IFN_MUL_OVERFLOW, 2, thread_nump1, twok);
11444 gimple_call_set_lhs (g, cplx);
11445 gimple_seq_add_stmt (body_p, g);
11446 tree mul = create_tmp_var (unsigned_type_node);
11447 g = gimple_build_assign (mul, REALPART_EXPR,
11448 build1 (REALPART_EXPR, unsigned_type_node, cplx));
11449 gimple_seq_add_stmt (body_p, g);
11450 tree ovf = create_tmp_var (unsigned_type_node);
11451 g = gimple_build_assign (ovf, IMAGPART_EXPR,
11452 build1 (IMAGPART_EXPR, unsigned_type_node, cplx));
11453 gimple_seq_add_stmt (body_p, g);
11455 tree lab7 = create_artificial_label (UNKNOWN_LOCATION);
11456 tree lab8 = create_artificial_label (UNKNOWN_LOCATION);
11457 g = gimple_build_cond (EQ_EXPR, ovf, build_zero_cst (unsigned_type_node),
11458 lab7, lab8);
11459 gimple_seq_add_stmt (body_p, g);
11460 g = gimple_build_label (lab7);
11461 gimple_seq_add_stmt (body_p, g);
11463 tree andv = create_tmp_var (unsigned_type_node);
11464 g = gimple_build_assign (andv, BIT_AND_EXPR, k, down);
11465 gimple_seq_add_stmt (body_p, g);
11466 tree andvm1 = create_tmp_var (unsigned_type_node);
11467 g = gimple_build_assign (andvm1, PLUS_EXPR, andv,
11468 build_minus_one_cst (unsigned_type_node));
11469 gimple_seq_add_stmt (body_p, g);
11471 g = gimple_build_assign (l, PLUS_EXPR, mul, andvm1);
11472 gimple_seq_add_stmt (body_p, g);
11474 tree lab9 = create_artificial_label (UNKNOWN_LOCATION);
11475 g = gimple_build_cond (LT_EXPR, l, num_threadsu, lab9, lab8);
11476 gimple_seq_add_stmt (body_p, g);
11477 g = gimple_build_label (lab9);
11478 gimple_seq_add_stmt (body_p, g);
11479 gimple_seq_add_seq (body_p, reduc_list);
11480 g = gimple_build_label (lab8);
11481 gimple_seq_add_stmt (body_p, g);
11483 tree lab10 = create_artificial_label (UNKNOWN_LOCATION);
11484 tree lab11 = create_artificial_label (UNKNOWN_LOCATION);
11485 tree lab12 = create_artificial_label (UNKNOWN_LOCATION);
11486 g = gimple_build_cond (EQ_EXPR, down, build_zero_cst (unsigned_type_node),
11487 lab10, lab11);
11488 gimple_seq_add_stmt (body_p, g);
11489 g = gimple_build_label (lab10);
11490 gimple_seq_add_stmt (body_p, g);
11491 g = gimple_build_assign (k, LSHIFT_EXPR, k, integer_one_node);
11492 gimple_seq_add_stmt (body_p, g);
11493 g = gimple_build_goto (lab12);
11494 gimple_seq_add_stmt (body_p, g);
11495 g = gimple_build_label (lab11);
11496 gimple_seq_add_stmt (body_p, g);
11497 g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
11498 gimple_seq_add_stmt (body_p, g);
11499 g = gimple_build_label (lab12);
11500 gimple_seq_add_stmt (body_p, g);
11502 g = omp_build_barrier (NULL);
11503 gimple_seq_add_stmt (body_p, g);
11505 g = gimple_build_cond (NE_EXPR, k, build_zero_cst (unsigned_type_node),
11506 lab3, lab2);
11507 gimple_seq_add_stmt (body_p, g);
11509 g = gimple_build_label (lab2);
11510 gimple_seq_add_stmt (body_p, g);
11512 lab1 = create_artificial_label (UNKNOWN_LOCATION);
11513 lab2 = create_artificial_label (UNKNOWN_LOCATION);
11514 lab3 = create_artificial_label (UNKNOWN_LOCATION);
11515 g = gimple_build_cond (EQ_EXPR, thread_num, integer_zero_node, lab1, lab2);
11516 gimple_seq_add_stmt (body_p, g);
11517 g = gimple_build_label (lab1);
11518 gimple_seq_add_stmt (body_p, g);
11519 gimple_seq_add_seq (body_p, thr02_list);
11520 g = gimple_build_goto (lab3);
11521 gimple_seq_add_stmt (body_p, g);
11522 g = gimple_build_label (lab2);
11523 gimple_seq_add_stmt (body_p, g);
11524 gimple_seq_add_seq (body_p, thrn2_list);
11525 g = gimple_build_label (lab3);
11526 gimple_seq_add_stmt (body_p, g);
11528 g = gimple_build_assign (ivar, size_zero_node);
11529 gimple_seq_add_stmt (body_p, g);
11530 gimple_seq_add_stmt (body_p, new_stmt);
11531 gimple_seq_add_seq (body_p, new_body);
11533 gimple_seq new_dlist = NULL;
11534 lab1 = create_artificial_label (UNKNOWN_LOCATION);
11535 lab2 = create_artificial_label (UNKNOWN_LOCATION);
11536 tree num_threadsm1 = create_tmp_var (integer_type_node);
11537 g = gimple_build_assign (num_threadsm1, PLUS_EXPR, num_threads,
11538 integer_minus_one_node);
11539 gimple_seq_add_stmt (&new_dlist, g);
11540 g = gimple_build_cond (EQ_EXPR, thread_num, num_threadsm1, lab1, lab2);
11541 gimple_seq_add_stmt (&new_dlist, g);
11542 g = gimple_build_label (lab1);
11543 gimple_seq_add_stmt (&new_dlist, g);
11544 gimple_seq_add_seq (&new_dlist, last_list);
11545 g = gimple_build_label (lab2);
11546 gimple_seq_add_stmt (&new_dlist, g);
11547 gimple_seq_add_seq (&new_dlist, *dlist);
11548 *dlist = new_dlist;
11551 /* Build an internal UNIQUE function with type IFN_UNIQUE_OACC_PRIVATE listing
11552 the addresses of variables to be made private at the surrounding
11553 parallelism level. Such functions appear in the gimple code stream in two
11554 forms, e.g. for a partitioned loop:
11556 .data_dep.6 = .UNIQUE (OACC_HEAD_MARK, .data_dep.6, 1, 68);
11557 .data_dep.6 = .UNIQUE (OACC_PRIVATE, .data_dep.6, -1, &w);
11558 .data_dep.6 = .UNIQUE (OACC_FORK, .data_dep.6, -1);
11559 .data_dep.6 = .UNIQUE (OACC_HEAD_MARK, .data_dep.6);
11561 or alternatively, OACC_PRIVATE can appear at the top level of a parallel,
11562 not as part of a HEAD_MARK sequence:
11564 .UNIQUE (OACC_PRIVATE, 0, 0, &w);
11566 For such stand-alone appearances, the 3rd argument is always 0, denoting
11567 gang partitioning. */
11569 static gcall *
11570 lower_oacc_private_marker (omp_context *ctx)
11572 if (ctx->oacc_privatization_candidates.length () == 0)
11573 return NULL;
11575 auto_vec<tree, 5> args;
11577 args.quick_push (build_int_cst (integer_type_node, IFN_UNIQUE_OACC_PRIVATE));
11578 args.quick_push (integer_zero_node);
11579 args.quick_push (integer_minus_one_node);
11581 int i;
11582 tree decl;
11583 FOR_EACH_VEC_ELT (ctx->oacc_privatization_candidates, i, decl)
11585 gcc_checking_assert (TREE_ADDRESSABLE (decl));
11586 tree addr = build_fold_addr_expr (decl);
11587 args.safe_push (addr);
11590 return gimple_build_call_internal_vec (IFN_UNIQUE, args);
/* Lower code for an OMP loop directive.  Replaces the GIMPLE_OMP_FOR at
   *GSI_P, using lowering context CTX, with a GIMPLE_BIND that contains the
   lowered input clauses, the loop statement itself, the continue/return
   markers and the clause destructor code.  */

static void
lower_omp_for (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree *rhs_p, block;
  struct omp_for_data fd, *fdp = NULL;
  gomp_for *stmt = as_a <gomp_for *> (gsi_stmt (*gsi_p));
  gbind *new_stmt;
  gimple_seq omp_for_body, body, dlist, tred_ilist = NULL, tred_dlist = NULL;
  gimple_seq cnt_list = NULL, clist = NULL;
  gimple_seq oacc_head = NULL, oacc_tail = NULL;
  size_t i;

  push_gimplify_context ();

  if (is_gimple_omp_oacc (ctx->stmt))
    oacc_privatization_scan_clause_chain (ctx, gimple_omp_for_clauses (stmt));

  lower_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);

  block = make_node (BLOCK);
  new_stmt = gimple_build_bind (NULL, NULL, block);
  /* Replace at gsi right away, so that 'stmt' is no member
     of a sequence anymore as we're going to add to a different
     one below.  */
  gsi_replace (gsi_p, new_stmt, true);

  /* Move declaration of temporaries in the loop body before we make
     it go away.  */
  omp_for_body = gimple_omp_body (stmt);
  if (!gimple_seq_empty_p (omp_for_body)
      && gimple_code (gimple_seq_first_stmt (omp_for_body)) == GIMPLE_BIND)
    {
      gbind *inner_bind
	= as_a <gbind *> (gimple_seq_first_stmt (omp_for_body));
      tree vars = gimple_bind_vars (inner_bind);
      if (is_gimple_omp_oacc (ctx->stmt))
	oacc_privatization_scan_decl_chain (ctx, vars);
      gimple_bind_append_vars (new_stmt, vars);
      /* bind_vars/BLOCK_VARS are being moved to new_stmt/block, don't
	 keep them on the inner_bind and it's block.  */
      gimple_bind_set_vars (inner_bind, NULL_TREE);
      if (gimple_bind_block (inner_bind))
	BLOCK_VARS (gimple_bind_block (inner_bind)) = NULL_TREE;
    }

  if (gimple_omp_for_combined_into_p (stmt))
    {
      omp_extract_for_data (stmt, &fd, NULL);
      fdp = &fd;

      /* We need two temporaries with fd.loop.v type (istart/iend)
	 and then (fd.collapse - 1) temporaries with the same
	 type for count2 ... countN-1 vars if not constant.  */
      size_t count = 2;
      tree type = fd.iter_type;
      if (fd.collapse > 1
	  && TREE_CODE (fd.loop.n2) != INTEGER_CST)
	count += fd.collapse - 1;
      size_t count2 = 0;
      tree type2 = NULL_TREE;
      bool taskreg_for
	= (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR
	   || gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP);
      tree outerc = NULL, *pc = gimple_omp_for_clauses_ptr (stmt);
      tree simtc = NULL;
      tree clauses = *pc;
      /* For a doubly-nested non-rectangular loop nest with a signed
	 iterator, three extra temporaries of the iterator's type are
	 needed.  */
      if (fd.collapse > 1
	  && fd.non_rect
	  && fd.last_nonrect == fd.first_nonrect + 1
	  && TREE_CODE (fd.loop.n2) != INTEGER_CST)
	if (tree v = gimple_omp_for_index (stmt, fd.last_nonrect))
	  if (!TYPE_UNSIGNED (TREE_TYPE (v)))
	    {
	      v = gimple_omp_for_index (stmt, fd.first_nonrect);
	      type2 = TREE_TYPE (v);
	      count++;
	      count2 = 3;
	    }
      if (taskreg_for)
	outerc
	  = omp_find_clause (gimple_omp_taskreg_clauses (ctx->outer->stmt),
			     OMP_CLAUSE__LOOPTEMP_);
      if (ctx->simt_stmt)
	simtc = omp_find_clause (gimple_omp_for_clauses (ctx->simt_stmt),
				 OMP_CLAUSE__LOOPTEMP_);
      /* Prepend one _looptemp_ clause per required temporary; the original
	 clause list is re-attached at *pc afterwards.  */
      for (i = 0; i < count + count2; i++)
	{
	  tree temp;
	  if (taskreg_for)
	    {
	      gcc_assert (outerc);
	      temp = lookup_decl (OMP_CLAUSE_DECL (outerc), ctx->outer);
	      outerc = omp_find_clause (OMP_CLAUSE_CHAIN (outerc),
					OMP_CLAUSE__LOOPTEMP_);
	    }
	  else
	    {
	      /* If there are 2 adjacent SIMD stmts, one with _simt_
		 clause, another without, make sure they have the same
		 decls in _looptemp_ clauses, because the outer stmt
		 they are combined into will look up just one inner_stmt.  */
	      if (ctx->simt_stmt)
		temp = OMP_CLAUSE_DECL (simtc);
	      else
		temp = create_tmp_var (i >= count ? type2 : type);
	      insert_decl_map (&ctx->outer->cb, temp, temp);
	    }
	  *pc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
	  OMP_CLAUSE_DECL (*pc) = temp;
	  pc = &OMP_CLAUSE_CHAIN (*pc);
	  if (ctx->simt_stmt)
	    simtc = omp_find_clause (OMP_CLAUSE_CHAIN (simtc),
				     OMP_CLAUSE__LOOPTEMP_);
	}
      *pc = clauses;
    }

  /* The pre-body and input clauses go before the lowered GIMPLE_OMP_FOR.  */
  dlist = NULL;
  body = NULL;
  tree rclauses
    = omp_task_reductions_find_first (gimple_omp_for_clauses (stmt), OMP_FOR,
				      OMP_CLAUSE_REDUCTION);
  tree rtmp = NULL_TREE;
  if (rclauses)
    {
      /* Task reductions need a _reductemp_ clause holding a pointer-sized
	 temporary; remember it in RTMP so its decl can be installed on the
	 clause once the body is built.  */
      tree type = build_pointer_type (pointer_sized_int_node);
      tree temp = create_tmp_var (type);
      tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
      OMP_CLAUSE_DECL (c) = temp;
      OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (stmt);
      gimple_omp_for_set_clauses (stmt, c);
      lower_omp_task_reductions (ctx, OMP_FOR,
				 gimple_omp_for_clauses (stmt),
				 &tred_ilist, &tred_dlist);
      rclauses = c;
      rtmp = make_ssa_name (type);
      gimple_seq_add_stmt (&body, gimple_build_assign (rtmp, temp));
    }

  lower_lastprivate_conditional_clauses (gimple_omp_for_clauses_ptr (stmt),
					 ctx);

  lower_rec_input_clauses (gimple_omp_for_clauses (stmt), &body, &dlist, ctx,
			   fdp);
  gimple_seq_add_seq (rclauses ? &tred_ilist : &body,
		      gimple_omp_for_pre_body (stmt));

  lower_omp (gimple_omp_body_ptr (stmt), ctx);

  gcall *private_marker = NULL;
  if (is_gimple_omp_oacc (ctx->stmt)
      && !gimple_seq_empty_p (omp_for_body))
    private_marker = lower_oacc_private_marker (ctx);

  /* Lower the header expressions.  At this point, we can assume that
     the header is of the form:

	#pragma omp for (V = VAL1; V {<|>|<=|>=} VAL2; V = V [+-] VAL3)

     We just need to make sure that VAL1, VAL2 and VAL3 are lowered
     using the .omp_data_s mapping, if needed.  */
  for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
    {
      /* Initial value; a TREE_VEC here carries the non-rectangular form
	 (outer-var factor in elt 1, addend in elt 2).  */
      rhs_p = gimple_omp_for_initial_ptr (stmt, i);
      if (TREE_CODE (*rhs_p) == TREE_VEC)
	{
	  if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 1)))
	    TREE_VEC_ELT (*rhs_p, 1)
	      = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 1), &cnt_list);
	  if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 2)))
	    TREE_VEC_ELT (*rhs_p, 2)
	      = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 2), &cnt_list);
	}
      else if (!is_gimple_min_invariant (*rhs_p))
	*rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
      else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
	recompute_tree_invariant_for_addr_expr (*rhs_p);

      /* Final (bound) value, handled the same way.  */
      rhs_p = gimple_omp_for_final_ptr (stmt, i);
      if (TREE_CODE (*rhs_p) == TREE_VEC)
	{
	  if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 1)))
	    TREE_VEC_ELT (*rhs_p, 1)
	      = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 1), &cnt_list);
	  if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 2)))
	    TREE_VEC_ELT (*rhs_p, 2)
	      = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 2), &cnt_list);
	}
      else if (!is_gimple_min_invariant (*rhs_p))
	*rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
      else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
	recompute_tree_invariant_for_addr_expr (*rhs_p);

      /* Step expression.  */
      rhs_p = &TREE_OPERAND (gimple_omp_for_incr (stmt, i), 1);
      if (!is_gimple_min_invariant (*rhs_p))
	*rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
    }
  if (rclauses)
    gimple_seq_add_seq (&tred_ilist, cnt_list);
  else
    gimple_seq_add_seq (&body, cnt_list);

  /* Once lowered, extract the bounds and clauses.  */
  omp_extract_for_data (stmt, &fd, NULL);

  if (is_gimple_omp_oacc (ctx->stmt)
      && !ctx_in_oacc_kernels_region (ctx))
    lower_oacc_head_tail (gimple_location (stmt),
			  gimple_omp_for_clauses (stmt), private_marker,
			  &oacc_head, &oacc_tail, ctx);

  /* Add OpenACC partitioning and reduction markers just before the loop.  */
  if (oacc_head)
    gimple_seq_add_seq (&body, oacc_head);

  lower_omp_for_lastprivate (&fd, &body, &dlist, &clist, ctx);

  if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
    for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
	  && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
	{
	  OMP_CLAUSE_DECL (c) = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
	  if (DECL_P (OMP_CLAUSE_LINEAR_STEP (c)))
	    OMP_CLAUSE_LINEAR_STEP (c)
	      = maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_LINEAR_STEP (c),
						ctx);
	}

  /* Scan clauses on a worksharing loop are lowered separately; otherwise
     the loop stmt and its body go into BODY directly.  */
  if ((ctx->scan_inclusive || ctx->scan_exclusive)
      && gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
    lower_omp_for_scan (&body, &dlist, stmt, &fd, ctx);
  else
    {
      gimple_seq_add_stmt (&body, stmt);
      gimple_seq_add_seq (&body, gimple_omp_body (stmt));
    }

  gimple_seq_add_stmt (&body, gimple_build_omp_continue (fd.loop.v,
							 fd.loop.v));

  /* After the loop, add exit clauses.  */
  lower_reduction_clauses (gimple_omp_for_clauses (stmt), &body, &clist, ctx);

  if (clist)
    {
      /* Atomic reduction epilogue: wrap CLIST in GOMP_atomic_start/end.  */
      tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START);
      gcall *g = gimple_build_call (fndecl, 0);
      gimple_seq_add_stmt (&body, g);
      gimple_seq_add_seq (&body, clist);
      fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END);
      g = gimple_build_call (fndecl, 0);
      gimple_seq_add_stmt (&body, g);
    }

  if (ctx->cancellable)
    gimple_seq_add_stmt (&body, gimple_build_label (ctx->cancel_label));

  gimple_seq_add_seq (&body, dlist);

  if (rclauses)
    {
      /* Task-reduction init code wraps the whole body.  */
      gimple_seq_add_seq (&tred_ilist, body);
      body = tred_ilist;
    }

  body = maybe_catch_exception (body);

  /* Region exit marker goes at the end of the loop body.  */
  gimple *g = gimple_build_omp_return (fd.have_nowait);
  gimple_seq_add_stmt (&body, g);

  gimple_seq_add_seq (&body, tred_dlist);

  maybe_add_implicit_barrier_cancel (ctx, g, &body);

  if (rclauses)
    OMP_CLAUSE_DECL (rclauses) = rtmp;

  /* Add OpenACC joining and reduction markers just after the loop.  */
  if (oacc_tail)
    gimple_seq_add_seq (&body, oacc_tail);

  pop_gimplify_context (new_stmt);

  gimple_bind_append_vars (new_stmt, ctx->block_vars);
  maybe_remove_omp_member_access_dummy_vars (new_stmt);
  BLOCK_VARS (block) = gimple_bind_vars (new_stmt);
  if (BLOCK_VARS (block))
    TREE_USED (block) = 1;

  gimple_bind_set_body (new_stmt, body);
  gimple_omp_set_body (stmt, NULL);
  gimple_omp_for_set_pre_body (stmt, NULL);
}
11892 /* Callback for walk_stmts. Check if the current statement only contains
11893 GIMPLE_OMP_FOR or GIMPLE_OMP_SECTIONS. */
11895 static tree
11896 check_combined_parallel (gimple_stmt_iterator *gsi_p,
11897 bool *handled_ops_p,
11898 struct walk_stmt_info *wi)
11900 int *info = (int *) wi->info;
11901 gimple *stmt = gsi_stmt (*gsi_p);
11903 *handled_ops_p = true;
11904 switch (gimple_code (stmt))
11906 WALK_SUBSTMTS;
11908 case GIMPLE_DEBUG:
11909 break;
11910 case GIMPLE_OMP_FOR:
11911 case GIMPLE_OMP_SECTIONS:
11912 *info = *info == 0 ? 1 : -1;
11913 break;
11914 default:
11915 *info = -1;
11916 break;
11918 return NULL;
/* Context carried through the task-copy-function helpers below
   (task_copyfn_copy_decl, task_copyfn_remap_type).  */

struct omp_taskcopy_context
{
  /* This field must be at the beginning, as we do "inheritance": Some
     callback functions for tree-inline.cc (e.g., omp_copy_decl)
     receive a copy_body_data pointer that is up-casted to an
     omp_context pointer.  */
  copy_body_data cb;
  /* Lowering context of the task whose copy function is being built.  */
  omp_context *ctx;
};
11931 static tree
11932 task_copyfn_copy_decl (tree var, copy_body_data *cb)
11934 struct omp_taskcopy_context *tcctx = (struct omp_taskcopy_context *) cb;
11936 if (splay_tree_lookup (tcctx->ctx->sfield_map, (splay_tree_key) var))
11937 return create_tmp_var (TREE_TYPE (var));
11939 return var;
11942 static tree
11943 task_copyfn_remap_type (struct omp_taskcopy_context *tcctx, tree orig_type)
11945 tree name, new_fields = NULL, type, f;
11947 type = lang_hooks.types.make_type (RECORD_TYPE);
11948 name = DECL_NAME (TYPE_NAME (orig_type));
11949 name = build_decl (gimple_location (tcctx->ctx->stmt),
11950 TYPE_DECL, name, type);
11951 TYPE_NAME (type) = name;
11953 for (f = TYPE_FIELDS (orig_type); f ; f = TREE_CHAIN (f))
11955 tree new_f = copy_node (f);
11956 DECL_CONTEXT (new_f) = type;
11957 TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &tcctx->cb);
11958 TREE_CHAIN (new_f) = new_fields;
11959 walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &tcctx->cb, NULL);
11960 walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r, &tcctx->cb, NULL);
11961 walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
11962 &tcctx->cb, NULL);
11963 new_fields = new_f;
11964 tcctx->cb.decl_map->put (f, new_f);
11966 TYPE_FIELDS (type) = nreverse (new_fields);
11967 layout_type (type);
11968 return type;
11971 /* Create task copyfn. */
11973 static void
11974 create_task_copyfn (gomp_task *task_stmt, omp_context *ctx)
11976 struct function *child_cfun;
11977 tree child_fn, t, c, src, dst, f, sf, arg, sarg, decl;
11978 tree record_type, srecord_type, bind, list;
11979 bool record_needs_remap = false, srecord_needs_remap = false;
11980 splay_tree_node n;
11981 struct omp_taskcopy_context tcctx;
11982 location_t loc = gimple_location (task_stmt);
11983 size_t looptempno = 0;
11985 child_fn = gimple_omp_task_copy_fn (task_stmt);
11986 task_cpyfns.safe_push (task_stmt);
11987 child_cfun = DECL_STRUCT_FUNCTION (child_fn);
11988 gcc_assert (child_cfun->cfg == NULL);
11989 DECL_SAVED_TREE (child_fn) = alloc_stmt_list ();
11991 /* Reset DECL_CONTEXT on function arguments. */
11992 for (t = DECL_ARGUMENTS (child_fn); t; t = DECL_CHAIN (t))
11993 DECL_CONTEXT (t) = child_fn;
11995 /* Populate the function. */
11996 push_gimplify_context ();
11997 push_cfun (child_cfun);
11999 bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
12000 TREE_SIDE_EFFECTS (bind) = 1;
12001 list = NULL;
12002 DECL_SAVED_TREE (child_fn) = bind;
12003 DECL_SOURCE_LOCATION (child_fn) = gimple_location (task_stmt);
12005 /* Remap src and dst argument types if needed. */
12006 record_type = ctx->record_type;
12007 srecord_type = ctx->srecord_type;
12008 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
12009 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
12011 record_needs_remap = true;
12012 break;
12014 for (f = TYPE_FIELDS (srecord_type); f ; f = DECL_CHAIN (f))
12015 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
12017 srecord_needs_remap = true;
12018 break;
12021 if (record_needs_remap || srecord_needs_remap)
12023 memset (&tcctx, '\0', sizeof (tcctx));
12024 tcctx.cb.src_fn = ctx->cb.src_fn;
12025 tcctx.cb.dst_fn = child_fn;
12026 tcctx.cb.src_node = cgraph_node::get (tcctx.cb.src_fn);
12027 gcc_checking_assert (tcctx.cb.src_node);
12028 tcctx.cb.dst_node = tcctx.cb.src_node;
12029 tcctx.cb.src_cfun = ctx->cb.src_cfun;
12030 tcctx.cb.copy_decl = task_copyfn_copy_decl;
12031 tcctx.cb.eh_lp_nr = 0;
12032 tcctx.cb.transform_call_graph_edges = CB_CGE_MOVE;
12033 tcctx.cb.decl_map = new hash_map<tree, tree>;
12034 tcctx.ctx = ctx;
12036 if (record_needs_remap)
12037 record_type = task_copyfn_remap_type (&tcctx, record_type);
12038 if (srecord_needs_remap)
12039 srecord_type = task_copyfn_remap_type (&tcctx, srecord_type);
12041 else
12042 tcctx.cb.decl_map = NULL;
12044 arg = DECL_ARGUMENTS (child_fn);
12045 TREE_TYPE (arg) = build_pointer_type (record_type);
12046 sarg = DECL_CHAIN (arg);
12047 TREE_TYPE (sarg) = build_pointer_type (srecord_type);
12049 /* First pass: initialize temporaries used in record_type and srecord_type
12050 sizes and field offsets. */
12051 if (tcctx.cb.decl_map)
12052 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
12053 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
12055 tree *p;
12057 decl = OMP_CLAUSE_DECL (c);
12058 p = tcctx.cb.decl_map->get (decl);
12059 if (p == NULL)
12060 continue;
12061 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
12062 sf = (tree) n->value;
12063 sf = *tcctx.cb.decl_map->get (sf);
12064 src = build_simple_mem_ref_loc (loc, sarg);
12065 src = omp_build_component_ref (src, sf);
12066 t = build2 (MODIFY_EXPR, TREE_TYPE (*p), *p, src);
12067 append_to_statement_list (t, &list);
12070 /* Second pass: copy shared var pointers and copy construct non-VLA
12071 firstprivate vars. */
12072 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
12073 switch (OMP_CLAUSE_CODE (c))
12075 splay_tree_key key;
12076 case OMP_CLAUSE_SHARED:
12077 decl = OMP_CLAUSE_DECL (c);
12078 key = (splay_tree_key) decl;
12079 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
12080 key = (splay_tree_key) &DECL_UID (decl);
12081 n = splay_tree_lookup (ctx->field_map, key);
12082 if (n == NULL)
12083 break;
12084 f = (tree) n->value;
12085 if (tcctx.cb.decl_map)
12086 f = *tcctx.cb.decl_map->get (f);
12087 n = splay_tree_lookup (ctx->sfield_map, key);
12088 sf = (tree) n->value;
12089 if (tcctx.cb.decl_map)
12090 sf = *tcctx.cb.decl_map->get (sf);
12091 src = build_simple_mem_ref_loc (loc, sarg);
12092 src = omp_build_component_ref (src, sf);
12093 dst = build_simple_mem_ref_loc (loc, arg);
12094 dst = omp_build_component_ref (dst, f);
12095 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
12096 append_to_statement_list (t, &list);
12097 break;
12098 case OMP_CLAUSE_REDUCTION:
12099 case OMP_CLAUSE_IN_REDUCTION:
12100 decl = OMP_CLAUSE_DECL (c);
12101 if (TREE_CODE (decl) == MEM_REF)
12103 decl = TREE_OPERAND (decl, 0);
12104 if (TREE_CODE (decl) == POINTER_PLUS_EXPR)
12105 decl = TREE_OPERAND (decl, 0);
12106 if (TREE_CODE (decl) == INDIRECT_REF
12107 || TREE_CODE (decl) == ADDR_EXPR)
12108 decl = TREE_OPERAND (decl, 0);
12110 key = (splay_tree_key) decl;
12111 n = splay_tree_lookup (ctx->field_map, key);
12112 if (n == NULL)
12113 break;
12114 f = (tree) n->value;
12115 if (tcctx.cb.decl_map)
12116 f = *tcctx.cb.decl_map->get (f);
12117 n = splay_tree_lookup (ctx->sfield_map, key);
12118 sf = (tree) n->value;
12119 if (tcctx.cb.decl_map)
12120 sf = *tcctx.cb.decl_map->get (sf);
12121 src = build_simple_mem_ref_loc (loc, sarg);
12122 src = omp_build_component_ref (src, sf);
12123 if (decl != OMP_CLAUSE_DECL (c)
12124 && TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
12125 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
12126 src = build_simple_mem_ref_loc (loc, src);
12127 dst = build_simple_mem_ref_loc (loc, arg);
12128 dst = omp_build_component_ref (dst, f);
12129 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
12130 append_to_statement_list (t, &list);
12131 break;
12132 case OMP_CLAUSE__LOOPTEMP_:
12133 /* Fields for first two _looptemp_ clauses are initialized by
12134 GOMP_taskloop*, the rest are handled like firstprivate. */
12135 if (looptempno < 2)
12137 looptempno++;
12138 break;
12140 /* FALLTHRU */
12141 case OMP_CLAUSE__REDUCTEMP_:
12142 case OMP_CLAUSE_FIRSTPRIVATE:
12143 decl = OMP_CLAUSE_DECL (c);
12144 if (is_variable_sized (decl))
12145 break;
12146 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
12147 if (n == NULL)
12148 break;
12149 f = (tree) n->value;
12150 if (tcctx.cb.decl_map)
12151 f = *tcctx.cb.decl_map->get (f);
12152 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
12153 if (n != NULL)
12155 sf = (tree) n->value;
12156 if (tcctx.cb.decl_map)
12157 sf = *tcctx.cb.decl_map->get (sf);
12158 src = build_simple_mem_ref_loc (loc, sarg);
12159 src = omp_build_component_ref (src, sf);
12160 if (use_pointer_for_field (decl, NULL)
12161 || omp_privatize_by_reference (decl))
12162 src = build_simple_mem_ref_loc (loc, src);
12164 else
12165 src = decl;
12166 dst = build_simple_mem_ref_loc (loc, arg);
12167 dst = omp_build_component_ref (dst, f);
12168 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
12169 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
12170 else
12172 if (ctx->allocate_map)
12173 if (tree *allocatorp = ctx->allocate_map->get (decl))
12175 tree allocator = *allocatorp;
12176 HOST_WIDE_INT ialign = 0;
12177 if (TREE_CODE (allocator) == TREE_LIST)
12179 ialign = tree_to_uhwi (TREE_VALUE (allocator));
12180 allocator = TREE_PURPOSE (allocator);
12182 if (TREE_CODE (allocator) != INTEGER_CST)
12184 n = splay_tree_lookup (ctx->sfield_map,
12185 (splay_tree_key) allocator);
12186 allocator = (tree) n->value;
12187 if (tcctx.cb.decl_map)
12188 allocator = *tcctx.cb.decl_map->get (allocator);
12189 tree a = build_simple_mem_ref_loc (loc, sarg);
12190 allocator = omp_build_component_ref (a, allocator);
12192 allocator = fold_convert (pointer_sized_int_node, allocator);
12193 tree a = builtin_decl_explicit (BUILT_IN_GOMP_ALLOC);
12194 tree align = build_int_cst (size_type_node,
12195 MAX (ialign,
12196 DECL_ALIGN_UNIT (decl)));
12197 tree sz = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (dst)));
12198 tree ptr = build_call_expr_loc (loc, a, 3, align, sz,
12199 allocator);
12200 ptr = fold_convert (TREE_TYPE (dst), ptr);
12201 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, ptr);
12202 append_to_statement_list (t, &list);
12203 dst = build_simple_mem_ref_loc (loc, dst);
12205 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
12207 append_to_statement_list (t, &list);
12208 break;
12209 case OMP_CLAUSE_PRIVATE:
12210 if (! OMP_CLAUSE_PRIVATE_OUTER_REF (c))
12211 break;
12212 decl = OMP_CLAUSE_DECL (c);
12213 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
12214 f = (tree) n->value;
12215 if (tcctx.cb.decl_map)
12216 f = *tcctx.cb.decl_map->get (f);
12217 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
12218 if (n != NULL)
12220 sf = (tree) n->value;
12221 if (tcctx.cb.decl_map)
12222 sf = *tcctx.cb.decl_map->get (sf);
12223 src = build_simple_mem_ref_loc (loc, sarg);
12224 src = omp_build_component_ref (src, sf);
12225 if (use_pointer_for_field (decl, NULL))
12226 src = build_simple_mem_ref_loc (loc, src);
12228 else
12229 src = decl;
12230 dst = build_simple_mem_ref_loc (loc, arg);
12231 dst = omp_build_component_ref (dst, f);
12232 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
12233 append_to_statement_list (t, &list);
12234 break;
12235 default:
12236 break;
12239 /* Last pass: handle VLA firstprivates. */
12240 if (tcctx.cb.decl_map)
12241 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
12242 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
12244 tree ind, ptr, df;
12246 decl = OMP_CLAUSE_DECL (c);
12247 if (!is_variable_sized (decl))
12248 continue;
12249 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
12250 if (n == NULL)
12251 continue;
12252 f = (tree) n->value;
12253 f = *tcctx.cb.decl_map->get (f);
12254 gcc_assert (DECL_HAS_VALUE_EXPR_P (decl));
12255 ind = DECL_VALUE_EXPR (decl);
12256 gcc_assert (TREE_CODE (ind) == INDIRECT_REF);
12257 gcc_assert (DECL_P (TREE_OPERAND (ind, 0)));
12258 n = splay_tree_lookup (ctx->sfield_map,
12259 (splay_tree_key) TREE_OPERAND (ind, 0));
12260 sf = (tree) n->value;
12261 sf = *tcctx.cb.decl_map->get (sf);
12262 src = build_simple_mem_ref_loc (loc, sarg);
12263 src = omp_build_component_ref (src, sf);
12264 src = build_simple_mem_ref_loc (loc, src);
12265 dst = build_simple_mem_ref_loc (loc, arg);
12266 dst = omp_build_component_ref (dst, f);
12267 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
12268 append_to_statement_list (t, &list);
12269 n = splay_tree_lookup (ctx->field_map,
12270 (splay_tree_key) TREE_OPERAND (ind, 0));
12271 df = (tree) n->value;
12272 df = *tcctx.cb.decl_map->get (df);
12273 ptr = build_simple_mem_ref_loc (loc, arg);
12274 ptr = omp_build_component_ref (ptr, df);
12275 t = build2 (MODIFY_EXPR, TREE_TYPE (ptr), ptr,
12276 build_fold_addr_expr_loc (loc, dst));
12277 append_to_statement_list (t, &list);
12280 t = build1 (RETURN_EXPR, void_type_node, NULL);
12281 append_to_statement_list (t, &list);
12283 if (tcctx.cb.decl_map)
12284 delete tcctx.cb.decl_map;
12285 pop_gimplify_context (NULL);
12286 BIND_EXPR_BODY (bind) = list;
12287 pop_cfun ();
12290 static void
12291 lower_depend_clauses (tree *pclauses, gimple_seq *iseq, gimple_seq *oseq)
12293 tree c, clauses;
12294 gimple *g;
12295 size_t cnt[4] = { 0, 0, 0, 0 }, idx = 2, i;
12297 clauses = omp_find_clause (*pclauses, OMP_CLAUSE_DEPEND);
12298 gcc_assert (clauses);
12299 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
12300 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
12301 switch (OMP_CLAUSE_DEPEND_KIND (c))
12303 case OMP_CLAUSE_DEPEND_LAST:
12304 /* Lowering already done at gimplification. */
12305 return;
12306 case OMP_CLAUSE_DEPEND_IN:
12307 cnt[2]++;
12308 break;
12309 case OMP_CLAUSE_DEPEND_OUT:
12310 case OMP_CLAUSE_DEPEND_INOUT:
12311 cnt[0]++;
12312 break;
12313 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
12314 cnt[1]++;
12315 break;
12316 case OMP_CLAUSE_DEPEND_DEPOBJ:
12317 cnt[3]++;
12318 break;
12319 case OMP_CLAUSE_DEPEND_SOURCE:
12320 case OMP_CLAUSE_DEPEND_SINK:
12321 /* FALLTHRU */
12322 default:
12323 gcc_unreachable ();
12325 if (cnt[1] || cnt[3])
12326 idx = 5;
12327 size_t total = cnt[0] + cnt[1] + cnt[2] + cnt[3];
12328 tree type = build_array_type_nelts (ptr_type_node, total + idx);
12329 tree array = create_tmp_var (type);
12330 TREE_ADDRESSABLE (array) = 1;
12331 tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
12332 NULL_TREE);
12333 if (idx == 5)
12335 g = gimple_build_assign (r, build_int_cst (ptr_type_node, 0));
12336 gimple_seq_add_stmt (iseq, g);
12337 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
12338 NULL_TREE);
12340 g = gimple_build_assign (r, build_int_cst (ptr_type_node, total));
12341 gimple_seq_add_stmt (iseq, g);
12342 for (i = 0; i < (idx == 5 ? 3 : 1); i++)
12344 r = build4 (ARRAY_REF, ptr_type_node, array,
12345 size_int (i + 1 + (idx == 5)), NULL_TREE, NULL_TREE);
12346 g = gimple_build_assign (r, build_int_cst (ptr_type_node, cnt[i]));
12347 gimple_seq_add_stmt (iseq, g);
12349 for (i = 0; i < 4; i++)
12351 if (cnt[i] == 0)
12352 continue;
12353 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
12354 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
12355 continue;
12356 else
12358 switch (OMP_CLAUSE_DEPEND_KIND (c))
12360 case OMP_CLAUSE_DEPEND_IN:
12361 if (i != 2)
12362 continue;
12363 break;
12364 case OMP_CLAUSE_DEPEND_OUT:
12365 case OMP_CLAUSE_DEPEND_INOUT:
12366 if (i != 0)
12367 continue;
12368 break;
12369 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
12370 if (i != 1)
12371 continue;
12372 break;
12373 case OMP_CLAUSE_DEPEND_DEPOBJ:
12374 if (i != 3)
12375 continue;
12376 break;
12377 default:
12378 gcc_unreachable ();
12380 tree t = OMP_CLAUSE_DECL (c);
12381 t = fold_convert (ptr_type_node, t);
12382 gimplify_expr (&t, iseq, NULL, is_gimple_val, fb_rvalue);
12383 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (idx++),
12384 NULL_TREE, NULL_TREE);
12385 g = gimple_build_assign (r, t);
12386 gimple_seq_add_stmt (iseq, g);
12389 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
12390 OMP_CLAUSE_DEPEND_KIND (c) = OMP_CLAUSE_DEPEND_LAST;
12391 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
12392 OMP_CLAUSE_CHAIN (c) = *pclauses;
12393 *pclauses = c;
12394 tree clobber = build_clobber (type);
12395 g = gimple_build_assign (array, clobber);
12396 gimple_seq_add_stmt (oseq, g);
12399 /* Lower the OpenMP parallel or task directive in the current statement
12400 in GSI_P. CTX holds context information for the directive. */
12402 static void
12403 lower_omp_taskreg (gimple_stmt_iterator *gsi_p, omp_context *ctx)
12405 tree clauses;
12406 tree child_fn, t;
12407 gimple *stmt = gsi_stmt (*gsi_p);
12408 gbind *par_bind, *bind, *dep_bind = NULL;
12409 gimple_seq par_body;
12410 location_t loc = gimple_location (stmt);
12412 clauses = gimple_omp_taskreg_clauses (stmt);
12413 if (gimple_code (stmt) == GIMPLE_OMP_TASK
12414 && gimple_omp_task_taskwait_p (stmt))
12416 par_bind = NULL;
12417 par_body = NULL;
12419 else
12421 par_bind
12422 = as_a <gbind *> (gimple_seq_first_stmt (gimple_omp_body (stmt)));
12423 par_body = gimple_bind_body (par_bind);
12425 child_fn = ctx->cb.dst_fn;
12426 if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL
12427 && !gimple_omp_parallel_combined_p (stmt))
12429 struct walk_stmt_info wi;
12430 int ws_num = 0;
12432 memset (&wi, 0, sizeof (wi));
12433 wi.info = &ws_num;
12434 wi.val_only = true;
12435 walk_gimple_seq (par_body, check_combined_parallel, NULL, &wi);
12436 if (ws_num == 1)
12437 gimple_omp_parallel_set_combined_p (stmt, true);
12439 gimple_seq dep_ilist = NULL;
12440 gimple_seq dep_olist = NULL;
12441 if (gimple_code (stmt) == GIMPLE_OMP_TASK
12442 && omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
12444 push_gimplify_context ();
12445 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
12446 lower_depend_clauses (gimple_omp_task_clauses_ptr (stmt),
12447 &dep_ilist, &dep_olist);
12450 if (gimple_code (stmt) == GIMPLE_OMP_TASK
12451 && gimple_omp_task_taskwait_p (stmt))
12453 if (dep_bind)
12455 gsi_replace (gsi_p, dep_bind, true);
12456 gimple_bind_add_seq (dep_bind, dep_ilist);
12457 gimple_bind_add_stmt (dep_bind, stmt);
12458 gimple_bind_add_seq (dep_bind, dep_olist);
12459 pop_gimplify_context (dep_bind);
12461 return;
12464 if (ctx->srecord_type)
12465 create_task_copyfn (as_a <gomp_task *> (stmt), ctx);
12467 gimple_seq tskred_ilist = NULL;
12468 gimple_seq tskred_olist = NULL;
12469 if ((is_task_ctx (ctx)
12470 && gimple_omp_task_taskloop_p (ctx->stmt)
12471 && omp_find_clause (gimple_omp_task_clauses (ctx->stmt),
12472 OMP_CLAUSE_REDUCTION))
12473 || (is_parallel_ctx (ctx)
12474 && omp_find_clause (gimple_omp_parallel_clauses (stmt),
12475 OMP_CLAUSE__REDUCTEMP_)))
12477 if (dep_bind == NULL)
12479 push_gimplify_context ();
12480 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
12482 lower_omp_task_reductions (ctx, is_task_ctx (ctx) ? OMP_TASKLOOP
12483 : OMP_PARALLEL,
12484 gimple_omp_taskreg_clauses (ctx->stmt),
12485 &tskred_ilist, &tskred_olist);
12488 push_gimplify_context ();
12490 gimple_seq par_olist = NULL;
12491 gimple_seq par_ilist = NULL;
12492 gimple_seq par_rlist = NULL;
12493 lower_rec_input_clauses (clauses, &par_ilist, &par_olist, ctx, NULL);
12494 lower_omp (&par_body, ctx);
12495 if (gimple_code (stmt) != GIMPLE_OMP_TASK)
12496 lower_reduction_clauses (clauses, &par_rlist, NULL, ctx);
12498 /* Declare all the variables created by mapping and the variables
12499 declared in the scope of the parallel body. */
12500 record_vars_into (ctx->block_vars, child_fn);
12501 maybe_remove_omp_member_access_dummy_vars (par_bind);
12502 record_vars_into (gimple_bind_vars (par_bind), child_fn);
12504 if (ctx->record_type)
12506 ctx->sender_decl
12507 = create_tmp_var (ctx->srecord_type ? ctx->srecord_type
12508 : ctx->record_type, ".omp_data_o");
12509 DECL_NAMELESS (ctx->sender_decl) = 1;
12510 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
12511 gimple_omp_taskreg_set_data_arg (stmt, ctx->sender_decl);
12514 gimple_seq olist = NULL;
12515 gimple_seq ilist = NULL;
12516 lower_send_clauses (clauses, &ilist, &olist, ctx);
12517 lower_send_shared_vars (&ilist, &olist, ctx);
12519 if (ctx->record_type)
12521 tree clobber = build_clobber (TREE_TYPE (ctx->sender_decl));
12522 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
12523 clobber));
12526 /* Once all the expansions are done, sequence all the different
12527 fragments inside gimple_omp_body. */
12529 gimple_seq new_body = NULL;
12531 if (ctx->record_type)
12533 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
12534 /* fixup_child_record_type might have changed receiver_decl's type. */
12535 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
12536 gimple_seq_add_stmt (&new_body,
12537 gimple_build_assign (ctx->receiver_decl, t));
12540 gimple_seq_add_seq (&new_body, par_ilist);
12541 gimple_seq_add_seq (&new_body, par_body);
12542 gimple_seq_add_seq (&new_body, par_rlist);
12543 if (ctx->cancellable)
12544 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
12545 gimple_seq_add_seq (&new_body, par_olist);
12546 new_body = maybe_catch_exception (new_body);
12547 if (gimple_code (stmt) == GIMPLE_OMP_TASK)
12548 gimple_seq_add_stmt (&new_body,
12549 gimple_build_omp_continue (integer_zero_node,
12550 integer_zero_node));
12551 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
12552 gimple_omp_set_body (stmt, new_body);
12554 if (dep_bind && gimple_bind_block (par_bind) == NULL_TREE)
12555 bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
12556 else
12557 bind = gimple_build_bind (NULL, NULL, gimple_bind_block (par_bind));
12558 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
12559 gimple_bind_add_seq (bind, ilist);
12560 gimple_bind_add_stmt (bind, stmt);
12561 gimple_bind_add_seq (bind, olist);
12563 pop_gimplify_context (NULL);
12565 if (dep_bind)
12567 gimple_bind_add_seq (dep_bind, dep_ilist);
12568 gimple_bind_add_seq (dep_bind, tskred_ilist);
12569 gimple_bind_add_stmt (dep_bind, bind);
12570 gimple_bind_add_seq (dep_bind, tskred_olist);
12571 gimple_bind_add_seq (dep_bind, dep_olist);
12572 pop_gimplify_context (dep_bind);
12576 /* Lower the GIMPLE_OMP_TARGET in the current statement
12577 in GSI_P. CTX holds context information for the directive. */
12579 static void
12580 lower_omp_target (gimple_stmt_iterator *gsi_p, omp_context *ctx)
12582 tree clauses;
12583 tree child_fn, t, c;
12584 gomp_target *stmt = as_a <gomp_target *> (gsi_stmt (*gsi_p));
12585 gbind *tgt_bind, *bind, *dep_bind = NULL;
12586 gimple_seq tgt_body, olist, ilist, fplist, new_body;
12587 location_t loc = gimple_location (stmt);
12588 bool offloaded, data_region;
12589 unsigned int map_cnt = 0;
12590 tree in_reduction_clauses = NULL_TREE;
12592 offloaded = is_gimple_omp_offloaded (stmt);
12593 switch (gimple_omp_target_kind (stmt))
12595 case GF_OMP_TARGET_KIND_REGION:
12596 tree *p, *q;
12597 q = &in_reduction_clauses;
12598 for (p = gimple_omp_target_clauses_ptr (stmt); *p; )
12599 if (OMP_CLAUSE_CODE (*p) == OMP_CLAUSE_IN_REDUCTION)
12601 *q = *p;
12602 q = &OMP_CLAUSE_CHAIN (*q);
12603 *p = OMP_CLAUSE_CHAIN (*p);
12605 else
12606 p = &OMP_CLAUSE_CHAIN (*p);
12607 *q = NULL_TREE;
12608 *p = in_reduction_clauses;
12609 /* FALLTHRU */
12610 case GF_OMP_TARGET_KIND_UPDATE:
12611 case GF_OMP_TARGET_KIND_ENTER_DATA:
12612 case GF_OMP_TARGET_KIND_EXIT_DATA:
12613 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
12614 case GF_OMP_TARGET_KIND_OACC_KERNELS:
12615 case GF_OMP_TARGET_KIND_OACC_SERIAL:
12616 case GF_OMP_TARGET_KIND_OACC_UPDATE:
12617 case GF_OMP_TARGET_KIND_OACC_ENTER_DATA:
12618 case GF_OMP_TARGET_KIND_OACC_EXIT_DATA:
12619 case GF_OMP_TARGET_KIND_OACC_DECLARE:
12620 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED:
12621 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE:
12622 data_region = false;
12623 break;
12624 case GF_OMP_TARGET_KIND_DATA:
12625 case GF_OMP_TARGET_KIND_OACC_DATA:
12626 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
12627 case GF_OMP_TARGET_KIND_OACC_DATA_KERNELS:
12628 data_region = true;
12629 break;
12630 default:
12631 gcc_unreachable ();
12634 clauses = gimple_omp_target_clauses (stmt);
12636 gimple_seq dep_ilist = NULL;
12637 gimple_seq dep_olist = NULL;
12638 bool has_depend = omp_find_clause (clauses, OMP_CLAUSE_DEPEND) != NULL_TREE;
12639 if (has_depend || in_reduction_clauses)
12641 push_gimplify_context ();
12642 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
12643 if (has_depend)
12644 lower_depend_clauses (gimple_omp_target_clauses_ptr (stmt),
12645 &dep_ilist, &dep_olist);
12646 if (in_reduction_clauses)
12647 lower_rec_input_clauses (in_reduction_clauses, &dep_ilist, &dep_olist,
12648 ctx, NULL);
12651 tgt_bind = NULL;
12652 tgt_body = NULL;
12653 if (offloaded)
12655 tgt_bind = gimple_seq_first_stmt_as_a_bind (gimple_omp_body (stmt));
12656 tgt_body = gimple_bind_body (tgt_bind);
12658 else if (data_region)
12659 tgt_body = gimple_omp_body (stmt);
12660 child_fn = ctx->cb.dst_fn;
12662 push_gimplify_context ();
12663 fplist = NULL;
12665 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
12666 switch (OMP_CLAUSE_CODE (c))
12668 tree var, x;
12670 default:
12671 break;
12672 case OMP_CLAUSE_MAP:
12673 #if CHECKING_P
12674 /* First check what we're prepared to handle in the following. */
12675 switch (OMP_CLAUSE_MAP_KIND (c))
12677 case GOMP_MAP_ALLOC:
12678 case GOMP_MAP_TO:
12679 case GOMP_MAP_FROM:
12680 case GOMP_MAP_TOFROM:
12681 case GOMP_MAP_POINTER:
12682 case GOMP_MAP_TO_PSET:
12683 case GOMP_MAP_DELETE:
12684 case GOMP_MAP_RELEASE:
12685 case GOMP_MAP_ALWAYS_TO:
12686 case GOMP_MAP_ALWAYS_FROM:
12687 case GOMP_MAP_ALWAYS_TOFROM:
12688 case GOMP_MAP_FIRSTPRIVATE_POINTER:
12689 case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
12690 case GOMP_MAP_STRUCT:
12691 case GOMP_MAP_ALWAYS_POINTER:
12692 case GOMP_MAP_ATTACH:
12693 case GOMP_MAP_DETACH:
12694 case GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION:
12695 case GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION:
12696 break;
12697 case GOMP_MAP_IF_PRESENT:
12698 case GOMP_MAP_FORCE_ALLOC:
12699 case GOMP_MAP_FORCE_TO:
12700 case GOMP_MAP_FORCE_FROM:
12701 case GOMP_MAP_FORCE_TOFROM:
12702 case GOMP_MAP_FORCE_PRESENT:
12703 case GOMP_MAP_FORCE_DEVICEPTR:
12704 case GOMP_MAP_DEVICE_RESIDENT:
12705 case GOMP_MAP_LINK:
12706 case GOMP_MAP_FORCE_DETACH:
12707 gcc_assert (is_gimple_omp_oacc (stmt));
12708 break;
12709 default:
12710 gcc_unreachable ();
12712 #endif
12713 /* FALLTHRU */
12714 case OMP_CLAUSE_TO:
12715 case OMP_CLAUSE_FROM:
12716 oacc_firstprivate:
12717 var = OMP_CLAUSE_DECL (c);
12718 if (!DECL_P (var))
12720 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP
12721 || (!OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
12722 && (OMP_CLAUSE_MAP_KIND (c)
12723 != GOMP_MAP_FIRSTPRIVATE_POINTER)))
12724 map_cnt++;
12725 continue;
12728 if (DECL_SIZE (var)
12729 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
12731 tree var2 = DECL_VALUE_EXPR (var);
12732 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
12733 var2 = TREE_OPERAND (var2, 0);
12734 gcc_assert (DECL_P (var2));
12735 var = var2;
12738 if (offloaded
12739 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12740 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
12741 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
12743 if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
12745 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx))
12746 && varpool_node::get_create (var)->offloadable)
12747 continue;
12749 tree type = build_pointer_type (TREE_TYPE (var));
12750 tree new_var = lookup_decl (var, ctx);
12751 x = create_tmp_var_raw (type, get_name (new_var));
12752 gimple_add_tmp_var (x);
12753 x = build_simple_mem_ref (x);
12754 SET_DECL_VALUE_EXPR (new_var, x);
12755 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12757 continue;
12760 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12761 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
12762 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)
12763 && is_omp_target (stmt))
12765 gcc_assert (maybe_lookup_field (c, ctx));
12766 map_cnt++;
12767 continue;
12770 if (!maybe_lookup_field (var, ctx))
12771 continue;
12773 /* Don't remap compute constructs' reduction variables, because the
12774 intermediate result must be local to each gang. */
12775 if (offloaded && !(OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12776 && is_gimple_omp_oacc (ctx->stmt)
12777 && OMP_CLAUSE_MAP_IN_REDUCTION (c)))
12779 x = build_receiver_ref (var, true, ctx);
12780 tree new_var = lookup_decl (var, ctx);
12782 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12783 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
12784 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
12785 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
12786 x = build_simple_mem_ref (x);
12787 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
12789 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
12790 if (omp_privatize_by_reference (new_var)
12791 && (TREE_CODE (TREE_TYPE (new_var)) != POINTER_TYPE
12792 || DECL_BY_REFERENCE (var)))
12794 /* Create a local object to hold the instance
12795 value. */
12796 tree type = TREE_TYPE (TREE_TYPE (new_var));
12797 const char *id = IDENTIFIER_POINTER (DECL_NAME (new_var));
12798 tree inst = create_tmp_var (type, id);
12799 gimplify_assign (inst, fold_indirect_ref (x), &fplist);
12800 x = build_fold_addr_expr (inst);
12802 gimplify_assign (new_var, x, &fplist);
12804 else if (DECL_P (new_var))
12806 SET_DECL_VALUE_EXPR (new_var, x);
12807 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12809 else
12810 gcc_unreachable ();
12812 map_cnt++;
12813 break;
12815 case OMP_CLAUSE_FIRSTPRIVATE:
12816 gcc_checking_assert (offloaded);
12817 if (is_gimple_omp_oacc (ctx->stmt))
12819 /* No 'firstprivate' clauses on OpenACC 'kernels'. */
12820 gcc_checking_assert (!is_oacc_kernels (ctx));
12821 /* Likewise, on OpenACC 'kernels' decomposed parts. */
12822 gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx));
12824 goto oacc_firstprivate;
12826 map_cnt++;
12827 var = OMP_CLAUSE_DECL (c);
12828 if (!omp_privatize_by_reference (var)
12829 && !is_gimple_reg_type (TREE_TYPE (var)))
12831 tree new_var = lookup_decl (var, ctx);
12832 if (is_variable_sized (var))
12834 tree pvar = DECL_VALUE_EXPR (var);
12835 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
12836 pvar = TREE_OPERAND (pvar, 0);
12837 gcc_assert (DECL_P (pvar));
12838 tree new_pvar = lookup_decl (pvar, ctx);
12839 x = build_fold_indirect_ref (new_pvar);
12840 TREE_THIS_NOTRAP (x) = 1;
12842 else
12843 x = build_receiver_ref (var, true, ctx);
12844 SET_DECL_VALUE_EXPR (new_var, x);
12845 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12847 break;
12849 case OMP_CLAUSE_PRIVATE:
12850 gcc_checking_assert (offloaded);
12851 if (is_gimple_omp_oacc (ctx->stmt))
12853 /* No 'private' clauses on OpenACC 'kernels'. */
12854 gcc_checking_assert (!is_oacc_kernels (ctx));
12855 /* Likewise, on OpenACC 'kernels' decomposed parts. */
12856 gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx));
12858 break;
12860 var = OMP_CLAUSE_DECL (c);
12861 if (is_variable_sized (var))
12863 tree new_var = lookup_decl (var, ctx);
12864 tree pvar = DECL_VALUE_EXPR (var);
12865 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
12866 pvar = TREE_OPERAND (pvar, 0);
12867 gcc_assert (DECL_P (pvar));
12868 tree new_pvar = lookup_decl (pvar, ctx);
12869 x = build_fold_indirect_ref (new_pvar);
12870 TREE_THIS_NOTRAP (x) = 1;
12871 SET_DECL_VALUE_EXPR (new_var, x);
12872 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12874 break;
12876 case OMP_CLAUSE_USE_DEVICE_PTR:
12877 case OMP_CLAUSE_USE_DEVICE_ADDR:
12878 case OMP_CLAUSE_HAS_DEVICE_ADDR:
12879 case OMP_CLAUSE_IS_DEVICE_PTR:
12880 var = OMP_CLAUSE_DECL (c);
12881 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR)
12883 while (TREE_CODE (var) == INDIRECT_REF
12884 || TREE_CODE (var) == ARRAY_REF)
12885 var = TREE_OPERAND (var, 0);
12887 map_cnt++;
12888 if (is_variable_sized (var))
12890 tree new_var = lookup_decl (var, ctx);
12891 tree pvar = DECL_VALUE_EXPR (var);
12892 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
12893 pvar = TREE_OPERAND (pvar, 0);
12894 gcc_assert (DECL_P (pvar));
12895 tree new_pvar = lookup_decl (pvar, ctx);
12896 x = build_fold_indirect_ref (new_pvar);
12897 TREE_THIS_NOTRAP (x) = 1;
12898 SET_DECL_VALUE_EXPR (new_var, x);
12899 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12901 else if (((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
12902 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR)
12903 && !omp_privatize_by_reference (var)
12904 && !omp_is_allocatable_or_ptr (var)
12905 && !lang_hooks.decls.omp_array_data (var, true))
12906 || TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
12908 tree new_var = lookup_decl (var, ctx);
12909 tree type = build_pointer_type (TREE_TYPE (var));
12910 x = create_tmp_var_raw (type, get_name (new_var));
12911 gimple_add_tmp_var (x);
12912 x = build_simple_mem_ref (x);
12913 SET_DECL_VALUE_EXPR (new_var, x);
12914 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12916 else
12918 tree new_var = lookup_decl (var, ctx);
12919 x = create_tmp_var_raw (TREE_TYPE (new_var), get_name (new_var));
12920 gimple_add_tmp_var (x);
12921 SET_DECL_VALUE_EXPR (new_var, x);
12922 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12924 break;
12927 if (offloaded)
12929 target_nesting_level++;
12930 lower_omp (&tgt_body, ctx);
12931 target_nesting_level--;
12933 else if (data_region)
12934 lower_omp (&tgt_body, ctx);
12936 if (offloaded)
12938 /* Declare all the variables created by mapping and the variables
12939 declared in the scope of the target body. */
12940 record_vars_into (ctx->block_vars, child_fn);
12941 maybe_remove_omp_member_access_dummy_vars (tgt_bind);
12942 record_vars_into (gimple_bind_vars (tgt_bind), child_fn);
12945 olist = NULL;
12946 ilist = NULL;
12947 if (ctx->record_type)
12949 ctx->sender_decl
12950 = create_tmp_var (ctx->record_type, ".omp_data_arr");
12951 DECL_NAMELESS (ctx->sender_decl) = 1;
12952 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
12953 t = make_tree_vec (3);
12954 TREE_VEC_ELT (t, 0) = ctx->sender_decl;
12955 TREE_VEC_ELT (t, 1)
12956 = create_tmp_var (build_array_type_nelts (size_type_node, map_cnt),
12957 ".omp_data_sizes");
12958 DECL_NAMELESS (TREE_VEC_ELT (t, 1)) = 1;
12959 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 1)) = 1;
12960 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 1;
12961 tree tkind_type = short_unsigned_type_node;
12962 int talign_shift = 8;
12963 TREE_VEC_ELT (t, 2)
12964 = create_tmp_var (build_array_type_nelts (tkind_type, map_cnt),
12965 ".omp_data_kinds");
12966 DECL_NAMELESS (TREE_VEC_ELT (t, 2)) = 1;
12967 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 2)) = 1;
12968 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 1;
12969 gimple_omp_target_set_data_arg (stmt, t);
12971 vec<constructor_elt, va_gc> *vsize;
12972 vec<constructor_elt, va_gc> *vkind;
12973 vec_alloc (vsize, map_cnt);
12974 vec_alloc (vkind, map_cnt);
12975 unsigned int map_idx = 0;
12977 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
12978 switch (OMP_CLAUSE_CODE (c))
12980 tree ovar, nc, s, purpose, var, x, type;
12981 unsigned int talign;
12983 default:
12984 break;
12986 case OMP_CLAUSE_MAP:
12987 case OMP_CLAUSE_TO:
12988 case OMP_CLAUSE_FROM:
12989 oacc_firstprivate_map:
12990 nc = c;
12991 ovar = OMP_CLAUSE_DECL (c);
12992 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12993 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
12994 || (OMP_CLAUSE_MAP_KIND (c)
12995 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
12996 break;
12997 if (!DECL_P (ovar))
12999 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
13000 && OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
13002 nc = OMP_CLAUSE_CHAIN (c);
13003 gcc_checking_assert (OMP_CLAUSE_DECL (nc)
13004 == get_base_address (ovar));
13005 ovar = OMP_CLAUSE_DECL (nc);
13007 else
13009 tree x = build_sender_ref (ovar, ctx);
13010 tree v = ovar;
13011 if (in_reduction_clauses
13012 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
13013 && OMP_CLAUSE_MAP_IN_REDUCTION (c))
13015 v = unshare_expr (v);
13016 tree *p = &v;
13017 while (handled_component_p (*p)
13018 || TREE_CODE (*p) == INDIRECT_REF
13019 || TREE_CODE (*p) == ADDR_EXPR
13020 || TREE_CODE (*p) == MEM_REF
13021 || TREE_CODE (*p) == NON_LVALUE_EXPR)
13022 p = &TREE_OPERAND (*p, 0);
13023 tree d = *p;
13024 if (is_variable_sized (d))
13026 gcc_assert (DECL_HAS_VALUE_EXPR_P (d));
13027 d = DECL_VALUE_EXPR (d);
13028 gcc_assert (TREE_CODE (d) == INDIRECT_REF);
13029 d = TREE_OPERAND (d, 0);
13030 gcc_assert (DECL_P (d));
13032 splay_tree_key key
13033 = (splay_tree_key) &DECL_CONTEXT (d);
13034 tree nd = (tree) splay_tree_lookup (ctx->field_map,
13035 key)->value;
13036 if (d == *p)
13037 *p = nd;
13038 else
13039 *p = build_fold_indirect_ref (nd);
13041 v = build_fold_addr_expr_with_type (v, ptr_type_node);
13042 gimplify_assign (x, v, &ilist);
13043 nc = NULL_TREE;
13046 else
13048 if (DECL_SIZE (ovar)
13049 && TREE_CODE (DECL_SIZE (ovar)) != INTEGER_CST)
13051 tree ovar2 = DECL_VALUE_EXPR (ovar);
13052 gcc_assert (TREE_CODE (ovar2) == INDIRECT_REF);
13053 ovar2 = TREE_OPERAND (ovar2, 0);
13054 gcc_assert (DECL_P (ovar2));
13055 ovar = ovar2;
13057 if (!maybe_lookup_field (ovar, ctx)
13058 && !(OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
13059 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
13060 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)))
13061 continue;
13064 talign = TYPE_ALIGN_UNIT (TREE_TYPE (ovar));
13065 if (DECL_P (ovar) && DECL_ALIGN_UNIT (ovar) > talign)
13066 talign = DECL_ALIGN_UNIT (ovar);
13068 var = NULL_TREE;
13069 if (nc)
13071 if (in_reduction_clauses
13072 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
13073 && OMP_CLAUSE_MAP_IN_REDUCTION (c))
13075 tree d = ovar;
13076 if (is_variable_sized (d))
13078 gcc_assert (DECL_HAS_VALUE_EXPR_P (d));
13079 d = DECL_VALUE_EXPR (d);
13080 gcc_assert (TREE_CODE (d) == INDIRECT_REF);
13081 d = TREE_OPERAND (d, 0);
13082 gcc_assert (DECL_P (d));
13084 splay_tree_key key
13085 = (splay_tree_key) &DECL_CONTEXT (d);
13086 tree nd = (tree) splay_tree_lookup (ctx->field_map,
13087 key)->value;
13088 if (d == ovar)
13089 var = nd;
13090 else
13091 var = build_fold_indirect_ref (nd);
13093 else
13094 var = lookup_decl_in_outer_ctx (ovar, ctx);
13096 if (nc
13097 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
13098 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
13099 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)
13100 && is_omp_target (stmt))
13102 x = build_sender_ref (c, ctx);
13103 gimplify_assign (x, build_fold_addr_expr (var), &ilist);
13105 else if (nc)
13107 x = build_sender_ref (ovar, ctx);
13109 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
13110 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
13111 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
13112 && TREE_CODE (TREE_TYPE (ovar)) == ARRAY_TYPE)
13114 gcc_assert (offloaded);
13115 tree avar
13116 = create_tmp_var (TREE_TYPE (TREE_TYPE (x)));
13117 mark_addressable (avar);
13118 gimplify_assign (avar, build_fold_addr_expr (var), &ilist);
13119 talign = DECL_ALIGN_UNIT (avar);
13120 avar = build_fold_addr_expr (avar);
13121 gimplify_assign (x, avar, &ilist);
13123 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
13125 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
13126 if (!omp_privatize_by_reference (var))
13128 if (is_gimple_reg (var)
13129 && OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
13130 suppress_warning (var);
13131 var = build_fold_addr_expr (var);
13133 else
13134 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
13135 gimplify_assign (x, var, &ilist);
13137 else if (is_gimple_reg (var))
13139 gcc_assert (offloaded);
13140 tree avar = create_tmp_var (TREE_TYPE (var));
13141 mark_addressable (avar);
13142 enum gomp_map_kind map_kind = OMP_CLAUSE_MAP_KIND (c);
13143 if (GOMP_MAP_COPY_TO_P (map_kind)
13144 || map_kind == GOMP_MAP_POINTER
13145 || map_kind == GOMP_MAP_TO_PSET
13146 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
13148 /* If we need to initialize a temporary
13149 with VAR because it is not addressable, and
13150 the variable hasn't been initialized yet, then
13151 we'll get a warning for the store to avar.
13152 Don't warn in that case, the mapping might
13153 be implicit. */
13154 suppress_warning (var, OPT_Wuninitialized);
13155 gimplify_assign (avar, var, &ilist);
13157 avar = build_fold_addr_expr (avar);
13158 gimplify_assign (x, avar, &ilist);
13159 if ((GOMP_MAP_COPY_FROM_P (map_kind)
13160 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
13161 && !TYPE_READONLY (TREE_TYPE (var)))
13163 x = unshare_expr (x);
13164 x = build_simple_mem_ref (x);
13165 gimplify_assign (var, x, &olist);
13168 else
13170 /* While MAP is handled explicitly by the FE,
13171 for 'target update', only the identified is passed. */
13172 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FROM
13173 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TO)
13174 && (omp_is_allocatable_or_ptr (var)
13175 && omp_check_optional_argument (var, false)))
13176 var = build_fold_indirect_ref (var);
13177 else if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FROM
13178 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_TO)
13179 || (!omp_is_allocatable_or_ptr (var)
13180 && !omp_check_optional_argument (var, false)))
13181 var = build_fold_addr_expr (var);
13182 gimplify_assign (x, var, &ilist);
13185 s = NULL_TREE;
13186 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
13188 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
13189 s = TREE_TYPE (ovar);
13190 if (TREE_CODE (s) == REFERENCE_TYPE
13191 || omp_check_optional_argument (ovar, false))
13192 s = TREE_TYPE (s);
13193 s = TYPE_SIZE_UNIT (s);
13195 else
13196 s = OMP_CLAUSE_SIZE (c);
13197 if (s == NULL_TREE)
13198 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
13199 s = fold_convert (size_type_node, s);
13200 purpose = size_int (map_idx++);
13201 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
13202 if (TREE_CODE (s) != INTEGER_CST)
13203 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
13205 unsigned HOST_WIDE_INT tkind, tkind_zero;
13206 switch (OMP_CLAUSE_CODE (c))
13208 case OMP_CLAUSE_MAP:
13209 tkind = OMP_CLAUSE_MAP_KIND (c);
13210 tkind_zero = tkind;
13211 if (OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c))
13212 switch (tkind)
13214 case GOMP_MAP_ALLOC:
13215 case GOMP_MAP_IF_PRESENT:
13216 case GOMP_MAP_TO:
13217 case GOMP_MAP_FROM:
13218 case GOMP_MAP_TOFROM:
13219 case GOMP_MAP_ALWAYS_TO:
13220 case GOMP_MAP_ALWAYS_FROM:
13221 case GOMP_MAP_ALWAYS_TOFROM:
13222 case GOMP_MAP_RELEASE:
13223 case GOMP_MAP_FORCE_TO:
13224 case GOMP_MAP_FORCE_FROM:
13225 case GOMP_MAP_FORCE_TOFROM:
13226 case GOMP_MAP_FORCE_PRESENT:
13227 tkind_zero = GOMP_MAP_ZERO_LEN_ARRAY_SECTION;
13228 break;
13229 case GOMP_MAP_DELETE:
13230 tkind_zero = GOMP_MAP_DELETE_ZERO_LEN_ARRAY_SECTION;
13231 default:
13232 break;
13234 if (tkind_zero != tkind)
13236 if (integer_zerop (s))
13237 tkind = tkind_zero;
13238 else if (integer_nonzerop (s))
13239 tkind_zero = tkind;
13241 if (tkind_zero == tkind
13242 && OMP_CLAUSE_MAP_RUNTIME_IMPLICIT_P (c)
13243 && (((tkind & GOMP_MAP_FLAG_SPECIAL_BITS)
13244 & ~GOMP_MAP_IMPLICIT)
13245 == 0))
13247 /* If this is an implicit map, and the GOMP_MAP_IMPLICIT
13248 bits are not interfered by other special bit encodings,
13249 then turn the GOMP_IMPLICIT_BIT flag on for the runtime
13250 to see. */
13251 tkind |= GOMP_MAP_IMPLICIT;
13252 tkind_zero = tkind;
13254 break;
13255 case OMP_CLAUSE_FIRSTPRIVATE:
13256 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
13257 tkind = GOMP_MAP_TO;
13258 tkind_zero = tkind;
13259 break;
13260 case OMP_CLAUSE_TO:
13261 tkind = GOMP_MAP_TO;
13262 tkind_zero = tkind;
13263 break;
13264 case OMP_CLAUSE_FROM:
13265 tkind = GOMP_MAP_FROM;
13266 tkind_zero = tkind;
13267 break;
13268 default:
13269 gcc_unreachable ();
13271 gcc_checking_assert (tkind
13272 < (HOST_WIDE_INT_C (1U) << talign_shift));
13273 gcc_checking_assert (tkind_zero
13274 < (HOST_WIDE_INT_C (1U) << talign_shift));
13275 talign = ceil_log2 (talign);
13276 tkind |= talign << talign_shift;
13277 tkind_zero |= talign << talign_shift;
13278 gcc_checking_assert (tkind
13279 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
13280 gcc_checking_assert (tkind_zero
13281 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
13282 if (tkind == tkind_zero)
13283 x = build_int_cstu (tkind_type, tkind);
13284 else
13286 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 0;
13287 x = build3 (COND_EXPR, tkind_type,
13288 fold_build2 (EQ_EXPR, boolean_type_node,
13289 unshare_expr (s), size_zero_node),
13290 build_int_cstu (tkind_type, tkind_zero),
13291 build_int_cstu (tkind_type, tkind));
13293 CONSTRUCTOR_APPEND_ELT (vkind, purpose, x);
13294 if (nc && nc != c)
13295 c = nc;
13296 break;
13298 case OMP_CLAUSE_FIRSTPRIVATE:
13299 if (is_gimple_omp_oacc (ctx->stmt))
13300 goto oacc_firstprivate_map;
13301 ovar = OMP_CLAUSE_DECL (c);
13302 if (omp_privatize_by_reference (ovar))
13303 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
13304 else
13305 talign = DECL_ALIGN_UNIT (ovar);
13306 var = lookup_decl_in_outer_ctx (ovar, ctx);
13307 x = build_sender_ref (ovar, ctx);
13308 tkind = GOMP_MAP_FIRSTPRIVATE;
13309 type = TREE_TYPE (ovar);
13310 if (omp_privatize_by_reference (ovar))
13311 type = TREE_TYPE (type);
13312 if ((INTEGRAL_TYPE_P (type)
13313 && TYPE_PRECISION (type) <= POINTER_SIZE)
13314 || TREE_CODE (type) == POINTER_TYPE)
13316 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
13317 tree t = var;
13318 if (omp_privatize_by_reference (var))
13319 t = build_simple_mem_ref (var);
13320 else if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
13321 suppress_warning (var);
13322 if (TREE_CODE (type) != POINTER_TYPE)
13323 t = fold_convert (pointer_sized_int_node, t);
13324 t = fold_convert (TREE_TYPE (x), t);
13325 gimplify_assign (x, t, &ilist);
13327 else if (omp_privatize_by_reference (var))
13328 gimplify_assign (x, var, &ilist);
13329 else if (is_gimple_reg (var))
13331 tree avar = create_tmp_var (TREE_TYPE (var));
13332 mark_addressable (avar);
13333 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
13334 suppress_warning (var);
13335 gimplify_assign (avar, var, &ilist);
13336 avar = build_fold_addr_expr (avar);
13337 gimplify_assign (x, avar, &ilist);
13339 else
13341 var = build_fold_addr_expr (var);
13342 gimplify_assign (x, var, &ilist);
13344 if (tkind == GOMP_MAP_FIRSTPRIVATE_INT)
13345 s = size_int (0);
13346 else if (omp_privatize_by_reference (ovar))
13347 s = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
13348 else
13349 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
13350 s = fold_convert (size_type_node, s);
13351 purpose = size_int (map_idx++);
13352 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
13353 if (TREE_CODE (s) != INTEGER_CST)
13354 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
13356 gcc_checking_assert (tkind
13357 < (HOST_WIDE_INT_C (1U) << talign_shift));
13358 talign = ceil_log2 (talign);
13359 tkind |= talign << talign_shift;
13360 gcc_checking_assert (tkind
13361 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
13362 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
13363 build_int_cstu (tkind_type, tkind));
13364 break;
13366 case OMP_CLAUSE_USE_DEVICE_PTR:
13367 case OMP_CLAUSE_USE_DEVICE_ADDR:
13368 case OMP_CLAUSE_HAS_DEVICE_ADDR:
13369 case OMP_CLAUSE_IS_DEVICE_PTR:
13370 ovar = OMP_CLAUSE_DECL (c);
13371 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR)
13373 while (TREE_CODE (ovar) == INDIRECT_REF
13374 || TREE_CODE (ovar) == ARRAY_REF)
13375 ovar = TREE_OPERAND (ovar, 0);
13377 var = lookup_decl_in_outer_ctx (ovar, ctx);
13379 if (lang_hooks.decls.omp_array_data (ovar, true))
13381 tkind = ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR
13382 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_HAS_DEVICE_ADDR)
13383 ? GOMP_MAP_USE_DEVICE_PTR : GOMP_MAP_FIRSTPRIVATE_INT);
13384 x = build_sender_ref ((splay_tree_key) &DECL_NAME (ovar), ctx);
13386 else if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR
13387 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_HAS_DEVICE_ADDR)
13389 tkind = GOMP_MAP_USE_DEVICE_PTR;
13390 x = build_sender_ref ((splay_tree_key) &DECL_UID (ovar), ctx);
13392 else
13394 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
13395 x = build_sender_ref (ovar, ctx);
13398 if (is_gimple_omp_oacc (ctx->stmt))
13400 gcc_assert (tkind == GOMP_MAP_USE_DEVICE_PTR);
13402 if (OMP_CLAUSE_USE_DEVICE_PTR_IF_PRESENT (c))
13403 tkind = GOMP_MAP_USE_DEVICE_PTR_IF_PRESENT;
13406 type = TREE_TYPE (ovar);
13407 if (lang_hooks.decls.omp_array_data (ovar, true))
13408 var = lang_hooks.decls.omp_array_data (ovar, false);
13409 else if (((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
13410 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR)
13411 && !omp_privatize_by_reference (ovar)
13412 && !omp_is_allocatable_or_ptr (ovar))
13413 || TREE_CODE (type) == ARRAY_TYPE)
13414 var = build_fold_addr_expr (var);
13415 else
13417 if (omp_privatize_by_reference (ovar)
13418 || omp_check_optional_argument (ovar, false)
13419 || omp_is_allocatable_or_ptr (ovar))
13421 type = TREE_TYPE (type);
13422 if (POINTER_TYPE_P (type)
13423 && TREE_CODE (type) != ARRAY_TYPE
13424 && ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_USE_DEVICE_ADDR
13425 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_HAS_DEVICE_ADDR
13426 && !omp_is_allocatable_or_ptr (ovar))
13427 || (omp_privatize_by_reference (ovar)
13428 && omp_is_allocatable_or_ptr (ovar))))
13429 var = build_simple_mem_ref (var);
13430 var = fold_convert (TREE_TYPE (x), var);
13433 tree present;
13434 present = omp_check_optional_argument (ovar, true);
13435 if (present)
13437 tree null_label = create_artificial_label (UNKNOWN_LOCATION);
13438 tree notnull_label = create_artificial_label (UNKNOWN_LOCATION);
13439 tree opt_arg_label = create_artificial_label (UNKNOWN_LOCATION);
13440 tree new_x = unshare_expr (x);
13441 gimplify_expr (&present, &ilist, NULL, is_gimple_val,
13442 fb_rvalue);
13443 gcond *cond = gimple_build_cond_from_tree (present,
13444 notnull_label,
13445 null_label);
13446 gimple_seq_add_stmt (&ilist, cond);
13447 gimple_seq_add_stmt (&ilist, gimple_build_label (null_label));
13448 gimplify_assign (new_x, null_pointer_node, &ilist);
13449 gimple_seq_add_stmt (&ilist, gimple_build_goto (opt_arg_label));
13450 gimple_seq_add_stmt (&ilist,
13451 gimple_build_label (notnull_label));
13452 gimplify_assign (x, var, &ilist);
13453 gimple_seq_add_stmt (&ilist,
13454 gimple_build_label (opt_arg_label));
13456 else
13457 gimplify_assign (x, var, &ilist);
13458 s = size_int (0);
13459 purpose = size_int (map_idx++);
13460 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
13461 gcc_checking_assert (tkind
13462 < (HOST_WIDE_INT_C (1U) << talign_shift));
13463 gcc_checking_assert (tkind
13464 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
13465 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
13466 build_int_cstu (tkind_type, tkind));
13467 break;
13470 gcc_assert (map_idx == map_cnt);
13472 DECL_INITIAL (TREE_VEC_ELT (t, 1))
13473 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 1)), vsize);
13474 DECL_INITIAL (TREE_VEC_ELT (t, 2))
13475 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 2)), vkind);
13476 for (int i = 1; i <= 2; i++)
13477 if (!TREE_STATIC (TREE_VEC_ELT (t, i)))
13479 gimple_seq initlist = NULL;
13480 force_gimple_operand (build1 (DECL_EXPR, void_type_node,
13481 TREE_VEC_ELT (t, i)),
13482 &initlist, true, NULL_TREE);
13483 gimple_seq_add_seq (&ilist, initlist);
13485 tree clobber = build_clobber (TREE_TYPE (TREE_VEC_ELT (t, i)));
13486 gimple_seq_add_stmt (&olist,
13487 gimple_build_assign (TREE_VEC_ELT (t, i),
13488 clobber));
13490 else if (omp_maybe_offloaded_ctx (ctx->outer))
13492 tree id = get_identifier ("omp declare target");
13493 tree decl = TREE_VEC_ELT (t, i);
13494 DECL_ATTRIBUTES (decl)
13495 = tree_cons (id, NULL_TREE, DECL_ATTRIBUTES (decl));
13496 varpool_node *node = varpool_node::get (decl);
13497 if (node)
13499 node->offloadable = 1;
13500 if (ENABLE_OFFLOADING)
13502 g->have_offload = true;
13503 vec_safe_push (offload_vars, t);
13508 tree clobber = build_clobber (ctx->record_type);
13509 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
13510 clobber));
13513 /* Once all the expansions are done, sequence all the different
13514 fragments inside gimple_omp_body. */
13516 new_body = NULL;
13518 if (offloaded
13519 && ctx->record_type)
13521 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
13522 /* fixup_child_record_type might have changed receiver_decl's type. */
13523 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
13524 gimple_seq_add_stmt (&new_body,
13525 gimple_build_assign (ctx->receiver_decl, t));
13527 gimple_seq_add_seq (&new_body, fplist);
13529 if (offloaded || data_region)
13531 tree prev = NULL_TREE;
13532 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
13533 switch (OMP_CLAUSE_CODE (c))
13535 tree var, x;
13536 default:
13537 break;
13538 case OMP_CLAUSE_FIRSTPRIVATE:
13539 if (is_gimple_omp_oacc (ctx->stmt))
13540 break;
13541 var = OMP_CLAUSE_DECL (c);
13542 if (omp_privatize_by_reference (var)
13543 || is_gimple_reg_type (TREE_TYPE (var)))
13545 tree new_var = lookup_decl (var, ctx);
13546 tree type;
13547 type = TREE_TYPE (var);
13548 if (omp_privatize_by_reference (var))
13549 type = TREE_TYPE (type);
13550 if ((INTEGRAL_TYPE_P (type)
13551 && TYPE_PRECISION (type) <= POINTER_SIZE)
13552 || TREE_CODE (type) == POINTER_TYPE)
13554 x = build_receiver_ref (var, false, ctx);
13555 if (TREE_CODE (type) != POINTER_TYPE)
13556 x = fold_convert (pointer_sized_int_node, x);
13557 x = fold_convert (type, x);
13558 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
13559 fb_rvalue);
13560 if (omp_privatize_by_reference (var))
13562 tree v = create_tmp_var_raw (type, get_name (var));
13563 gimple_add_tmp_var (v);
13564 TREE_ADDRESSABLE (v) = 1;
13565 gimple_seq_add_stmt (&new_body,
13566 gimple_build_assign (v, x));
13567 x = build_fold_addr_expr (v);
13569 gimple_seq_add_stmt (&new_body,
13570 gimple_build_assign (new_var, x));
13572 else
13574 bool by_ref = !omp_privatize_by_reference (var);
13575 x = build_receiver_ref (var, by_ref, ctx);
13576 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
13577 fb_rvalue);
13578 gimple_seq_add_stmt (&new_body,
13579 gimple_build_assign (new_var, x));
13582 else if (is_variable_sized (var))
13584 tree pvar = DECL_VALUE_EXPR (var);
13585 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
13586 pvar = TREE_OPERAND (pvar, 0);
13587 gcc_assert (DECL_P (pvar));
13588 tree new_var = lookup_decl (pvar, ctx);
13589 x = build_receiver_ref (var, false, ctx);
13590 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
13591 gimple_seq_add_stmt (&new_body,
13592 gimple_build_assign (new_var, x));
13594 break;
13595 case OMP_CLAUSE_PRIVATE:
13596 if (is_gimple_omp_oacc (ctx->stmt))
13597 break;
13598 var = OMP_CLAUSE_DECL (c);
13599 if (omp_privatize_by_reference (var))
13601 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
13602 tree new_var = lookup_decl (var, ctx);
13603 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
13604 if (TREE_CONSTANT (x))
13606 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
13607 get_name (var));
13608 gimple_add_tmp_var (x);
13609 TREE_ADDRESSABLE (x) = 1;
13610 x = build_fold_addr_expr_loc (clause_loc, x);
13612 else
13613 break;
13615 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
13616 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
13617 gimple_seq_add_stmt (&new_body,
13618 gimple_build_assign (new_var, x));
13620 break;
13621 case OMP_CLAUSE_USE_DEVICE_PTR:
13622 case OMP_CLAUSE_USE_DEVICE_ADDR:
13623 case OMP_CLAUSE_HAS_DEVICE_ADDR:
13624 case OMP_CLAUSE_IS_DEVICE_PTR:
13625 tree new_var;
13626 gimple_seq assign_body;
13627 bool is_array_data;
13628 bool do_optional_check;
13629 assign_body = NULL;
13630 do_optional_check = false;
13631 var = OMP_CLAUSE_DECL (c);
13632 is_array_data = lang_hooks.decls.omp_array_data (var, true) != NULL;
13634 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR
13635 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_HAS_DEVICE_ADDR)
13636 x = build_sender_ref (is_array_data
13637 ? (splay_tree_key) &DECL_NAME (var)
13638 : (splay_tree_key) &DECL_UID (var), ctx);
13639 else
13641 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR)
13643 while (TREE_CODE (var) == INDIRECT_REF
13644 || TREE_CODE (var) == ARRAY_REF)
13645 var = TREE_OPERAND (var, 0);
13647 x = build_receiver_ref (var, false, ctx);
13650 if (is_array_data)
13652 bool is_ref = omp_privatize_by_reference (var);
13653 do_optional_check = true;
13654 /* First, we copy the descriptor data from the host; then
13655 we update its data to point to the target address. */
13656 new_var = lookup_decl (var, ctx);
13657 new_var = DECL_VALUE_EXPR (new_var);
13658 tree v = new_var;
13660 if (is_ref)
13662 var = build_fold_indirect_ref (var);
13663 gimplify_expr (&var, &assign_body, NULL, is_gimple_val,
13664 fb_rvalue);
13665 v = create_tmp_var_raw (TREE_TYPE (var), get_name (var));
13666 gimple_add_tmp_var (v);
13667 TREE_ADDRESSABLE (v) = 1;
13668 gimple_seq_add_stmt (&assign_body,
13669 gimple_build_assign (v, var));
13670 tree rhs = build_fold_addr_expr (v);
13671 gimple_seq_add_stmt (&assign_body,
13672 gimple_build_assign (new_var, rhs));
13674 else
13675 gimple_seq_add_stmt (&assign_body,
13676 gimple_build_assign (new_var, var));
13678 tree v2 = lang_hooks.decls.omp_array_data (unshare_expr (v), false);
13679 gcc_assert (v2);
13680 gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
13681 gimple_seq_add_stmt (&assign_body,
13682 gimple_build_assign (v2, x));
13684 else if (is_variable_sized (var))
13686 tree pvar = DECL_VALUE_EXPR (var);
13687 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
13688 pvar = TREE_OPERAND (pvar, 0);
13689 gcc_assert (DECL_P (pvar));
13690 new_var = lookup_decl (pvar, ctx);
13691 gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
13692 gimple_seq_add_stmt (&assign_body,
13693 gimple_build_assign (new_var, x));
13695 else if (((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
13696 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR)
13697 && !omp_privatize_by_reference (var)
13698 && !omp_is_allocatable_or_ptr (var))
13699 || TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
13701 new_var = lookup_decl (var, ctx);
13702 new_var = DECL_VALUE_EXPR (new_var);
13703 gcc_assert (TREE_CODE (new_var) == MEM_REF);
13704 new_var = TREE_OPERAND (new_var, 0);
13705 gcc_assert (DECL_P (new_var));
13706 gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
13707 gimple_seq_add_stmt (&assign_body,
13708 gimple_build_assign (new_var, x));
13710 else
13712 tree type = TREE_TYPE (var);
13713 new_var = lookup_decl (var, ctx);
13714 if (omp_privatize_by_reference (var))
13716 type = TREE_TYPE (type);
13717 if (POINTER_TYPE_P (type)
13718 && TREE_CODE (type) != ARRAY_TYPE
13719 && ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_USE_DEVICE_ADDR
13720 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_HAS_DEVICE_ADDR)
13721 || (omp_privatize_by_reference (var)
13722 && omp_is_allocatable_or_ptr (var))))
13724 tree v = create_tmp_var_raw (type, get_name (var));
13725 gimple_add_tmp_var (v);
13726 TREE_ADDRESSABLE (v) = 1;
13727 x = fold_convert (type, x);
13728 gimplify_expr (&x, &assign_body, NULL, is_gimple_val,
13729 fb_rvalue);
13730 gimple_seq_add_stmt (&assign_body,
13731 gimple_build_assign (v, x));
13732 x = build_fold_addr_expr (v);
13733 do_optional_check = true;
13736 new_var = DECL_VALUE_EXPR (new_var);
13737 x = fold_convert (TREE_TYPE (new_var), x);
13738 gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
13739 gimple_seq_add_stmt (&assign_body,
13740 gimple_build_assign (new_var, x));
13742 tree present;
13743 present = ((do_optional_check
13744 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_HAS_DEVICE_ADDR)
13745 ? omp_check_optional_argument (OMP_CLAUSE_DECL (c), true)
13746 : NULL_TREE);
13747 if (present)
13749 tree null_label = create_artificial_label (UNKNOWN_LOCATION);
13750 tree notnull_label = create_artificial_label (UNKNOWN_LOCATION);
13751 tree opt_arg_label = create_artificial_label (UNKNOWN_LOCATION);
13752 glabel *null_glabel = gimple_build_label (null_label);
13753 glabel *notnull_glabel = gimple_build_label (notnull_label);
13754 ggoto *opt_arg_ggoto = gimple_build_goto (opt_arg_label);
13755 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
13756 fb_rvalue);
13757 gimplify_expr (&present, &new_body, NULL, is_gimple_val,
13758 fb_rvalue);
13759 gcond *cond = gimple_build_cond_from_tree (present,
13760 notnull_label,
13761 null_label);
13762 gimple_seq_add_stmt (&new_body, cond);
13763 gimple_seq_add_stmt (&new_body, null_glabel);
13764 gimplify_assign (new_var, null_pointer_node, &new_body);
13765 gimple_seq_add_stmt (&new_body, opt_arg_ggoto);
13766 gimple_seq_add_stmt (&new_body, notnull_glabel);
13767 gimple_seq_add_seq (&new_body, assign_body);
13768 gimple_seq_add_stmt (&new_body,
13769 gimple_build_label (opt_arg_label));
13771 else
13772 gimple_seq_add_seq (&new_body, assign_body);
13773 break;
13775 /* Handle GOMP_MAP_FIRSTPRIVATE_{POINTER,REFERENCE} in second pass,
13776 so that firstprivate vars holding OMP_CLAUSE_SIZE if needed
13777 are already handled. Similarly OMP_CLAUSE_PRIVATE for VLAs
13778 or references to VLAs. */
13779 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
13780 switch (OMP_CLAUSE_CODE (c))
13782 tree var;
13783 default:
13784 break;
13785 case OMP_CLAUSE_MAP:
13786 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
13787 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
13789 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
13790 poly_int64 offset = 0;
13791 gcc_assert (prev);
13792 var = OMP_CLAUSE_DECL (c);
13793 if (DECL_P (var)
13794 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
13795 && is_global_var (maybe_lookup_decl_in_outer_ctx (var,
13796 ctx))
13797 && varpool_node::get_create (var)->offloadable)
13798 break;
13799 if (TREE_CODE (var) == INDIRECT_REF
13800 && TREE_CODE (TREE_OPERAND (var, 0)) == COMPONENT_REF)
13801 var = TREE_OPERAND (var, 0);
13802 if (TREE_CODE (var) == COMPONENT_REF)
13804 var = get_addr_base_and_unit_offset (var, &offset);
13805 gcc_assert (var != NULL_TREE && DECL_P (var));
13807 else if (DECL_SIZE (var)
13808 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
13810 tree var2 = DECL_VALUE_EXPR (var);
13811 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
13812 var2 = TREE_OPERAND (var2, 0);
13813 gcc_assert (DECL_P (var2));
13814 var = var2;
13816 tree new_var = lookup_decl (var, ctx), x;
13817 tree type = TREE_TYPE (new_var);
13818 bool is_ref;
13819 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == INDIRECT_REF
13820 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
13821 == COMPONENT_REF))
13823 type = TREE_TYPE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0));
13824 is_ref = true;
13825 new_var = build2 (MEM_REF, type,
13826 build_fold_addr_expr (new_var),
13827 build_int_cst (build_pointer_type (type),
13828 offset));
13830 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPONENT_REF)
13832 type = TREE_TYPE (OMP_CLAUSE_DECL (c));
13833 is_ref = TREE_CODE (type) == REFERENCE_TYPE;
13834 new_var = build2 (MEM_REF, type,
13835 build_fold_addr_expr (new_var),
13836 build_int_cst (build_pointer_type (type),
13837 offset));
13839 else
13840 is_ref = omp_privatize_by_reference (var);
13841 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
13842 is_ref = false;
13843 bool ref_to_array = false;
13844 if (is_ref)
13846 type = TREE_TYPE (type);
13847 if (TREE_CODE (type) == ARRAY_TYPE)
13849 type = build_pointer_type (type);
13850 ref_to_array = true;
13853 else if (TREE_CODE (type) == ARRAY_TYPE)
13855 tree decl2 = DECL_VALUE_EXPR (new_var);
13856 gcc_assert (TREE_CODE (decl2) == MEM_REF);
13857 decl2 = TREE_OPERAND (decl2, 0);
13858 gcc_assert (DECL_P (decl2));
13859 new_var = decl2;
13860 type = TREE_TYPE (new_var);
13862 x = build_receiver_ref (OMP_CLAUSE_DECL (prev), false, ctx);
13863 x = fold_convert_loc (clause_loc, type, x);
13864 if (!integer_zerop (OMP_CLAUSE_SIZE (c)))
13866 tree bias = OMP_CLAUSE_SIZE (c);
13867 if (DECL_P (bias))
13868 bias = lookup_decl (bias, ctx);
13869 bias = fold_convert_loc (clause_loc, sizetype, bias);
13870 bias = fold_build1_loc (clause_loc, NEGATE_EXPR, sizetype,
13871 bias);
13872 x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
13873 TREE_TYPE (x), x, bias);
13875 if (ref_to_array)
13876 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
13877 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
13878 if (is_ref && !ref_to_array)
13880 tree t = create_tmp_var_raw (type, get_name (var));
13881 gimple_add_tmp_var (t);
13882 TREE_ADDRESSABLE (t) = 1;
13883 gimple_seq_add_stmt (&new_body,
13884 gimple_build_assign (t, x));
13885 x = build_fold_addr_expr_loc (clause_loc, t);
13887 gimple_seq_add_stmt (&new_body,
13888 gimple_build_assign (new_var, x));
13889 prev = NULL_TREE;
13891 else if (OMP_CLAUSE_CHAIN (c)
13892 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c))
13893 == OMP_CLAUSE_MAP
13894 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
13895 == GOMP_MAP_FIRSTPRIVATE_POINTER
13896 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
13897 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
13898 prev = c;
13899 break;
13900 case OMP_CLAUSE_PRIVATE:
13901 var = OMP_CLAUSE_DECL (c);
13902 if (is_variable_sized (var))
13904 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
13905 tree new_var = lookup_decl (var, ctx);
13906 tree pvar = DECL_VALUE_EXPR (var);
13907 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
13908 pvar = TREE_OPERAND (pvar, 0);
13909 gcc_assert (DECL_P (pvar));
13910 tree new_pvar = lookup_decl (pvar, ctx);
13911 tree atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
13912 tree al = size_int (DECL_ALIGN (var));
13913 tree x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
13914 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
13915 x = fold_convert_loc (clause_loc, TREE_TYPE (new_pvar), x);
13916 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
13917 gimple_seq_add_stmt (&new_body,
13918 gimple_build_assign (new_pvar, x));
13920 else if (omp_privatize_by_reference (var)
13921 && !is_gimple_omp_oacc (ctx->stmt))
13923 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
13924 tree new_var = lookup_decl (var, ctx);
13925 tree x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
13926 if (TREE_CONSTANT (x))
13927 break;
13928 else
13930 tree atmp
13931 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
13932 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
13933 tree al = size_int (TYPE_ALIGN (rtype));
13934 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
13937 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
13938 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
13939 gimple_seq_add_stmt (&new_body,
13940 gimple_build_assign (new_var, x));
13942 break;
13945 gimple_seq fork_seq = NULL;
13946 gimple_seq join_seq = NULL;
13948 if (offloaded && is_gimple_omp_oacc (ctx->stmt))
13950 /* If there are reductions on the offloaded region itself, treat
13951 them as a dummy GANG loop. */
13952 tree level = build_int_cst (integer_type_node, GOMP_DIM_GANG);
13954 gcall *private_marker = lower_oacc_private_marker (ctx);
13956 if (private_marker)
13957 gimple_call_set_arg (private_marker, 2, level);
13959 lower_oacc_reductions (gimple_location (ctx->stmt), clauses, level,
13960 false, NULL, private_marker, NULL, &fork_seq,
13961 &join_seq, ctx);
13964 gimple_seq_add_seq (&new_body, fork_seq);
13965 gimple_seq_add_seq (&new_body, tgt_body);
13966 gimple_seq_add_seq (&new_body, join_seq);
13968 if (offloaded)
13970 new_body = maybe_catch_exception (new_body);
13971 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
13973 gimple_omp_set_body (stmt, new_body);
13976 bind = gimple_build_bind (NULL, NULL,
13977 tgt_bind ? gimple_bind_block (tgt_bind)
13978 : NULL_TREE);
13979 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
13980 gimple_bind_add_seq (bind, ilist);
13981 gimple_bind_add_stmt (bind, stmt);
13982 gimple_bind_add_seq (bind, olist);
13984 pop_gimplify_context (NULL);
13986 if (dep_bind)
13988 gimple_bind_add_seq (dep_bind, dep_ilist);
13989 gimple_bind_add_stmt (dep_bind, bind);
13990 gimple_bind_add_seq (dep_bind, dep_olist);
13991 pop_gimplify_context (dep_bind);
13995 /* Expand code for an OpenMP teams directive. */
13997 static void
13998 lower_omp_teams (gimple_stmt_iterator *gsi_p, omp_context *ctx)
/* Replace the GIMPLE_OMP_TEAMS statement at *GSI_P with a GIMPLE_BIND
   that evaluates the num_teams/thread_limit clause expressions, invokes
   the GOMP_teams4 runtime entry point in a retry loop, and contains the
   lowered teams body.  CTX is the omp_context for this construct.  */
14000 gomp_teams *teams_stmt = as_a <gomp_teams *> (gsi_stmt (*gsi_p));
14001 push_gimplify_context ();
/* Build the bind that will hold everything and splice it in place of
   the teams statement.  */
14003 tree block = make_node (BLOCK);
14004 gbind *bind = gimple_build_bind (NULL, NULL, block);
14005 gsi_replace (gsi_p, bind, true);
14006 gimple_seq bind_body = NULL;
14007 gimple_seq dlist = NULL;
14008 gimple_seq olist = NULL;
/* Evaluate the num_teams clause; 0 is used when the clause is absent,
   leaving the team count up to the runtime.  A lower bound is only
   present when the clause used the lower:upper form.  */
14010 tree num_teams = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
14011 OMP_CLAUSE_NUM_TEAMS);
14012 tree num_teams_lower = NULL_TREE;
14013 if (num_teams == NULL_TREE)
14014 num_teams = build_int_cst (unsigned_type_node, 0);
14015 else
14017 num_teams_lower = OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (num_teams);
14018 if (num_teams_lower)
14020 num_teams_lower = fold_convert (unsigned_type_node, num_teams_lower);
14021 gimplify_expr (&num_teams_lower, &bind_body, NULL, is_gimple_val,
14022 fb_rvalue);
14024 num_teams = OMP_CLAUSE_NUM_TEAMS_UPPER_EXPR (num_teams);
14025 num_teams = fold_convert (unsigned_type_node, num_teams);
14026 gimplify_expr (&num_teams, &bind_body, NULL, is_gimple_val, fb_rvalue);
/* Without an explicit lower bound, lower == upper.  */
14028 if (num_teams_lower == NULL_TREE)
14029 num_teams_lower = num_teams;
/* Likewise thread_limit; 0 again means "no limit requested".  */
14030 tree thread_limit = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
14031 OMP_CLAUSE_THREAD_LIMIT);
14032 if (thread_limit == NULL_TREE)
14033 thread_limit = build_int_cst (unsigned_type_node, 0);
14034 else
14036 thread_limit = OMP_CLAUSE_THREAD_LIMIT_EXPR (thread_limit);
14037 thread_limit = fold_convert (unsigned_type_node, thread_limit);
14038 gimplify_expr (&thread_limit, &bind_body, NULL, is_gimple_val,
14039 fb_rvalue);
/* Emit the GOMP_teams4 call.  FIRST starts as 1 and is cleared after the
   first iteration so the runtime can distinguish the initial call from
   the retries reached via the goto to LLABEL below.  */
14041 location_t loc = gimple_location (teams_stmt);
14042 tree decl = builtin_decl_explicit (BUILT_IN_GOMP_TEAMS4);
14043 tree rettype = TREE_TYPE (TREE_TYPE (decl));
14044 tree first = create_tmp_var (rettype);
14045 gimple_seq_add_stmt (&bind_body,
14046 gimple_build_assign (first, build_one_cst (rettype)));
14047 tree llabel = create_artificial_label (loc);
14048 gimple_seq_add_stmt (&bind_body, gimple_build_label (llabel));
14049 gimple *call
14050 = gimple_build_call (decl, 4, num_teams_lower, num_teams, thread_limit,
14051 first);
14052 gimple_set_location (call, loc);
14053 tree temp = create_tmp_var (rettype);
14054 gimple_call_set_lhs (call, temp);
14055 gimple_seq_add_stmt (&bind_body, call);
/* Branch on the call's result: nonzero (TLABEL) runs the teams body,
   zero (FLABEL) exits the loop.  */
14057 tree tlabel = create_artificial_label (loc);
14058 tree flabel = create_artificial_label (loc);
14059 gimple *cond = gimple_build_cond (NE_EXPR, temp, build_zero_cst (rettype),
14060 tlabel, flabel);
14061 gimple_seq_add_stmt (&bind_body, cond);
14062 gimple_seq_add_stmt (&bind_body, gimple_build_label (tlabel));
14063 gimple_seq_add_stmt (&bind_body,
14064 gimple_build_assign (first, build_zero_cst (rettype)));
/* Lower the clauses and the body, then splice the lowered body plus the
   reduction (OLIST) and destructor (DLIST) sequences after the teams
   statement itself.  */
14066 lower_rec_input_clauses (gimple_omp_teams_clauses (teams_stmt),
14067 &bind_body, &dlist, ctx, NULL);
14068 lower_omp (gimple_omp_body_ptr (teams_stmt), ctx);
14069 lower_reduction_clauses (gimple_omp_teams_clauses (teams_stmt), &olist,
14070 NULL, ctx);
14071 gimple_seq_add_stmt (&bind_body, teams_stmt);
14073 gimple_seq_add_seq (&bind_body, gimple_omp_body (teams_stmt));
14074 gimple_omp_set_body (teams_stmt, NULL);
14075 gimple_seq_add_seq (&bind_body, olist);
14076 gimple_seq_add_seq (&bind_body, dlist);
14077 gimple_seq_add_stmt (&bind_body, gimple_build_omp_return (true));
/* Close the retry loop: jump back for another GOMP_teams4 call; FLABEL
   is the loop exit.  */
14078 gimple_seq_add_stmt (&bind_body, gimple_build_goto (llabel));
14079 gimple_seq_add_stmt (&bind_body, gimple_build_label (flabel));
14080 gimple_bind_set_body (bind, bind_body);
14082 pop_gimplify_context (bind);
/* Attach the context's variables to the new bind/block.  */
14084 gimple_bind_append_vars (bind, ctx->block_vars);
14085 BLOCK_VARS (block) = ctx->block_vars;
14086 if (BLOCK_VARS (block))
14087 TREE_USED (block) = 1;
14090 /* Callback for lower_omp_1. Return non-NULL if *tp needs to be
14091 regimplified. If DATA is non-NULL, lower_omp_1 is outside
14092 of OMP context, but with make_addressable_vars set. */
14094 static tree
14095 lower_omp_regimplify_p (tree *tp, int *walk_subtrees,
14096 void *data)
/* walk_tree callback: returns the offending tree (non-NULL) when *TP
   requires the containing statement to be regimplified, NULL_TREE
   otherwise.  DATA == NULL means we are inside an OMP context.  */
14098 tree t = *tp;
14100 /* Any variable with DECL_VALUE_EXPR needs to be regimplified.  */
14101 if ((VAR_P (t) || TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
14102 && data == NULL
14103 && DECL_HAS_VALUE_EXPR_P (t))
14104 return t;
/* Variables recorded in MAKE_ADDRESSABLE_VARS (by DECL_UID) also force
   regimplification, regardless of context.  */
14106 if (make_addressable_vars
14107 && DECL_P (t)
14108 && bitmap_bit_p (make_addressable_vars, DECL_UID (t)))
14109 return t;
14111 /* If a global variable has been privatized, TREE_CONSTANT on
14112 ADDR_EXPR might be wrong.  */
14113 if (data == NULL && TREE_CODE (t) == ADDR_EXPR)
14114 recompute_tree_invariant_for_addr_expr (t);
/* Don't walk into types or declarations; their subtrees are irrelevant
   here.  */
14116 *walk_subtrees = !IS_TYPE_OR_DECL_P (t);
14117 return NULL_TREE;
14120 /* Data to be communicated between lower_omp_regimplify_operands and
14121 lower_omp_regimplify_operands_p. */
14123 struct lower_omp_regimplify_operands_data
/* Innermost OMP context in which the statement being regimplified
   appears; used to look up remapped decls.  */
14125 omp_context *ctx;
/* Stack of saved state, pushed in (old DECL_VALUE_EXPR, decl) pairs by
   lower_omp_regimplify_operands_p so the temporarily-adjusted
   DECL_VALUE_EXPRs can be restored afterwards.  */
14126 vec<tree> *decls;
14129 /* Helper function for lower_omp_regimplify_operands. Find
14130 omp_member_access_dummy_var vars and adjust temporarily their
14131 DECL_VALUE_EXPRs if needed. */
14133 static tree
14134 lower_omp_regimplify_operands_p (tree *tp, int *walk_subtrees,
14135 void *data)
14137 tree t = omp_member_access_dummy_var (*tp);
14138 if (t)
14140 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
14141 lower_omp_regimplify_operands_data *ldata
14142 = (lower_omp_regimplify_operands_data *) wi->info;
14143 tree o = maybe_lookup_decl (t, ldata->ctx);
14144 if (o != t)
14146 ldata->decls->safe_push (DECL_VALUE_EXPR (*tp));
14147 ldata->decls->safe_push (*tp);
14148 tree v = unshare_and_remap (DECL_VALUE_EXPR (*tp), t, o);
14149 SET_DECL_VALUE_EXPR (*tp, v);
14152 *walk_subtrees = !IS_TYPE_OR_DECL_P (*tp);
14153 return NULL_TREE;
14156 /* Wrapper around gimple_regimplify_operands that adjusts DECL_VALUE_EXPRs
14157 of omp_member_access_dummy_var vars during regimplification. */
14159 static void
14160 lower_omp_regimplify_operands (omp_context *ctx, gimple *stmt,
14161 gimple_stmt_iterator *gsi_p)
14163 auto_vec<tree, 10> decls;
14164 if (ctx)
14166 struct walk_stmt_info wi;
14167 memset (&wi, '\0', sizeof (wi));
14168 struct lower_omp_regimplify_operands_data data;
14169 data.ctx = ctx;
14170 data.decls = &decls;
14171 wi.info = &data;
14172 walk_gimple_op (stmt, lower_omp_regimplify_operands_p, &wi);
14174 gimple_regimplify_operands (stmt, gsi_p);
14175 while (!decls.is_empty ())
14177 tree t = decls.pop ();
14178 tree v = decls.pop ();
14179 SET_DECL_VALUE_EXPR (t, v);
14183 static void
14184 lower_omp_1 (gimple_stmt_iterator *gsi_p, omp_context *ctx)
14186 gimple *stmt = gsi_stmt (*gsi_p);
14187 struct walk_stmt_info wi;
14188 gcall *call_stmt;
14190 if (gimple_has_location (stmt))
14191 input_location = gimple_location (stmt);
14193 if (make_addressable_vars)
14194 memset (&wi, '\0', sizeof (wi));
14196 /* If we have issued syntax errors, avoid doing any heavy lifting.
14197 Just replace the OMP directives with a NOP to avoid
14198 confusing RTL expansion. */
14199 if (seen_error () && is_gimple_omp (stmt))
14201 gsi_replace (gsi_p, gimple_build_nop (), true);
14202 return;
14205 switch (gimple_code (stmt))
14207 case GIMPLE_COND:
14209 gcond *cond_stmt = as_a <gcond *> (stmt);
14210 if ((ctx || make_addressable_vars)
14211 && (walk_tree (gimple_cond_lhs_ptr (cond_stmt),
14212 lower_omp_regimplify_p,
14213 ctx ? NULL : &wi, NULL)
14214 || walk_tree (gimple_cond_rhs_ptr (cond_stmt),
14215 lower_omp_regimplify_p,
14216 ctx ? NULL : &wi, NULL)))
14217 lower_omp_regimplify_operands (ctx, cond_stmt, gsi_p);
14219 break;
14220 case GIMPLE_CATCH:
14221 lower_omp (gimple_catch_handler_ptr (as_a <gcatch *> (stmt)), ctx);
14222 break;
14223 case GIMPLE_EH_FILTER:
14224 lower_omp (gimple_eh_filter_failure_ptr (stmt), ctx);
14225 break;
14226 case GIMPLE_TRY:
14227 lower_omp (gimple_try_eval_ptr (stmt), ctx);
14228 lower_omp (gimple_try_cleanup_ptr (stmt), ctx);
14229 break;
14230 case GIMPLE_TRANSACTION:
14231 lower_omp (gimple_transaction_body_ptr (as_a <gtransaction *> (stmt)),
14232 ctx);
14233 break;
14234 case GIMPLE_BIND:
14235 if (ctx && is_gimple_omp_oacc (ctx->stmt))
14237 tree vars = gimple_bind_vars (as_a <gbind *> (stmt));
14238 oacc_privatization_scan_decl_chain (ctx, vars);
14240 lower_omp (gimple_bind_body_ptr (as_a <gbind *> (stmt)), ctx);
14241 maybe_remove_omp_member_access_dummy_vars (as_a <gbind *> (stmt));
14242 break;
14243 case GIMPLE_OMP_PARALLEL:
14244 case GIMPLE_OMP_TASK:
14245 ctx = maybe_lookup_ctx (stmt);
14246 gcc_assert (ctx);
14247 if (ctx->cancellable)
14248 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
14249 lower_omp_taskreg (gsi_p, ctx);
14250 break;
14251 case GIMPLE_OMP_FOR:
14252 ctx = maybe_lookup_ctx (stmt);
14253 gcc_assert (ctx);
14254 if (ctx->cancellable)
14255 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
14256 lower_omp_for (gsi_p, ctx);
14257 break;
14258 case GIMPLE_OMP_SECTIONS:
14259 ctx = maybe_lookup_ctx (stmt);
14260 gcc_assert (ctx);
14261 if (ctx->cancellable)
14262 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
14263 lower_omp_sections (gsi_p, ctx);
14264 break;
14265 case GIMPLE_OMP_SCOPE:
14266 ctx = maybe_lookup_ctx (stmt);
14267 gcc_assert (ctx);
14268 lower_omp_scope (gsi_p, ctx);
14269 break;
14270 case GIMPLE_OMP_SINGLE:
14271 ctx = maybe_lookup_ctx (stmt);
14272 gcc_assert (ctx);
14273 lower_omp_single (gsi_p, ctx);
14274 break;
14275 case GIMPLE_OMP_MASTER:
14276 case GIMPLE_OMP_MASKED:
14277 ctx = maybe_lookup_ctx (stmt);
14278 gcc_assert (ctx);
14279 lower_omp_master (gsi_p, ctx);
14280 break;
14281 case GIMPLE_OMP_TASKGROUP:
14282 ctx = maybe_lookup_ctx (stmt);
14283 gcc_assert (ctx);
14284 lower_omp_taskgroup (gsi_p, ctx);
14285 break;
14286 case GIMPLE_OMP_ORDERED:
14287 ctx = maybe_lookup_ctx (stmt);
14288 gcc_assert (ctx);
14289 lower_omp_ordered (gsi_p, ctx);
14290 break;
14291 case GIMPLE_OMP_SCAN:
14292 ctx = maybe_lookup_ctx (stmt);
14293 gcc_assert (ctx);
14294 lower_omp_scan (gsi_p, ctx);
14295 break;
14296 case GIMPLE_OMP_CRITICAL:
14297 ctx = maybe_lookup_ctx (stmt);
14298 gcc_assert (ctx);
14299 lower_omp_critical (gsi_p, ctx);
14300 break;
14301 case GIMPLE_OMP_ATOMIC_LOAD:
14302 if ((ctx || make_addressable_vars)
14303 && walk_tree (gimple_omp_atomic_load_rhs_ptr (
14304 as_a <gomp_atomic_load *> (stmt)),
14305 lower_omp_regimplify_p, ctx ? NULL : &wi, NULL))
14306 lower_omp_regimplify_operands (ctx, stmt, gsi_p);
14307 break;
14308 case GIMPLE_OMP_TARGET:
14309 ctx = maybe_lookup_ctx (stmt);
14310 gcc_assert (ctx);
14311 lower_omp_target (gsi_p, ctx);
14312 break;
14313 case GIMPLE_OMP_TEAMS:
14314 ctx = maybe_lookup_ctx (stmt);
14315 gcc_assert (ctx);
14316 if (gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
14317 lower_omp_taskreg (gsi_p, ctx);
14318 else
14319 lower_omp_teams (gsi_p, ctx);
14320 break;
14321 case GIMPLE_CALL:
14322 tree fndecl;
14323 call_stmt = as_a <gcall *> (stmt);
14324 fndecl = gimple_call_fndecl (call_stmt);
14325 if (fndecl
14326 && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
14327 switch (DECL_FUNCTION_CODE (fndecl))
14329 case BUILT_IN_GOMP_BARRIER:
14330 if (ctx == NULL)
14331 break;
14332 /* FALLTHRU */
14333 case BUILT_IN_GOMP_CANCEL:
14334 case BUILT_IN_GOMP_CANCELLATION_POINT:
14335 omp_context *cctx;
14336 cctx = ctx;
14337 if (gimple_code (cctx->stmt) == GIMPLE_OMP_SECTION)
14338 cctx = cctx->outer;
14339 gcc_assert (gimple_call_lhs (call_stmt) == NULL_TREE);
14340 if (!cctx->cancellable)
14342 if (DECL_FUNCTION_CODE (fndecl)
14343 == BUILT_IN_GOMP_CANCELLATION_POINT)
14345 stmt = gimple_build_nop ();
14346 gsi_replace (gsi_p, stmt, false);
14348 break;
14350 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_GOMP_BARRIER)
14352 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_BARRIER_CANCEL);
14353 gimple_call_set_fndecl (call_stmt, fndecl);
14354 gimple_call_set_fntype (call_stmt, TREE_TYPE (fndecl));
14356 tree lhs;
14357 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (fndecl)));
14358 gimple_call_set_lhs (call_stmt, lhs);
14359 tree fallthru_label;
14360 fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
14361 gimple *g;
14362 g = gimple_build_label (fallthru_label);
14363 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
14364 g = gimple_build_cond (NE_EXPR, lhs,
14365 fold_convert (TREE_TYPE (lhs),
14366 boolean_false_node),
14367 cctx->cancel_label, fallthru_label);
14368 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
14369 break;
14370 default:
14371 break;
14373 goto regimplify;
14375 case GIMPLE_ASSIGN:
14376 for (omp_context *up = ctx; up; up = up->outer)
14378 if (gimple_code (up->stmt) == GIMPLE_OMP_ORDERED
14379 || gimple_code (up->stmt) == GIMPLE_OMP_CRITICAL
14380 || gimple_code (up->stmt) == GIMPLE_OMP_TASKGROUP
14381 || gimple_code (up->stmt) == GIMPLE_OMP_SCOPE
14382 || gimple_code (up->stmt) == GIMPLE_OMP_SECTION
14383 || gimple_code (up->stmt) == GIMPLE_OMP_SCAN
14384 || (gimple_code (up->stmt) == GIMPLE_OMP_TARGET
14385 && (gimple_omp_target_kind (up->stmt)
14386 == GF_OMP_TARGET_KIND_DATA)))
14387 continue;
14388 else if (!up->lastprivate_conditional_map)
14389 break;
14390 tree lhs = get_base_address (gimple_assign_lhs (stmt));
14391 if (TREE_CODE (lhs) == MEM_REF
14392 && DECL_P (TREE_OPERAND (lhs, 0))
14393 && TREE_CODE (TREE_TYPE (TREE_OPERAND (lhs,
14394 0))) == REFERENCE_TYPE)
14395 lhs = TREE_OPERAND (lhs, 0);
14396 if (DECL_P (lhs))
14397 if (tree *v = up->lastprivate_conditional_map->get (lhs))
14399 tree clauses;
14400 if (up->combined_into_simd_safelen1)
14402 up = up->outer;
14403 if (gimple_code (up->stmt) == GIMPLE_OMP_SCAN)
14404 up = up->outer;
14406 if (gimple_code (up->stmt) == GIMPLE_OMP_FOR)
14407 clauses = gimple_omp_for_clauses (up->stmt);
14408 else
14409 clauses = gimple_omp_sections_clauses (up->stmt);
14410 tree c = omp_find_clause (clauses, OMP_CLAUSE__CONDTEMP_);
14411 if (!OMP_CLAUSE__CONDTEMP__ITER (c))
14412 c = omp_find_clause (OMP_CLAUSE_CHAIN (c),
14413 OMP_CLAUSE__CONDTEMP_);
14414 gcc_assert (OMP_CLAUSE__CONDTEMP__ITER (c));
14415 gimple *g = gimple_build_assign (*v, OMP_CLAUSE_DECL (c));
14416 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
14419 /* FALLTHRU */
14421 default:
14422 regimplify:
14423 if ((ctx || make_addressable_vars)
14424 && walk_gimple_op (stmt, lower_omp_regimplify_p,
14425 ctx ? NULL : &wi))
14427 /* Just remove clobbers, this should happen only if we have
14428 "privatized" local addressable variables in SIMD regions,
14429 the clobber isn't needed in that case and gimplifying address
14430 of the ARRAY_REF into a pointer and creating MEM_REF based
14431 clobber would create worse code than we get with the clobber
14432 dropped. */
14433 if (gimple_clobber_p (stmt))
14435 gsi_replace (gsi_p, gimple_build_nop (), true);
14436 break;
14438 lower_omp_regimplify_operands (ctx, stmt, gsi_p);
14440 break;
14444 static void
14445 lower_omp (gimple_seq *body, omp_context *ctx)
14447 location_t saved_location = input_location;
14448 gimple_stmt_iterator gsi;
14449 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
14450 lower_omp_1 (&gsi, ctx);
14451 /* During gimplification, we haven't folded statments inside offloading
14452 or taskreg regions (gimplify.cc:maybe_fold_stmt); do that now. */
14453 if (target_nesting_level || taskreg_nesting_level)
14454 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
14455 fold_stmt (&gsi);
14456 input_location = saved_location;
14459 /* Main entry point. */
14461 static unsigned int
14462 execute_lower_omp (void)
14464 gimple_seq body;
14465 int i;
14466 omp_context *ctx;
14468 /* This pass always runs, to provide PROP_gimple_lomp.
14469 But often, there is nothing to do. */
14470 if (flag_openacc == 0 && flag_openmp == 0
14471 && flag_openmp_simd == 0)
14472 return 0;
14474 all_contexts = splay_tree_new (splay_tree_compare_pointers, 0,
14475 delete_omp_context);
14477 body = gimple_body (current_function_decl);
14479 scan_omp (&body, NULL);
14480 gcc_assert (taskreg_nesting_level == 0);
14481 FOR_EACH_VEC_ELT (taskreg_contexts, i, ctx)
14482 finish_taskreg_scan (ctx);
14483 taskreg_contexts.release ();
14485 if (all_contexts->root)
14487 if (make_addressable_vars)
14488 push_gimplify_context ();
14489 lower_omp (&body, NULL);
14490 if (make_addressable_vars)
14491 pop_gimplify_context (NULL);
14494 if (all_contexts)
14496 splay_tree_delete (all_contexts);
14497 all_contexts = NULL;
14499 BITMAP_FREE (make_addressable_vars);
14500 BITMAP_FREE (global_nonaddressable_vars);
14502 /* If current function is a method, remove artificial dummy VAR_DECL created
14503 for non-static data member privatization, they aren't needed for
14504 debuginfo nor anything else, have been already replaced everywhere in the
14505 IL and cause problems with LTO. */
14506 if (DECL_ARGUMENTS (current_function_decl)
14507 && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
14508 && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
14509 == POINTER_TYPE))
14510 remove_member_access_dummy_vars (DECL_INITIAL (current_function_decl));
14512 for (auto task_stmt : task_cpyfns)
14513 finalize_task_copyfn (task_stmt);
14514 task_cpyfns.release ();
14515 return 0;
14518 namespace {
14520 const pass_data pass_data_lower_omp =
14522 GIMPLE_PASS, /* type */
14523 "omplower", /* name */
14524 OPTGROUP_OMP, /* optinfo_flags */
14525 TV_NONE, /* tv_id */
14526 PROP_gimple_any, /* properties_required */
14527 PROP_gimple_lomp | PROP_gimple_lomp_dev, /* properties_provided */
14528 0, /* properties_destroyed */
14529 0, /* todo_flags_start */
14530 0, /* todo_flags_finish */
14533 class pass_lower_omp : public gimple_opt_pass
14535 public:
14536 pass_lower_omp (gcc::context *ctxt)
14537 : gimple_opt_pass (pass_data_lower_omp, ctxt)
14540 /* opt_pass methods: */
14541 virtual unsigned int execute (function *) { return execute_lower_omp (); }
14543 }; // class pass_lower_omp
14545 } // anon namespace
14547 gimple_opt_pass *
14548 make_pass_lower_omp (gcc::context *ctxt)
14550 return new pass_lower_omp (ctxt);
14553 /* The following is a utility to diagnose structured block violations.
14554 It is not part of the "omplower" pass, as that's invoked too late. It
14555 should be invoked by the respective front ends after gimplification. */
14557 static splay_tree all_labels;
14559 /* Check for mismatched contexts and generate an error if needed. Return
14560 true if an error is detected. */
14562 static bool
14563 diagnose_sb_0 (gimple_stmt_iterator *gsi_p,
14564 gimple *branch_ctx, gimple *label_ctx)
14566 gcc_checking_assert (!branch_ctx || is_gimple_omp (branch_ctx));
14567 gcc_checking_assert (!label_ctx || is_gimple_omp (label_ctx));
14569 if (label_ctx == branch_ctx)
14570 return false;
14572 const char* kind = NULL;
14574 if (flag_openacc)
14576 if ((branch_ctx && is_gimple_omp_oacc (branch_ctx))
14577 || (label_ctx && is_gimple_omp_oacc (label_ctx)))
14579 gcc_checking_assert (kind == NULL);
14580 kind = "OpenACC";
14583 if (kind == NULL)
14585 gcc_checking_assert (flag_openmp || flag_openmp_simd);
14586 kind = "OpenMP";
14589 /* Previously we kept track of the label's entire context in diagnose_sb_[12]
14590 so we could traverse it and issue a correct "exit" or "enter" error
14591 message upon a structured block violation.
14593 We built the context by building a list with tree_cons'ing, but there is
14594 no easy counterpart in gimple tuples. It seems like far too much work
14595 for issuing exit/enter error messages. If someone really misses the
14596 distinct error message... patches welcome. */
14598 #if 0
14599 /* Try to avoid confusing the user by producing and error message
14600 with correct "exit" or "enter" verbiage. We prefer "exit"
14601 unless we can show that LABEL_CTX is nested within BRANCH_CTX. */
14602 if (branch_ctx == NULL)
14603 exit_p = false;
14604 else
14606 while (label_ctx)
14608 if (TREE_VALUE (label_ctx) == branch_ctx)
14610 exit_p = false;
14611 break;
14613 label_ctx = TREE_CHAIN (label_ctx);
14617 if (exit_p)
14618 error ("invalid exit from %s structured block", kind);
14619 else
14620 error ("invalid entry to %s structured block", kind);
14621 #endif
14623 /* If it's obvious we have an invalid entry, be specific about the error. */
14624 if (branch_ctx == NULL)
14625 error ("invalid entry to %s structured block", kind);
14626 else
14628 /* Otherwise, be vague and lazy, but efficient. */
14629 error ("invalid branch to/from %s structured block", kind);
14632 gsi_replace (gsi_p, gimple_build_nop (), false);
14633 return true;
14636 /* Pass 1: Create a minimal tree of structured blocks, and record
14637 where each label is found. */
14639 static tree
14640 diagnose_sb_1 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
14641 struct walk_stmt_info *wi)
14643 gimple *context = (gimple *) wi->info;
14644 gimple *inner_context;
14645 gimple *stmt = gsi_stmt (*gsi_p);
14647 *handled_ops_p = true;
14649 switch (gimple_code (stmt))
14651 WALK_SUBSTMTS;
14653 case GIMPLE_OMP_PARALLEL:
14654 case GIMPLE_OMP_TASK:
14655 case GIMPLE_OMP_SCOPE:
14656 case GIMPLE_OMP_SECTIONS:
14657 case GIMPLE_OMP_SINGLE:
14658 case GIMPLE_OMP_SECTION:
14659 case GIMPLE_OMP_MASTER:
14660 case GIMPLE_OMP_MASKED:
14661 case GIMPLE_OMP_ORDERED:
14662 case GIMPLE_OMP_SCAN:
14663 case GIMPLE_OMP_CRITICAL:
14664 case GIMPLE_OMP_TARGET:
14665 case GIMPLE_OMP_TEAMS:
14666 case GIMPLE_OMP_TASKGROUP:
14667 /* The minimal context here is just the current OMP construct. */
14668 inner_context = stmt;
14669 wi->info = inner_context;
14670 walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
14671 wi->info = context;
14672 break;
14674 case GIMPLE_OMP_FOR:
14675 inner_context = stmt;
14676 wi->info = inner_context;
14677 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
14678 walk them. */
14679 walk_gimple_seq (gimple_omp_for_pre_body (stmt),
14680 diagnose_sb_1, NULL, wi);
14681 walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
14682 wi->info = context;
14683 break;
14685 case GIMPLE_LABEL:
14686 splay_tree_insert (all_labels,
14687 (splay_tree_key) gimple_label_label (
14688 as_a <glabel *> (stmt)),
14689 (splay_tree_value) context);
14690 break;
14692 default:
14693 break;
14696 return NULL_TREE;
14699 /* Pass 2: Check each branch and see if its context differs from that of
14700 the destination label's context. */
14702 static tree
14703 diagnose_sb_2 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
14704 struct walk_stmt_info *wi)
14706 gimple *context = (gimple *) wi->info;
14707 splay_tree_node n;
14708 gimple *stmt = gsi_stmt (*gsi_p);
14710 *handled_ops_p = true;
14712 switch (gimple_code (stmt))
14714 WALK_SUBSTMTS;
14716 case GIMPLE_OMP_PARALLEL:
14717 case GIMPLE_OMP_TASK:
14718 case GIMPLE_OMP_SCOPE:
14719 case GIMPLE_OMP_SECTIONS:
14720 case GIMPLE_OMP_SINGLE:
14721 case GIMPLE_OMP_SECTION:
14722 case GIMPLE_OMP_MASTER:
14723 case GIMPLE_OMP_MASKED:
14724 case GIMPLE_OMP_ORDERED:
14725 case GIMPLE_OMP_SCAN:
14726 case GIMPLE_OMP_CRITICAL:
14727 case GIMPLE_OMP_TARGET:
14728 case GIMPLE_OMP_TEAMS:
14729 case GIMPLE_OMP_TASKGROUP:
14730 wi->info = stmt;
14731 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
14732 wi->info = context;
14733 break;
14735 case GIMPLE_OMP_FOR:
14736 wi->info = stmt;
14737 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
14738 walk them. */
14739 walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt),
14740 diagnose_sb_2, NULL, wi);
14741 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
14742 wi->info = context;
14743 break;
14745 case GIMPLE_COND:
14747 gcond *cond_stmt = as_a <gcond *> (stmt);
14748 tree lab = gimple_cond_true_label (cond_stmt);
14749 if (lab)
14751 n = splay_tree_lookup (all_labels,
14752 (splay_tree_key) lab);
14753 diagnose_sb_0 (gsi_p, context,
14754 n ? (gimple *) n->value : NULL);
14756 lab = gimple_cond_false_label (cond_stmt);
14757 if (lab)
14759 n = splay_tree_lookup (all_labels,
14760 (splay_tree_key) lab);
14761 diagnose_sb_0 (gsi_p, context,
14762 n ? (gimple *) n->value : NULL);
14765 break;
14767 case GIMPLE_GOTO:
14769 tree lab = gimple_goto_dest (stmt);
14770 if (TREE_CODE (lab) != LABEL_DECL)
14771 break;
14773 n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
14774 diagnose_sb_0 (gsi_p, context, n ? (gimple *) n->value : NULL);
14776 break;
14778 case GIMPLE_SWITCH:
14780 gswitch *switch_stmt = as_a <gswitch *> (stmt);
14781 unsigned int i;
14782 for (i = 0; i < gimple_switch_num_labels (switch_stmt); ++i)
14784 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
14785 n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
14786 if (n && diagnose_sb_0 (gsi_p, context, (gimple *) n->value))
14787 break;
14790 break;
14792 case GIMPLE_RETURN:
14793 diagnose_sb_0 (gsi_p, context, NULL);
14794 break;
14796 default:
14797 break;
14800 return NULL_TREE;
14803 static unsigned int
14804 diagnose_omp_structured_block_errors (void)
14806 struct walk_stmt_info wi;
14807 gimple_seq body = gimple_body (current_function_decl);
14809 all_labels = splay_tree_new (splay_tree_compare_pointers, 0, 0);
14811 memset (&wi, 0, sizeof (wi));
14812 walk_gimple_seq (body, diagnose_sb_1, NULL, &wi);
14814 memset (&wi, 0, sizeof (wi));
14815 wi.want_locations = true;
14816 walk_gimple_seq_mod (&body, diagnose_sb_2, NULL, &wi);
14818 gimple_set_body (current_function_decl, body);
14820 splay_tree_delete (all_labels);
14821 all_labels = NULL;
14823 return 0;
14826 namespace {
14828 const pass_data pass_data_diagnose_omp_blocks =
14830 GIMPLE_PASS, /* type */
14831 "*diagnose_omp_blocks", /* name */
14832 OPTGROUP_OMP, /* optinfo_flags */
14833 TV_NONE, /* tv_id */
14834 PROP_gimple_any, /* properties_required */
14835 0, /* properties_provided */
14836 0, /* properties_destroyed */
14837 0, /* todo_flags_start */
14838 0, /* todo_flags_finish */
14841 class pass_diagnose_omp_blocks : public gimple_opt_pass
14843 public:
14844 pass_diagnose_omp_blocks (gcc::context *ctxt)
14845 : gimple_opt_pass (pass_data_diagnose_omp_blocks, ctxt)
14848 /* opt_pass methods: */
14849 virtual bool gate (function *)
14851 return flag_openacc || flag_openmp || flag_openmp_simd;
14853 virtual unsigned int execute (function *)
14855 return diagnose_omp_structured_block_errors ();
14858 }; // class pass_diagnose_omp_blocks
14860 } // anon namespace
14862 gimple_opt_pass *
14863 make_pass_diagnose_omp_blocks (gcc::context *ctxt)
14865 return new pass_diagnose_omp_blocks (ctxt);
14869 #include "gt-omp-low.h"