d: Merge upstream dmd 56589f0f4, druntime 651389b5, phobos 1516ecad9.
[official-gcc.git] / gcc / omp-low.cc
blobd73c165f0298a6098829ddd2d8a8ce055b0681dd
1 /* Lowering pass for OMP directives. Converts OMP directives into explicit
2 calls to the runtime library (libgomp), data marshalling to implement data
3 sharing and copying clauses, offloading to accelerators, and more.
5 Contributed by Diego Novillo <dnovillo@redhat.com>
7 Copyright (C) 2005-2022 Free Software Foundation, Inc.
9 This file is part of GCC.
11 GCC is free software; you can redistribute it and/or modify it under
12 the terms of the GNU General Public License as published by the Free
13 Software Foundation; either version 3, or (at your option) any later
14 version.
16 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
17 WARRANTY; without even the implied warranty of MERCHANTABILITY or
18 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19 for more details.
21 You should have received a copy of the GNU General Public License
22 along with GCC; see the file COPYING3. If not see
23 <http://www.gnu.org/licenses/>. */
25 #include "config.h"
26 #include "system.h"
27 #include "coretypes.h"
28 #include "backend.h"
29 #include "target.h"
30 #include "tree.h"
31 #include "gimple.h"
32 #include "tree-pass.h"
33 #include "ssa.h"
34 #include "cgraph.h"
35 #include "pretty-print.h"
36 #include "diagnostic-core.h"
37 #include "fold-const.h"
38 #include "stor-layout.h"
39 #include "internal-fn.h"
40 #include "gimple-iterator.h"
41 #include "gimple-fold.h"
42 #include "gimplify.h"
43 #include "gimplify-me.h"
44 #include "gimple-walk.h"
45 #include "tree-iterator.h"
46 #include "tree-inline.h"
47 #include "langhooks.h"
48 #include "tree-dfa.h"
49 #include "tree-ssa.h"
50 #include "splay-tree.h"
51 #include "omp-general.h"
52 #include "omp-low.h"
53 #include "gimple-low.h"
54 #include "alloc-pool.h"
55 #include "symbol-summary.h"
56 #include "tree-nested.h"
57 #include "context.h"
58 #include "gomp-constants.h"
59 #include "gimple-pretty-print.h"
60 #include "stringpool.h"
61 #include "attribs.h"
62 #include "omp-offload.h"
64 /* Lowering of OMP parallel and workshare constructs proceeds in two
65 phases. The first phase scans the function looking for OMP statements
66 and then for variables that must be replaced to satisfy data sharing
67 clauses. The second phase expands code for the constructs, as well as
68 re-gimplifying things when variables have been replaced with complex
69 expressions.
71 Final code generation is done by pass_expand_omp. The flowgraph is
72 scanned for regions which are then moved to a new
73 function, to be invoked by the thread library, or offloaded. */
75 /* Context structure. Used to store information about each parallel
76 directive in the code. */
78 struct omp_context
80 /* This field must be at the beginning, as we do "inheritance": Some
81 callback functions for tree-inline.cc (e.g., omp_copy_decl)
82 receive a copy_body_data pointer that is up-casted to an
83 omp_context pointer. */
84 copy_body_data cb;
86 /* The tree of contexts corresponding to the encountered constructs. */
87 struct omp_context *outer;
88 gimple *stmt;
90 /* Map variables to fields in a structure that allows communication
91 between sending and receiving threads. */
92 splay_tree field_map;
93 tree record_type;
94 tree sender_decl;
95 tree receiver_decl;
97 /* These are used just by task contexts, if task firstprivate fn is
98 needed. srecord_type is used to communicate from the thread
99 that encountered the task construct to task firstprivate fn,
100 record_type is allocated by GOMP_task, initialized by task firstprivate
101 fn and passed to the task body fn. */
102 splay_tree sfield_map;
103 tree srecord_type;
105 /* A chain of variables to add to the top-level block surrounding the
106 construct. In the case of a parallel, this is in the child function. */
107 tree block_vars;
109 /* Label to which GOMP_cancel{,llation_point} and explicit and implicit
110 barriers should jump to during omplower pass. */
111 tree cancel_label;
113 /* The sibling GIMPLE_OMP_FOR simd with _simt_ clause or NULL
114 otherwise. */
115 gimple *simt_stmt;
117 /* For task reductions registered in this context, a vector containing
118 the length of the private copies block (if constant, otherwise NULL)
119 and then offsets (if constant, otherwise NULL) for each entry. */
120 vec<tree> task_reductions;
122 /* A hash map from the reduction clauses to the registered array
123 elts. */
124 hash_map<tree, unsigned> *task_reduction_map;
126 /* And a hash map from the lastprivate(conditional:) variables to their
127 corresponding tracking loop iteration variables. */
128 hash_map<tree, tree> *lastprivate_conditional_map;
130 /* And a hash map from the allocate variables to their corresponding
131 allocators. */
132 hash_map<tree, tree> *allocate_map;
134 /* A tree_list of the reduction clauses in this context. This is
135 only used for checking the consistency of OpenACC reduction
136 clauses in scan_omp_for and is not guaranteed to contain a valid
137 value outside of this function. */
138 tree local_reduction_clauses;
140 /* A tree_list of the reduction clauses in outer contexts. This is
141 only used for checking the consistency of OpenACC reduction
142 clauses in scan_omp_for and is not guaranteed to contain a valid
143 value outside of this function. */
144 tree outer_reduction_clauses;
146 /* Nesting depth of this context. Used to beautify error messages re
147 invalid gotos. The outermost ctx is depth 1, with depth 0 being
148 reserved for the main body of the function. */
149 int depth;
151 /* True if this parallel directive is nested within another. */
152 bool is_nested;
154 /* True if this construct can be cancelled. */
155 bool cancellable;
157 /* True if lower_omp_1 should look up lastprivate conditional in parent
158 context. */
159 bool combined_into_simd_safelen1;
161 /* True if there is nested scan context with inclusive clause. */
162 bool scan_inclusive;
164 /* True if there is nested scan context with exclusive clause. */
165 bool scan_exclusive;
167 /* True in the second simd loop of for simd with inscan reductions. */
168 bool for_simd_scan_phase;
170 /* True if there is order(concurrent) clause on the construct. */
171 bool order_concurrent;
173 /* True if there is bind clause on the construct (i.e. a loop construct). */
174 bool loop_p;
176 /* Only used for omp target contexts. True if a teams construct is
177 strictly nested in it. */
178 bool teams_nested_p;
180 /* Only used for omp target contexts. True if an OpenMP construct other
181 than teams is strictly nested in it. */
182 bool nonteams_nested_p;
184 /* Candidates for adjusting OpenACC privatization level. */
185 vec<tree> oacc_privatization_candidates;
188 static splay_tree all_contexts;
189 static int taskreg_nesting_level;
190 static int target_nesting_level;
191 static bitmap make_addressable_vars;
192 static bitmap global_nonaddressable_vars;
193 static vec<omp_context *> taskreg_contexts;
194 static vec<gomp_task *> task_cpyfns;
196 static void scan_omp (gimple_seq *, omp_context *);
197 static tree scan_omp_1_op (tree *, int *, void *);
198 static bool omp_maybe_offloaded_ctx (omp_context *ctx);
200 #define WALK_SUBSTMTS \
201 case GIMPLE_BIND: \
202 case GIMPLE_TRY: \
203 case GIMPLE_CATCH: \
204 case GIMPLE_EH_FILTER: \
205 case GIMPLE_TRANSACTION: \
206 /* The sub-statements for these should be walked. */ \
207 *handled_ops_p = false; \
208 break;
210 /* Return whether CTX represents an OpenACC 'parallel' or 'serial' construct.
211 (This doesn't include OpenACC 'kernels' decomposed parts.) */
213 static bool
214 is_oacc_parallel_or_serial (omp_context *ctx)
216 enum gimple_code outer_type = gimple_code (ctx->stmt);
217 return ((outer_type == GIMPLE_OMP_TARGET)
218 && ((gimple_omp_target_kind (ctx->stmt)
219 == GF_OMP_TARGET_KIND_OACC_PARALLEL)
220 || (gimple_omp_target_kind (ctx->stmt)
221 == GF_OMP_TARGET_KIND_OACC_SERIAL)));
224 /* Return whether CTX represents an OpenACC 'kernels' construct.
225 (This doesn't include OpenACC 'kernels' decomposed parts.) */
227 static bool
228 is_oacc_kernels (omp_context *ctx)
230 enum gimple_code outer_type = gimple_code (ctx->stmt);
231 return ((outer_type == GIMPLE_OMP_TARGET)
232 && (gimple_omp_target_kind (ctx->stmt)
233 == GF_OMP_TARGET_KIND_OACC_KERNELS));
236 /* Return whether CTX represents an OpenACC 'kernels' decomposed part. */
238 static bool
239 is_oacc_kernels_decomposed_part (omp_context *ctx)
241 enum gimple_code outer_type = gimple_code (ctx->stmt);
242 return ((outer_type == GIMPLE_OMP_TARGET)
243 && ((gimple_omp_target_kind (ctx->stmt)
244 == GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED)
245 || (gimple_omp_target_kind (ctx->stmt)
246 == GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE)
247 || (gimple_omp_target_kind (ctx->stmt)
248 == GF_OMP_TARGET_KIND_OACC_DATA_KERNELS)));
251 /* Return true if STMT corresponds to an OpenMP target region. */
252 static bool
253 is_omp_target (gimple *stmt)
255 if (gimple_code (stmt) == GIMPLE_OMP_TARGET)
257 int kind = gimple_omp_target_kind (stmt);
258 return (kind == GF_OMP_TARGET_KIND_REGION
259 || kind == GF_OMP_TARGET_KIND_DATA
260 || kind == GF_OMP_TARGET_KIND_ENTER_DATA
261 || kind == GF_OMP_TARGET_KIND_EXIT_DATA);
263 return false;
266 /* If DECL is the artificial dummy VAR_DECL created for non-static
267 data member privatization, return the underlying "this" parameter,
268 otherwise return NULL. */
270 tree
271 omp_member_access_dummy_var (tree decl)
273 if (!VAR_P (decl)
274 || !DECL_ARTIFICIAL (decl)
275 || !DECL_IGNORED_P (decl)
276 || !DECL_HAS_VALUE_EXPR_P (decl)
277 || !lang_hooks.decls.omp_disregard_value_expr (decl, false))
278 return NULL_TREE;
280 tree v = DECL_VALUE_EXPR (decl);
281 if (TREE_CODE (v) != COMPONENT_REF)
282 return NULL_TREE;
284 while (1)
285 switch (TREE_CODE (v))
287 case COMPONENT_REF:
288 case MEM_REF:
289 case INDIRECT_REF:
290 CASE_CONVERT:
291 case POINTER_PLUS_EXPR:
292 v = TREE_OPERAND (v, 0);
293 continue;
294 case PARM_DECL:
295 if (DECL_CONTEXT (v) == current_function_decl
296 && DECL_ARTIFICIAL (v)
297 && TREE_CODE (TREE_TYPE (v)) == POINTER_TYPE)
298 return v;
299 return NULL_TREE;
300 default:
301 return NULL_TREE;
305 /* Helper for unshare_and_remap, called through walk_tree. */
307 static tree
308 unshare_and_remap_1 (tree *tp, int *walk_subtrees, void *data)
310 tree *pair = (tree *) data;
311 if (*tp == pair[0])
313 *tp = unshare_expr (pair[1]);
314 *walk_subtrees = 0;
316 else if (IS_TYPE_OR_DECL_P (*tp))
317 *walk_subtrees = 0;
318 return NULL_TREE;
321 /* Return unshare_expr (X) with all occurrences of FROM
322 replaced with TO. */
324 static tree
325 unshare_and_remap (tree x, tree from, tree to)
327 tree pair[2] = { from, to };
328 x = unshare_expr (x);
329 walk_tree (&x, unshare_and_remap_1, pair, NULL);
330 return x;
333 /* Convenience function for calling scan_omp_1_op on tree operands. */
335 static inline tree
336 scan_omp_op (tree *tp, omp_context *ctx)
338 struct walk_stmt_info wi;
340 memset (&wi, 0, sizeof (wi));
341 wi.info = ctx;
342 wi.want_locations = true;
344 return walk_tree (tp, scan_omp_1_op, &wi, NULL);
347 static void lower_omp (gimple_seq *, omp_context *);
348 static tree lookup_decl_in_outer_ctx (tree, omp_context *);
349 static tree maybe_lookup_decl_in_outer_ctx (tree, omp_context *);
351 /* Return true if CTX is for an omp parallel. */
353 static inline bool
354 is_parallel_ctx (omp_context *ctx)
356 return gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL;
360 /* Return true if CTX is for an omp task. */
362 static inline bool
363 is_task_ctx (omp_context *ctx)
365 return gimple_code (ctx->stmt) == GIMPLE_OMP_TASK;
369 /* Return true if CTX is for an omp taskloop. */
371 static inline bool
372 is_taskloop_ctx (omp_context *ctx)
374 return gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
375 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP;
379 /* Return true if CTX is for a host omp teams. */
381 static inline bool
382 is_host_teams_ctx (omp_context *ctx)
384 return gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
385 && gimple_omp_teams_host (as_a <gomp_teams *> (ctx->stmt));
388 /* Return true if CTX is for an omp parallel or omp task or host omp teams
389 (the last one is strictly not a task region in OpenMP speak, but we
390 need to treat it similarly). */
392 static inline bool
393 is_taskreg_ctx (omp_context *ctx)
395 return is_parallel_ctx (ctx) || is_task_ctx (ctx) || is_host_teams_ctx (ctx);
398 /* Return true if EXPR is variable sized. */
400 static inline bool
401 is_variable_sized (const_tree expr)
403 return !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
406 /* Lookup variables. The "maybe" form
407 allows for the variable form to not have been entered, otherwise we
408 assert that the variable must have been entered. */
410 static inline tree
411 lookup_decl (tree var, omp_context *ctx)
413 tree *n = ctx->cb.decl_map->get (var);
414 return *n;
417 static inline tree
418 maybe_lookup_decl (const_tree var, omp_context *ctx)
420 tree *n = ctx->cb.decl_map->get (const_cast<tree> (var));
421 return n ? *n : NULL_TREE;
424 static inline tree
425 lookup_field (tree var, omp_context *ctx)
427 splay_tree_node n;
428 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) var);
429 return (tree) n->value;
432 static inline tree
433 lookup_sfield (splay_tree_key key, omp_context *ctx)
435 splay_tree_node n;
436 n = splay_tree_lookup (ctx->sfield_map
437 ? ctx->sfield_map : ctx->field_map, key);
438 return (tree) n->value;
441 static inline tree
442 lookup_sfield (tree var, omp_context *ctx)
444 return lookup_sfield ((splay_tree_key) var, ctx);
447 static inline tree
448 maybe_lookup_field (splay_tree_key key, omp_context *ctx)
450 splay_tree_node n;
451 n = splay_tree_lookup (ctx->field_map, key);
452 return n ? (tree) n->value : NULL_TREE;
455 static inline tree
456 maybe_lookup_field (tree var, omp_context *ctx)
458 return maybe_lookup_field ((splay_tree_key) var, ctx);
461 /* Return true if DECL should be copied by pointer. SHARED_CTX is
462 the parallel context if DECL is to be shared. */
464 static bool
465 use_pointer_for_field (tree decl, omp_context *shared_ctx)
467 if (AGGREGATE_TYPE_P (TREE_TYPE (decl))
468 || TYPE_ATOMIC (TREE_TYPE (decl)))
469 return true;
471 /* We can only use copy-in/copy-out semantics for shared variables
472 when we know the value is not accessible from an outer scope. */
473 if (shared_ctx)
475 gcc_assert (!is_gimple_omp_oacc (shared_ctx->stmt));
477 /* ??? Trivially accessible from anywhere. But why would we even
478 be passing an address in this case? Should we simply assert
479 this to be false, or should we have a cleanup pass that removes
480 these from the list of mappings? */
481 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, shared_ctx)))
482 return true;
484 /* For variables with DECL_HAS_VALUE_EXPR_P set, we cannot tell
485 without analyzing the expression whether or not its location
486 is accessible to anyone else. In the case of nested parallel
487 regions it certainly may be. */
488 if (TREE_CODE (decl) != RESULT_DECL && DECL_HAS_VALUE_EXPR_P (decl))
489 return true;
491 /* Do not use copy-in/copy-out for variables that have their
492 address taken. */
493 if (is_global_var (decl))
495 /* For file scope vars, track whether we've seen them as
496 non-addressable initially and in that case, keep the same
497 answer for the duration of the pass, even when they are made
498 addressable later on e.g. through reduction expansion. Global
499 variables which weren't addressable before the pass will not
500 have their privatized copies address taken. See PR91216. */
501 if (!TREE_ADDRESSABLE (decl))
503 if (!global_nonaddressable_vars)
504 global_nonaddressable_vars = BITMAP_ALLOC (NULL);
505 bitmap_set_bit (global_nonaddressable_vars, DECL_UID (decl));
507 else if (!global_nonaddressable_vars
508 || !bitmap_bit_p (global_nonaddressable_vars,
509 DECL_UID (decl)))
510 return true;
512 else if (TREE_ADDRESSABLE (decl))
513 return true;
515 /* lower_send_shared_vars only uses copy-in, but not copy-out
516 for these. */
517 if (TREE_READONLY (decl)
518 || ((TREE_CODE (decl) == RESULT_DECL
519 || TREE_CODE (decl) == PARM_DECL)
520 && DECL_BY_REFERENCE (decl)))
521 return false;
523 /* Disallow copy-in/out in nested parallel if
524 decl is shared in outer parallel, otherwise
525 each thread could store the shared variable
526 in its own copy-in location, making the
527 variable no longer really shared. */
528 if (shared_ctx->is_nested)
530 omp_context *up;
532 for (up = shared_ctx->outer; up; up = up->outer)
533 if ((is_taskreg_ctx (up)
534 || (gimple_code (up->stmt) == GIMPLE_OMP_TARGET
535 && is_gimple_omp_offloaded (up->stmt)))
536 && maybe_lookup_decl (decl, up))
537 break;
539 if (up)
541 tree c;
543 if (gimple_code (up->stmt) == GIMPLE_OMP_TARGET)
545 for (c = gimple_omp_target_clauses (up->stmt);
546 c; c = OMP_CLAUSE_CHAIN (c))
547 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
548 && OMP_CLAUSE_DECL (c) == decl)
549 break;
551 else
552 for (c = gimple_omp_taskreg_clauses (up->stmt);
553 c; c = OMP_CLAUSE_CHAIN (c))
554 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
555 && OMP_CLAUSE_DECL (c) == decl)
556 break;
558 if (c)
559 goto maybe_mark_addressable_and_ret;
563 /* For tasks avoid using copy-in/out. As tasks can be
564 deferred or executed in different thread, when GOMP_task
565 returns, the task hasn't necessarily terminated. */
566 if (is_task_ctx (shared_ctx))
568 tree outer;
569 maybe_mark_addressable_and_ret:
570 outer = maybe_lookup_decl_in_outer_ctx (decl, shared_ctx);
571 if (is_gimple_reg (outer) && !omp_member_access_dummy_var (outer))
573 /* Taking address of OUTER in lower_send_shared_vars
574 might need regimplification of everything that uses the
575 variable. */
576 if (!make_addressable_vars)
577 make_addressable_vars = BITMAP_ALLOC (NULL);
578 bitmap_set_bit (make_addressable_vars, DECL_UID (outer));
579 TREE_ADDRESSABLE (outer) = 1;
581 return true;
585 return false;
588 /* Construct a new automatic decl similar to VAR. */
590 static tree
591 omp_copy_decl_2 (tree var, tree name, tree type, omp_context *ctx)
593 tree copy = copy_var_decl (var, name, type);
595 DECL_CONTEXT (copy) = current_function_decl;
597 if (ctx)
599 DECL_CHAIN (copy) = ctx->block_vars;
600 ctx->block_vars = copy;
602 else
603 record_vars (copy);
605 /* If VAR is listed in make_addressable_vars, it wasn't
606 originally addressable, but was only later made so.
607 We don't need to take address of privatizations
608 from that var. */
609 if (TREE_ADDRESSABLE (var)
610 && ((make_addressable_vars
611 && bitmap_bit_p (make_addressable_vars, DECL_UID (var)))
612 || (global_nonaddressable_vars
613 && bitmap_bit_p (global_nonaddressable_vars, DECL_UID (var)))))
614 TREE_ADDRESSABLE (copy) = 0;
616 return copy;
619 static tree
620 omp_copy_decl_1 (tree var, omp_context *ctx)
622 return omp_copy_decl_2 (var, DECL_NAME (var), TREE_TYPE (var), ctx);
625 /* Build tree nodes to access the field for VAR on the receiver side. */
627 static tree
628 build_receiver_ref (tree var, bool by_ref, omp_context *ctx)
630 tree x, field = lookup_field (var, ctx);
632 /* If the receiver record type was remapped in the child function,
633 remap the field into the new record type. */
634 x = maybe_lookup_field (field, ctx);
635 if (x != NULL)
636 field = x;
638 x = build_simple_mem_ref (ctx->receiver_decl);
639 TREE_THIS_NOTRAP (x) = 1;
640 x = omp_build_component_ref (x, field);
641 if (by_ref)
643 x = build_simple_mem_ref (x);
644 TREE_THIS_NOTRAP (x) = 1;
647 return x;
650 /* Build tree nodes to access VAR in the scope outer to CTX. In the case
651 of a parallel, this is a component reference; for workshare constructs
652 this is some variable. */
654 static tree
655 build_outer_var_ref (tree var, omp_context *ctx,
656 enum omp_clause_code code = OMP_CLAUSE_ERROR)
658 tree x;
659 omp_context *outer = ctx->outer;
660 for (; outer; outer = outer->outer)
662 if (gimple_code (outer->stmt) == GIMPLE_OMP_TASKGROUP)
663 continue;
664 if (gimple_code (outer->stmt) == GIMPLE_OMP_SCOPE
665 && !maybe_lookup_decl (var, outer))
666 continue;
667 break;
670 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
671 x = var;
672 else if (is_variable_sized (var))
674 x = TREE_OPERAND (DECL_VALUE_EXPR (var), 0);
675 x = build_outer_var_ref (x, ctx, code);
676 x = build_simple_mem_ref (x);
678 else if (is_taskreg_ctx (ctx))
680 bool by_ref = use_pointer_for_field (var, NULL);
681 x = build_receiver_ref (var, by_ref, ctx);
683 else if ((gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
684 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
685 || ctx->loop_p
686 || code == OMP_CLAUSE_ALLOCATE
687 || (code == OMP_CLAUSE_PRIVATE
688 && (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
689 || gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS
690 || gimple_code (ctx->stmt) == GIMPLE_OMP_SINGLE)))
692 /* #pragma omp simd isn't a worksharing construct, and can reference
693 even private vars in its linear etc. clauses.
694 Similarly for OMP_CLAUSE_PRIVATE with outer ref, that can refer
695 to private vars in all worksharing constructs. */
696 x = NULL_TREE;
697 if (outer && is_taskreg_ctx (outer))
698 x = lookup_decl (var, outer);
699 else if (outer)
700 x = maybe_lookup_decl_in_outer_ctx (var, ctx);
701 if (x == NULL_TREE)
702 x = var;
704 else if (code == OMP_CLAUSE_LASTPRIVATE && is_taskloop_ctx (ctx))
706 gcc_assert (outer);
707 splay_tree_node n
708 = splay_tree_lookup (outer->field_map,
709 (splay_tree_key) &DECL_UID (var));
710 if (n == NULL)
712 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, outer)))
713 x = var;
714 else
715 x = lookup_decl (var, outer);
717 else
719 tree field = (tree) n->value;
720 /* If the receiver record type was remapped in the child function,
721 remap the field into the new record type. */
722 x = maybe_lookup_field (field, outer);
723 if (x != NULL)
724 field = x;
726 x = build_simple_mem_ref (outer->receiver_decl);
727 x = omp_build_component_ref (x, field);
728 if (use_pointer_for_field (var, outer))
729 x = build_simple_mem_ref (x);
732 else if (outer)
733 x = lookup_decl (var, outer);
734 else if (omp_privatize_by_reference (var))
735 /* This can happen with orphaned constructs. If var is reference, it is
736 possible it is shared and as such valid. */
737 x = var;
738 else if (omp_member_access_dummy_var (var))
739 x = var;
740 else
741 gcc_unreachable ();
743 if (x == var)
745 tree t = omp_member_access_dummy_var (var);
746 if (t)
748 x = DECL_VALUE_EXPR (var);
749 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
750 if (o != t)
751 x = unshare_and_remap (x, t, o);
752 else
753 x = unshare_expr (x);
757 if (omp_privatize_by_reference (var))
758 x = build_simple_mem_ref (x);
760 return x;
763 /* Build tree nodes to access the field for VAR on the sender side. */
765 static tree
766 build_sender_ref (splay_tree_key key, omp_context *ctx)
768 tree field = lookup_sfield (key, ctx);
769 return omp_build_component_ref (ctx->sender_decl, field);
772 static tree
773 build_sender_ref (tree var, omp_context *ctx)
775 return build_sender_ref ((splay_tree_key) var, ctx);
778 /* Add a new field for VAR inside the structure CTX->SENDER_DECL. If
779 BASE_POINTERS_RESTRICT, declare the field with restrict. */
781 static void
782 install_var_field (tree var, bool by_ref, int mask, omp_context *ctx)
784 tree field, type, sfield = NULL_TREE;
785 splay_tree_key key = (splay_tree_key) var;
787 if ((mask & 16) != 0)
789 key = (splay_tree_key) &DECL_NAME (var);
790 gcc_checking_assert (key != (splay_tree_key) var);
792 if ((mask & 8) != 0)
794 key = (splay_tree_key) &DECL_UID (var);
795 gcc_checking_assert (key != (splay_tree_key) var);
797 gcc_assert ((mask & 1) == 0
798 || !splay_tree_lookup (ctx->field_map, key));
799 gcc_assert ((mask & 2) == 0 || !ctx->sfield_map
800 || !splay_tree_lookup (ctx->sfield_map, key));
801 gcc_assert ((mask & 3) == 3
802 || !is_gimple_omp_oacc (ctx->stmt));
804 type = TREE_TYPE (var);
805 if ((mask & 16) != 0)
806 type = lang_hooks.decls.omp_array_data (var, true);
808 /* Prevent redeclaring the var in the split-off function with a restrict
809 pointer type. Note that we only clear type itself, restrict qualifiers in
810 the pointed-to type will be ignored by points-to analysis. */
811 if (POINTER_TYPE_P (type)
812 && TYPE_RESTRICT (type))
813 type = build_qualified_type (type, TYPE_QUALS (type) & ~TYPE_QUAL_RESTRICT);
815 if (mask & 4)
817 gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
818 type = build_pointer_type (build_pointer_type (type));
820 else if (by_ref)
821 type = build_pointer_type (type);
822 else if ((mask & (32 | 3)) == 1
823 && omp_privatize_by_reference (var))
824 type = TREE_TYPE (type);
826 field = build_decl (DECL_SOURCE_LOCATION (var),
827 FIELD_DECL, DECL_NAME (var), type);
829 /* Remember what variable this field was created for. This does have a
830 side effect of making dwarf2out ignore this member, so for helpful
831 debugging we clear it later in delete_omp_context. */
832 DECL_ABSTRACT_ORIGIN (field) = var;
833 if ((mask & 16) == 0 && type == TREE_TYPE (var))
835 SET_DECL_ALIGN (field, DECL_ALIGN (var));
836 DECL_USER_ALIGN (field) = DECL_USER_ALIGN (var);
837 TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (var);
839 else
840 SET_DECL_ALIGN (field, TYPE_ALIGN (type));
842 if ((mask & 3) == 3)
844 insert_field_into_struct (ctx->record_type, field);
845 if (ctx->srecord_type)
847 sfield = build_decl (DECL_SOURCE_LOCATION (var),
848 FIELD_DECL, DECL_NAME (var), type);
849 DECL_ABSTRACT_ORIGIN (sfield) = var;
850 SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
851 DECL_USER_ALIGN (sfield) = DECL_USER_ALIGN (field);
852 TREE_THIS_VOLATILE (sfield) = TREE_THIS_VOLATILE (field);
853 insert_field_into_struct (ctx->srecord_type, sfield);
856 else
858 if (ctx->srecord_type == NULL_TREE)
860 tree t;
862 ctx->srecord_type = lang_hooks.types.make_type (RECORD_TYPE);
863 ctx->sfield_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
864 for (t = TYPE_FIELDS (ctx->record_type); t ; t = TREE_CHAIN (t))
866 sfield = build_decl (DECL_SOURCE_LOCATION (t),
867 FIELD_DECL, DECL_NAME (t), TREE_TYPE (t));
868 DECL_ABSTRACT_ORIGIN (sfield) = DECL_ABSTRACT_ORIGIN (t);
869 insert_field_into_struct (ctx->srecord_type, sfield);
870 splay_tree_insert (ctx->sfield_map,
871 (splay_tree_key) DECL_ABSTRACT_ORIGIN (t),
872 (splay_tree_value) sfield);
875 sfield = field;
876 insert_field_into_struct ((mask & 1) ? ctx->record_type
877 : ctx->srecord_type, field);
880 if (mask & 1)
881 splay_tree_insert (ctx->field_map, key, (splay_tree_value) field);
882 if ((mask & 2) && ctx->sfield_map)
883 splay_tree_insert (ctx->sfield_map, key, (splay_tree_value) sfield);
886 static tree
887 install_var_local (tree var, omp_context *ctx)
889 tree new_var = omp_copy_decl_1 (var, ctx);
890 insert_decl_map (&ctx->cb, var, new_var);
891 return new_var;
894 /* Adjust the replacement for DECL in CTX for the new context. This means
895 copying the DECL_VALUE_EXPR, and fixing up the type. */
897 static void
898 fixup_remapped_decl (tree decl, omp_context *ctx, bool private_debug)
900 tree new_decl, size;
902 new_decl = lookup_decl (decl, ctx);
904 TREE_TYPE (new_decl) = remap_type (TREE_TYPE (decl), &ctx->cb);
906 if ((!TREE_CONSTANT (DECL_SIZE (new_decl)) || private_debug)
907 && DECL_HAS_VALUE_EXPR_P (decl))
909 tree ve = DECL_VALUE_EXPR (decl);
910 walk_tree (&ve, copy_tree_body_r, &ctx->cb, NULL);
911 SET_DECL_VALUE_EXPR (new_decl, ve);
912 DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
915 if (!TREE_CONSTANT (DECL_SIZE (new_decl)))
917 size = remap_decl (DECL_SIZE (decl), &ctx->cb);
918 if (size == error_mark_node)
919 size = TYPE_SIZE (TREE_TYPE (new_decl));
920 DECL_SIZE (new_decl) = size;
922 size = remap_decl (DECL_SIZE_UNIT (decl), &ctx->cb);
923 if (size == error_mark_node)
924 size = TYPE_SIZE_UNIT (TREE_TYPE (new_decl));
925 DECL_SIZE_UNIT (new_decl) = size;
929 /* The callback for remap_decl. Search all containing contexts for a
930 mapping of the variable; this avoids having to duplicate the splay
931 tree ahead of time. We know a mapping doesn't already exist in the
932 given context. Create new mappings to implement default semantics. */
934 static tree
935 omp_copy_decl (tree var, copy_body_data *cb)
937 omp_context *ctx = (omp_context *) cb;
938 tree new_var;
940 if (TREE_CODE (var) == LABEL_DECL)
942 if (FORCED_LABEL (var) || DECL_NONLOCAL (var))
943 return var;
944 new_var = create_artificial_label (DECL_SOURCE_LOCATION (var));
945 DECL_CONTEXT (new_var) = current_function_decl;
946 insert_decl_map (&ctx->cb, var, new_var);
947 return new_var;
950 while (!is_taskreg_ctx (ctx))
952 ctx = ctx->outer;
953 if (ctx == NULL)
954 return var;
955 new_var = maybe_lookup_decl (var, ctx);
956 if (new_var)
957 return new_var;
960 if (is_global_var (var) || decl_function_context (var) != ctx->cb.src_fn)
961 return var;
963 return error_mark_node;
966 /* Create a new context, with OUTER_CTX being the surrounding context. */
968 static omp_context *
969 new_omp_context (gimple *stmt, omp_context *outer_ctx)
971 omp_context *ctx = XCNEW (omp_context);
973 splay_tree_insert (all_contexts, (splay_tree_key) stmt,
974 (splay_tree_value) ctx);
975 ctx->stmt = stmt;
977 if (outer_ctx)
979 ctx->outer = outer_ctx;
980 ctx->cb = outer_ctx->cb;
981 ctx->cb.block = NULL;
982 ctx->depth = outer_ctx->depth + 1;
984 else
986 ctx->cb.src_fn = current_function_decl;
987 ctx->cb.dst_fn = current_function_decl;
988 ctx->cb.src_node = cgraph_node::get (current_function_decl);
989 gcc_checking_assert (ctx->cb.src_node);
990 ctx->cb.dst_node = ctx->cb.src_node;
991 ctx->cb.src_cfun = cfun;
992 ctx->cb.copy_decl = omp_copy_decl;
993 ctx->cb.eh_lp_nr = 0;
994 ctx->cb.transform_call_graph_edges = CB_CGE_MOVE;
995 ctx->cb.adjust_array_error_bounds = true;
996 ctx->cb.dont_remap_vla_if_no_change = true;
997 ctx->depth = 1;
1000 ctx->cb.decl_map = new hash_map<tree, tree>;
1002 return ctx;
1005 static gimple_seq maybe_catch_exception (gimple_seq);
1007 /* Finalize task copyfn. */
1009 static void
1010 finalize_task_copyfn (gomp_task *task_stmt)
1012 struct function *child_cfun;
1013 tree child_fn;
1014 gimple_seq seq = NULL, new_seq;
1015 gbind *bind;
1017 child_fn = gimple_omp_task_copy_fn (task_stmt);
1018 if (child_fn == NULL_TREE)
1019 return;
1021 child_cfun = DECL_STRUCT_FUNCTION (child_fn);
1022 DECL_STRUCT_FUNCTION (child_fn)->curr_properties = cfun->curr_properties;
1024 push_cfun (child_cfun);
1025 bind = gimplify_body (child_fn, false);
1026 gimple_seq_add_stmt (&seq, bind);
1027 new_seq = maybe_catch_exception (seq);
1028 if (new_seq != seq)
1030 bind = gimple_build_bind (NULL, new_seq, NULL);
1031 seq = NULL;
1032 gimple_seq_add_stmt (&seq, bind);
1034 gimple_set_body (child_fn, seq);
1035 pop_cfun ();
1037 /* Inform the callgraph about the new function. */
1038 cgraph_node *node = cgraph_node::get_create (child_fn);
1039 node->parallelized_function = 1;
1040 cgraph_node::add_new_function (child_fn, false);
1043 /* Destroy a omp_context data structures. Called through the splay tree
1044 value delete callback. */
1046 static void
1047 delete_omp_context (splay_tree_value value)
1049 omp_context *ctx = (omp_context *) value;
1051 delete ctx->cb.decl_map;
1053 if (ctx->field_map)
1054 splay_tree_delete (ctx->field_map);
1055 if (ctx->sfield_map)
1056 splay_tree_delete (ctx->sfield_map);
1058 /* We hijacked DECL_ABSTRACT_ORIGIN earlier. We need to clear it before
1059 it produces corrupt debug information. */
1060 if (ctx->record_type)
1062 tree t;
1063 for (t = TYPE_FIELDS (ctx->record_type); t ; t = DECL_CHAIN (t))
1064 DECL_ABSTRACT_ORIGIN (t) = NULL;
1066 if (ctx->srecord_type)
1068 tree t;
1069 for (t = TYPE_FIELDS (ctx->srecord_type); t ; t = DECL_CHAIN (t))
1070 DECL_ABSTRACT_ORIGIN (t) = NULL;
1073 if (ctx->task_reduction_map)
1075 ctx->task_reductions.release ();
1076 delete ctx->task_reduction_map;
1079 delete ctx->lastprivate_conditional_map;
1080 delete ctx->allocate_map;
1082 XDELETE (ctx);
1085 /* Fix up RECEIVER_DECL with a type that has been remapped to the child
1086 context. */
1088 static void
1089 fixup_child_record_type (omp_context *ctx)
1091 tree f, type = ctx->record_type;
1093 if (!ctx->receiver_decl)
1094 return;
1095 /* ??? It isn't sufficient to just call remap_type here, because
1096 variably_modified_type_p doesn't work the way we expect for
1097 record types. Testing each field for whether it needs remapping
1098 and creating a new record by hand works, however. */
1099 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
1100 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
1101 break;
1102 if (f)
1104 tree name, new_fields = NULL;
1106 type = lang_hooks.types.make_type (RECORD_TYPE);
1107 name = DECL_NAME (TYPE_NAME (ctx->record_type));
1108 name = build_decl (DECL_SOURCE_LOCATION (ctx->receiver_decl),
1109 TYPE_DECL, name, type);
1110 TYPE_NAME (type) = name;
1112 for (f = TYPE_FIELDS (ctx->record_type); f ; f = DECL_CHAIN (f))
1114 tree new_f = copy_node (f);
1115 DECL_CONTEXT (new_f) = type;
1116 TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &ctx->cb);
1117 DECL_CHAIN (new_f) = new_fields;
1118 walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &ctx->cb, NULL);
1119 walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r,
1120 &ctx->cb, NULL);
1121 walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
1122 &ctx->cb, NULL);
1123 new_fields = new_f;
1125 /* Arrange to be able to look up the receiver field
1126 given the sender field. */
1127 splay_tree_insert (ctx->field_map, (splay_tree_key) f,
1128 (splay_tree_value) new_f);
1130 TYPE_FIELDS (type) = nreverse (new_fields);
1131 layout_type (type);
1134 /* In a target region we never modify any of the pointers in *.omp_data_i,
1135 so attempt to help the optimizers. */
1136 if (is_gimple_omp_offloaded (ctx->stmt))
1137 type = build_qualified_type (type, TYPE_QUAL_CONST);
1139 TREE_TYPE (ctx->receiver_decl)
1140 = build_qualified_type (build_reference_type (type), TYPE_QUAL_RESTRICT);
1143 /* Instantiate decls as necessary in CTX to satisfy the data sharing
1144 specified by CLAUSES. */
1146 static void
1147 scan_sharing_clauses (tree clauses, omp_context *ctx)
1149 tree c, decl;
1150 bool scan_array_reductions = false;
1152 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1153 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_ALLOCATE
1154 && (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c) == NULL_TREE
1155 /* omp_default_mem_alloc is 1 */
1156 || !integer_onep (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c))
1157 || OMP_CLAUSE_ALLOCATE_ALIGN (c) != NULL_TREE))
1159 /* The allocate clauses that appear on a target construct or on
1160 constructs in a target region must specify an allocator expression
1161 unless a requires directive with the dynamic_allocators clause
1162 is present in the same compilation unit. */
1163 if (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c) == NULL_TREE
1164 && ((omp_requires_mask & OMP_REQUIRES_DYNAMIC_ALLOCATORS) == 0)
1165 && omp_maybe_offloaded_ctx (ctx))
1166 error_at (OMP_CLAUSE_LOCATION (c), "%<allocate%> clause must"
1167 " specify an allocator here");
1168 if (ctx->allocate_map == NULL)
1169 ctx->allocate_map = new hash_map<tree, tree>;
1170 tree val = integer_zero_node;
1171 if (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c))
1172 val = OMP_CLAUSE_ALLOCATE_ALLOCATOR (c);
1173 if (OMP_CLAUSE_ALLOCATE_ALIGN (c))
1174 val = build_tree_list (val, OMP_CLAUSE_ALLOCATE_ALIGN (c));
1175 ctx->allocate_map->put (OMP_CLAUSE_DECL (c), val);
1178 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1180 bool by_ref;
1182 switch (OMP_CLAUSE_CODE (c))
1184 case OMP_CLAUSE_PRIVATE:
1185 decl = OMP_CLAUSE_DECL (c);
1186 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
1187 goto do_private;
1188 else if (!is_variable_sized (decl))
1189 install_var_local (decl, ctx);
1190 break;
1192 case OMP_CLAUSE_SHARED:
1193 decl = OMP_CLAUSE_DECL (c);
1194 if (ctx->allocate_map && ctx->allocate_map->get (decl))
1195 ctx->allocate_map->remove (decl);
1196 /* Ignore shared directives in teams construct inside of
1197 target construct. */
1198 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
1199 && !is_host_teams_ctx (ctx))
1201 /* Global variables don't need to be copied,
1202 the receiver side will use them directly. */
1203 tree odecl = maybe_lookup_decl_in_outer_ctx (decl, ctx);
1204 if (is_global_var (odecl))
1205 break;
1206 insert_decl_map (&ctx->cb, decl, odecl);
1207 break;
1209 gcc_assert (is_taskreg_ctx (ctx));
1210 gcc_assert (!COMPLETE_TYPE_P (TREE_TYPE (decl))
1211 || !is_variable_sized (decl));
1212 /* Global variables don't need to be copied,
1213 the receiver side will use them directly. */
1214 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1215 break;
1216 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
1218 use_pointer_for_field (decl, ctx);
1219 break;
1221 by_ref = use_pointer_for_field (decl, NULL);
1222 if ((! TREE_READONLY (decl) && !OMP_CLAUSE_SHARED_READONLY (c))
1223 || TREE_ADDRESSABLE (decl)
1224 || by_ref
1225 || omp_privatize_by_reference (decl))
1227 by_ref = use_pointer_for_field (decl, ctx);
1228 install_var_field (decl, by_ref, 3, ctx);
1229 install_var_local (decl, ctx);
1230 break;
1232 /* We don't need to copy const scalar vars back. */
1233 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_FIRSTPRIVATE);
1234 goto do_private;
1236 case OMP_CLAUSE_REDUCTION:
1237 /* Collect 'reduction' clauses on OpenACC compute construct. */
1238 if (is_gimple_omp_oacc (ctx->stmt)
1239 && is_gimple_omp_offloaded (ctx->stmt))
1241 /* No 'reduction' clauses on OpenACC 'kernels'. */
1242 gcc_checking_assert (!is_oacc_kernels (ctx));
1243 /* Likewise, on OpenACC 'kernels' decomposed parts. */
1244 gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx));
1246 ctx->local_reduction_clauses
1247 = tree_cons (NULL, c, ctx->local_reduction_clauses);
1249 /* FALLTHRU */
1251 case OMP_CLAUSE_IN_REDUCTION:
1252 decl = OMP_CLAUSE_DECL (c);
1253 if (ctx->allocate_map
1254 && ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
1255 && (OMP_CLAUSE_REDUCTION_INSCAN (c)
1256 || OMP_CLAUSE_REDUCTION_TASK (c)))
1257 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
1258 || is_task_ctx (ctx)))
1260 /* For now. */
1261 if (ctx->allocate_map->get (decl))
1262 ctx->allocate_map->remove (decl);
1264 if (TREE_CODE (decl) == MEM_REF)
1266 tree t = TREE_OPERAND (decl, 0);
1267 if (TREE_CODE (t) == POINTER_PLUS_EXPR)
1268 t = TREE_OPERAND (t, 0);
1269 if (TREE_CODE (t) == INDIRECT_REF
1270 || TREE_CODE (t) == ADDR_EXPR)
1271 t = TREE_OPERAND (t, 0);
1272 if (is_omp_target (ctx->stmt))
1274 if (is_variable_sized (t))
1276 gcc_assert (DECL_HAS_VALUE_EXPR_P (t));
1277 t = DECL_VALUE_EXPR (t);
1278 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
1279 t = TREE_OPERAND (t, 0);
1280 gcc_assert (DECL_P (t));
1282 tree at = t;
1283 if (ctx->outer)
1284 scan_omp_op (&at, ctx->outer);
1285 tree nt = omp_copy_decl_1 (at, ctx->outer);
1286 splay_tree_insert (ctx->field_map,
1287 (splay_tree_key) &DECL_CONTEXT (t),
1288 (splay_tree_value) nt);
1289 if (at != t)
1290 splay_tree_insert (ctx->field_map,
1291 (splay_tree_key) &DECL_CONTEXT (at),
1292 (splay_tree_value) nt);
1293 break;
1295 install_var_local (t, ctx);
1296 if (is_taskreg_ctx (ctx)
1297 && (!is_global_var (maybe_lookup_decl_in_outer_ctx (t, ctx))
1298 || (is_task_ctx (ctx)
1299 && (TREE_CODE (TREE_TYPE (t)) == POINTER_TYPE
1300 || (TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE
1301 && (TREE_CODE (TREE_TYPE (TREE_TYPE (t)))
1302 == POINTER_TYPE)))))
1303 && !is_variable_sized (t)
1304 && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
1305 || (!OMP_CLAUSE_REDUCTION_TASK (c)
1306 && !is_task_ctx (ctx))))
1308 by_ref = use_pointer_for_field (t, NULL);
1309 if (is_task_ctx (ctx)
1310 && TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE
1311 && TREE_CODE (TREE_TYPE (TREE_TYPE (t))) == POINTER_TYPE)
1313 install_var_field (t, false, 1, ctx);
1314 install_var_field (t, by_ref, 2, ctx);
1316 else
1317 install_var_field (t, by_ref, 3, ctx);
1319 break;
1321 if (is_omp_target (ctx->stmt))
1323 tree at = decl;
1324 if (ctx->outer)
1325 scan_omp_op (&at, ctx->outer);
1326 tree nt = omp_copy_decl_1 (at, ctx->outer);
1327 splay_tree_insert (ctx->field_map,
1328 (splay_tree_key) &DECL_CONTEXT (decl),
1329 (splay_tree_value) nt);
1330 if (at != decl)
1331 splay_tree_insert (ctx->field_map,
1332 (splay_tree_key) &DECL_CONTEXT (at),
1333 (splay_tree_value) nt);
1334 break;
1336 if (is_task_ctx (ctx)
1337 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
1338 && OMP_CLAUSE_REDUCTION_TASK (c)
1339 && is_parallel_ctx (ctx)))
1341 /* Global variables don't need to be copied,
1342 the receiver side will use them directly. */
1343 if (!is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1345 by_ref = use_pointer_for_field (decl, ctx);
1346 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION)
1347 install_var_field (decl, by_ref, 3, ctx);
1349 install_var_local (decl, ctx);
1350 break;
1352 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
1353 && OMP_CLAUSE_REDUCTION_TASK (c))
1355 install_var_local (decl, ctx);
1356 break;
1358 goto do_private;
1360 case OMP_CLAUSE_LASTPRIVATE:
1361 /* Let the corresponding firstprivate clause create
1362 the variable. */
1363 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
1364 break;
1365 /* FALLTHRU */
1367 case OMP_CLAUSE_FIRSTPRIVATE:
1368 case OMP_CLAUSE_LINEAR:
1369 decl = OMP_CLAUSE_DECL (c);
1370 do_private:
1371 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
1372 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR
1373 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR)
1374 && is_gimple_omp_offloaded (ctx->stmt))
1376 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
1377 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR
1378 && lang_hooks.decls.omp_array_data (decl, true)))
1380 by_ref = !omp_privatize_by_reference (decl);
1381 install_var_field (decl, by_ref, 3, ctx);
1383 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR)
1385 if (TREE_CODE (decl) == INDIRECT_REF)
1386 decl = TREE_OPERAND (decl, 0);
1387 install_var_field (decl, true, 3, ctx);
1389 else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1390 install_var_field (decl, true, 3, ctx);
1391 else
1392 install_var_field (decl, false, 3, ctx);
1394 if (is_variable_sized (decl))
1396 if (is_task_ctx (ctx))
1398 if (ctx->allocate_map
1399 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
1401 /* For now. */
1402 if (ctx->allocate_map->get (decl))
1403 ctx->allocate_map->remove (decl);
1405 install_var_field (decl, false, 1, ctx);
1407 break;
1409 else if (is_taskreg_ctx (ctx))
1411 bool global
1412 = is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx));
1413 by_ref = use_pointer_for_field (decl, NULL);
1415 if (is_task_ctx (ctx)
1416 && (global || by_ref || omp_privatize_by_reference (decl)))
1418 if (ctx->allocate_map
1419 && ctx->allocate_map->get (decl))
1420 install_var_field (decl, by_ref, 32 | 1, ctx);
1421 else
1422 install_var_field (decl, false, 1, ctx);
1423 if (!global)
1424 install_var_field (decl, by_ref, 2, ctx);
1426 else if (!global)
1427 install_var_field (decl, by_ref, 3, ctx);
1429 install_var_local (decl, ctx);
1430 /* For descr arrays on target: firstprivatize data + attach ptr. */
1431 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
1432 && is_gimple_omp_offloaded (ctx->stmt)
1433 && !is_gimple_omp_oacc (ctx->stmt)
1434 && lang_hooks.decls.omp_array_data (decl, true))
1436 install_var_field (decl, false, 16 | 3, ctx);
1437 install_var_field (decl, true, 8 | 3, ctx);
1439 break;
1441 case OMP_CLAUSE_USE_DEVICE_PTR:
1442 case OMP_CLAUSE_USE_DEVICE_ADDR:
1443 decl = OMP_CLAUSE_DECL (c);
1445 /* Fortran array descriptors. */
1446 if (lang_hooks.decls.omp_array_data (decl, true))
1447 install_var_field (decl, false, 19, ctx);
1448 else if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
1449 && !omp_privatize_by_reference (decl)
1450 && !omp_is_allocatable_or_ptr (decl))
1451 || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1452 install_var_field (decl, true, 11, ctx);
1453 else
1454 install_var_field (decl, false, 11, ctx);
1455 if (DECL_SIZE (decl)
1456 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1458 tree decl2 = DECL_VALUE_EXPR (decl);
1459 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1460 decl2 = TREE_OPERAND (decl2, 0);
1461 gcc_assert (DECL_P (decl2));
1462 install_var_local (decl2, ctx);
1464 install_var_local (decl, ctx);
1465 break;
1467 case OMP_CLAUSE_HAS_DEVICE_ADDR:
1468 decl = OMP_CLAUSE_DECL (c);
1469 while (TREE_CODE (decl) == INDIRECT_REF
1470 || TREE_CODE (decl) == ARRAY_REF)
1471 decl = TREE_OPERAND (decl, 0);
1472 goto do_private;
1474 case OMP_CLAUSE_IS_DEVICE_PTR:
1475 decl = OMP_CLAUSE_DECL (c);
1476 goto do_private;
1478 case OMP_CLAUSE__LOOPTEMP_:
1479 case OMP_CLAUSE__REDUCTEMP_:
1480 gcc_assert (is_taskreg_ctx (ctx));
1481 decl = OMP_CLAUSE_DECL (c);
1482 install_var_field (decl, false, 3, ctx);
1483 install_var_local (decl, ctx);
1484 break;
1486 case OMP_CLAUSE_COPYPRIVATE:
1487 case OMP_CLAUSE_COPYIN:
1488 decl = OMP_CLAUSE_DECL (c);
1489 by_ref = use_pointer_for_field (decl, NULL);
1490 install_var_field (decl, by_ref, 3, ctx);
1491 break;
1493 case OMP_CLAUSE_FINAL:
1494 case OMP_CLAUSE_IF:
1495 case OMP_CLAUSE_NUM_THREADS:
1496 case OMP_CLAUSE_NUM_TEAMS:
1497 case OMP_CLAUSE_THREAD_LIMIT:
1498 case OMP_CLAUSE_DEVICE:
1499 case OMP_CLAUSE_SCHEDULE:
1500 case OMP_CLAUSE_DIST_SCHEDULE:
1501 case OMP_CLAUSE_DEPEND:
1502 case OMP_CLAUSE_PRIORITY:
1503 case OMP_CLAUSE_GRAINSIZE:
1504 case OMP_CLAUSE_NUM_TASKS:
1505 case OMP_CLAUSE_NUM_GANGS:
1506 case OMP_CLAUSE_NUM_WORKERS:
1507 case OMP_CLAUSE_VECTOR_LENGTH:
1508 case OMP_CLAUSE_DETACH:
1509 case OMP_CLAUSE_FILTER:
1510 if (ctx->outer)
1511 scan_omp_op (&OMP_CLAUSE_OPERAND (c, 0), ctx->outer);
1512 break;
1514 case OMP_CLAUSE_TO:
1515 case OMP_CLAUSE_FROM:
1516 case OMP_CLAUSE_MAP:
1517 if (ctx->outer)
1518 scan_omp_op (&OMP_CLAUSE_SIZE (c), ctx->outer);
1519 decl = OMP_CLAUSE_DECL (c);
1520 /* If requested, make 'decl' addressable. */
1521 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1522 && OMP_CLAUSE_MAP_DECL_MAKE_ADDRESSABLE (c))
1524 gcc_checking_assert (DECL_P (decl));
1526 bool decl_addressable = TREE_ADDRESSABLE (decl);
1527 if (!decl_addressable)
1529 if (!make_addressable_vars)
1530 make_addressable_vars = BITMAP_ALLOC (NULL);
1531 bitmap_set_bit (make_addressable_vars, DECL_UID (decl));
1532 TREE_ADDRESSABLE (decl) = 1;
1535 if (dump_enabled_p ())
1537 location_t loc = OMP_CLAUSE_LOCATION (c);
1538 const dump_user_location_t d_u_loc
1539 = dump_user_location_t::from_location_t (loc);
1540 /* PR100695 "Format decoder, quoting in 'dump_printf' etc." */
1541 #if __GNUC__ >= 10
1542 # pragma GCC diagnostic push
1543 # pragma GCC diagnostic ignored "-Wformat"
1544 #endif
1545 if (!decl_addressable)
1546 dump_printf_loc (MSG_NOTE, d_u_loc,
1547 "variable %<%T%>"
1548 " made addressable\n",
1549 decl);
1550 else
1551 dump_printf_loc (MSG_NOTE, d_u_loc,
1552 "variable %<%T%>"
1553 " already made addressable\n",
1554 decl);
1555 #if __GNUC__ >= 10
1556 # pragma GCC diagnostic pop
1557 #endif
1560 /* Done. */
1561 OMP_CLAUSE_MAP_DECL_MAKE_ADDRESSABLE (c) = 0;
1563 /* Global variables with "omp declare target" attribute
1564 don't need to be copied, the receiver side will use them
1565 directly. However, global variables with "omp declare target link"
1566 attribute need to be copied. Or when ALWAYS modifier is used. */
1567 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1568 && DECL_P (decl)
1569 && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
1570 && (OMP_CLAUSE_MAP_KIND (c)
1571 != GOMP_MAP_FIRSTPRIVATE_REFERENCE)
1572 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ATTACH
1573 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_DETACH)
1574 || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1575 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TO
1576 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_FROM
1577 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TOFROM
1578 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_TO_PSET
1579 && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
1580 && varpool_node::get_create (decl)->offloadable
1581 && !lookup_attribute ("omp declare target link",
1582 DECL_ATTRIBUTES (decl)))
1583 break;
1584 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1585 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER)
1587 /* Ignore GOMP_MAP_POINTER kind for arrays in regions that are
1588 not offloaded; there is nothing to map for those. */
1589 if (!is_gimple_omp_offloaded (ctx->stmt)
1590 && !POINTER_TYPE_P (TREE_TYPE (decl))
1591 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
1592 break;
1594 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1595 && DECL_P (decl)
1596 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
1597 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)
1598 && is_omp_target (ctx->stmt))
1600 /* If this is an offloaded region, an attach operation should
1601 only exist when the pointer variable is mapped in a prior
1602 clause. */
1603 if (is_gimple_omp_offloaded (ctx->stmt))
1604 gcc_assert
1605 (maybe_lookup_decl (decl, ctx)
1606 || (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
1607 && lookup_attribute ("omp declare target",
1608 DECL_ATTRIBUTES (decl))));
1610 /* By itself, attach/detach is generated as part of pointer
1611 variable mapping and should not create new variables in the
1612 offloaded region, however sender refs for it must be created
1613 for its address to be passed to the runtime. */
1614 tree field
1615 = build_decl (OMP_CLAUSE_LOCATION (c),
1616 FIELD_DECL, NULL_TREE, ptr_type_node);
1617 SET_DECL_ALIGN (field, TYPE_ALIGN (ptr_type_node));
1618 insert_field_into_struct (ctx->record_type, field);
1619 /* To not clash with a map of the pointer variable itself,
1620 attach/detach maps have their field looked up by the *clause*
1621 tree expression, not the decl. */
1622 gcc_assert (!splay_tree_lookup (ctx->field_map,
1623 (splay_tree_key) c));
1624 splay_tree_insert (ctx->field_map, (splay_tree_key) c,
1625 (splay_tree_value) field);
1626 break;
1628 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1629 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
1630 || (OMP_CLAUSE_MAP_KIND (c)
1631 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
1633 if (TREE_CODE (decl) == COMPONENT_REF
1634 || (TREE_CODE (decl) == INDIRECT_REF
1635 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
1636 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
1637 == REFERENCE_TYPE)))
1638 break;
1639 if (DECL_SIZE (decl)
1640 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1642 tree decl2 = DECL_VALUE_EXPR (decl);
1643 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1644 decl2 = TREE_OPERAND (decl2, 0);
1645 gcc_assert (DECL_P (decl2));
1646 install_var_local (decl2, ctx);
1648 install_var_local (decl, ctx);
1649 break;
1651 if (DECL_P (decl))
1653 if (DECL_SIZE (decl)
1654 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1656 tree decl2 = DECL_VALUE_EXPR (decl);
1657 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1658 decl2 = TREE_OPERAND (decl2, 0);
1659 gcc_assert (DECL_P (decl2));
1660 install_var_field (decl2, true, 3, ctx);
1661 install_var_local (decl2, ctx);
1662 install_var_local (decl, ctx);
1664 else
1666 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1667 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
1668 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
1669 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1670 install_var_field (decl, true, 7, ctx);
1671 else
1672 install_var_field (decl, true, 3, ctx);
1673 if (is_gimple_omp_offloaded (ctx->stmt)
1674 && !(is_gimple_omp_oacc (ctx->stmt)
1675 && OMP_CLAUSE_MAP_IN_REDUCTION (c)))
1676 install_var_local (decl, ctx);
1679 else
1681 tree base = get_base_address (decl);
1682 tree nc = OMP_CLAUSE_CHAIN (c);
1683 if (DECL_P (base)
1684 && nc != NULL_TREE
1685 && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
1686 && OMP_CLAUSE_DECL (nc) == base
1687 && OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_POINTER
1688 && integer_zerop (OMP_CLAUSE_SIZE (nc)))
1690 OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c) = 1;
1691 OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (nc) = 1;
1693 else
1695 if (ctx->outer)
1697 scan_omp_op (&OMP_CLAUSE_DECL (c), ctx->outer);
1698 decl = OMP_CLAUSE_DECL (c);
1700 gcc_assert (!splay_tree_lookup (ctx->field_map,
1701 (splay_tree_key) decl));
1702 tree field
1703 = build_decl (OMP_CLAUSE_LOCATION (c),
1704 FIELD_DECL, NULL_TREE, ptr_type_node);
1705 SET_DECL_ALIGN (field, TYPE_ALIGN (ptr_type_node));
1706 insert_field_into_struct (ctx->record_type, field);
1707 splay_tree_insert (ctx->field_map, (splay_tree_key) decl,
1708 (splay_tree_value) field);
1711 break;
1713 case OMP_CLAUSE_ORDER:
1714 ctx->order_concurrent = true;
1715 break;
1717 case OMP_CLAUSE_BIND:
1718 ctx->loop_p = true;
1719 break;
1721 case OMP_CLAUSE_NOWAIT:
1722 case OMP_CLAUSE_ORDERED:
1723 case OMP_CLAUSE_COLLAPSE:
1724 case OMP_CLAUSE_UNTIED:
1725 case OMP_CLAUSE_MERGEABLE:
1726 case OMP_CLAUSE_PROC_BIND:
1727 case OMP_CLAUSE_SAFELEN:
1728 case OMP_CLAUSE_SIMDLEN:
1729 case OMP_CLAUSE_THREADS:
1730 case OMP_CLAUSE_SIMD:
1731 case OMP_CLAUSE_NOGROUP:
1732 case OMP_CLAUSE_DEFAULTMAP:
1733 case OMP_CLAUSE_ASYNC:
1734 case OMP_CLAUSE_WAIT:
1735 case OMP_CLAUSE_GANG:
1736 case OMP_CLAUSE_WORKER:
1737 case OMP_CLAUSE_VECTOR:
1738 case OMP_CLAUSE_INDEPENDENT:
1739 case OMP_CLAUSE_AUTO:
1740 case OMP_CLAUSE_SEQ:
1741 case OMP_CLAUSE_TILE:
1742 case OMP_CLAUSE__SIMT_:
1743 case OMP_CLAUSE_DEFAULT:
1744 case OMP_CLAUSE_NONTEMPORAL:
1745 case OMP_CLAUSE_IF_PRESENT:
1746 case OMP_CLAUSE_FINALIZE:
1747 case OMP_CLAUSE_TASK_REDUCTION:
1748 case OMP_CLAUSE_ALLOCATE:
1749 break;
1751 case OMP_CLAUSE_ALIGNED:
1752 decl = OMP_CLAUSE_DECL (c);
1753 if (is_global_var (decl)
1754 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1755 install_var_local (decl, ctx);
1756 break;
1758 case OMP_CLAUSE__CONDTEMP_:
1759 decl = OMP_CLAUSE_DECL (c);
1760 if (is_parallel_ctx (ctx))
1762 install_var_field (decl, false, 3, ctx);
1763 install_var_local (decl, ctx);
1765 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
1766 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD
1767 && !OMP_CLAUSE__CONDTEMP__ITER (c))
1768 install_var_local (decl, ctx);
1769 break;
1771 case OMP_CLAUSE__CACHE_:
1772 case OMP_CLAUSE_NOHOST:
1773 default:
1774 gcc_unreachable ();
1778 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1780 switch (OMP_CLAUSE_CODE (c))
1782 case OMP_CLAUSE_LASTPRIVATE:
1783 /* Let the corresponding firstprivate clause create
1784 the variable. */
1785 if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
1786 scan_array_reductions = true;
1787 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
1788 break;
1789 /* FALLTHRU */
1791 case OMP_CLAUSE_FIRSTPRIVATE:
1792 case OMP_CLAUSE_PRIVATE:
1793 case OMP_CLAUSE_LINEAR:
1794 case OMP_CLAUSE_HAS_DEVICE_ADDR:
1795 case OMP_CLAUSE_IS_DEVICE_PTR:
1796 decl = OMP_CLAUSE_DECL (c);
1797 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR)
1799 while (TREE_CODE (decl) == INDIRECT_REF
1800 || TREE_CODE (decl) == ARRAY_REF)
1801 decl = TREE_OPERAND (decl, 0);
1804 if (is_variable_sized (decl))
1806 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
1807 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR
1808 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR)
1809 && is_gimple_omp_offloaded (ctx->stmt))
1811 tree decl2 = DECL_VALUE_EXPR (decl);
1812 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1813 decl2 = TREE_OPERAND (decl2, 0);
1814 gcc_assert (DECL_P (decl2));
1815 install_var_local (decl2, ctx);
1816 fixup_remapped_decl (decl2, ctx, false);
1818 install_var_local (decl, ctx);
1820 fixup_remapped_decl (decl, ctx,
1821 OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
1822 && OMP_CLAUSE_PRIVATE_DEBUG (c));
1823 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
1824 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
1825 scan_array_reductions = true;
1826 break;
1828 case OMP_CLAUSE_REDUCTION:
1829 case OMP_CLAUSE_IN_REDUCTION:
1830 decl = OMP_CLAUSE_DECL (c);
1831 if (TREE_CODE (decl) != MEM_REF && !is_omp_target (ctx->stmt))
1833 if (is_variable_sized (decl))
1834 install_var_local (decl, ctx);
1835 fixup_remapped_decl (decl, ctx, false);
1837 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
1838 scan_array_reductions = true;
1839 break;
1841 case OMP_CLAUSE_TASK_REDUCTION:
1842 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
1843 scan_array_reductions = true;
1844 break;
1846 case OMP_CLAUSE_SHARED:
1847 /* Ignore shared directives in teams construct inside of
1848 target construct. */
1849 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
1850 && !is_host_teams_ctx (ctx))
1851 break;
1852 decl = OMP_CLAUSE_DECL (c);
1853 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1854 break;
1855 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
1857 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl,
1858 ctx->outer)))
1859 break;
1860 bool by_ref = use_pointer_for_field (decl, ctx);
1861 install_var_field (decl, by_ref, 11, ctx);
1862 break;
1864 fixup_remapped_decl (decl, ctx, false);
1865 break;
1867 case OMP_CLAUSE_MAP:
1868 if (!is_gimple_omp_offloaded (ctx->stmt))
1869 break;
1870 decl = OMP_CLAUSE_DECL (c);
1871 if (DECL_P (decl)
1872 && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
1873 && (OMP_CLAUSE_MAP_KIND (c)
1874 != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
1875 || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1876 && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
1877 && varpool_node::get_create (decl)->offloadable)
1878 break;
1879 if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
1880 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)
1881 && is_omp_target (ctx->stmt)
1882 && !is_gimple_omp_offloaded (ctx->stmt))
1883 break;
1884 if (DECL_P (decl))
1886 if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
1887 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
1888 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
1889 && !COMPLETE_TYPE_P (TREE_TYPE (decl)))
1891 tree new_decl = lookup_decl (decl, ctx);
1892 TREE_TYPE (new_decl)
1893 = remap_type (TREE_TYPE (decl), &ctx->cb);
1895 else if (DECL_SIZE (decl)
1896 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1898 tree decl2 = DECL_VALUE_EXPR (decl);
1899 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1900 decl2 = TREE_OPERAND (decl2, 0);
1901 gcc_assert (DECL_P (decl2));
1902 fixup_remapped_decl (decl2, ctx, false);
1903 fixup_remapped_decl (decl, ctx, true);
1905 else
1906 fixup_remapped_decl (decl, ctx, false);
1908 break;
1910 case OMP_CLAUSE_COPYPRIVATE:
1911 case OMP_CLAUSE_COPYIN:
1912 case OMP_CLAUSE_DEFAULT:
1913 case OMP_CLAUSE_IF:
1914 case OMP_CLAUSE_NUM_THREADS:
1915 case OMP_CLAUSE_NUM_TEAMS:
1916 case OMP_CLAUSE_THREAD_LIMIT:
1917 case OMP_CLAUSE_DEVICE:
1918 case OMP_CLAUSE_SCHEDULE:
1919 case OMP_CLAUSE_DIST_SCHEDULE:
1920 case OMP_CLAUSE_NOWAIT:
1921 case OMP_CLAUSE_ORDERED:
1922 case OMP_CLAUSE_COLLAPSE:
1923 case OMP_CLAUSE_UNTIED:
1924 case OMP_CLAUSE_FINAL:
1925 case OMP_CLAUSE_MERGEABLE:
1926 case OMP_CLAUSE_PROC_BIND:
1927 case OMP_CLAUSE_SAFELEN:
1928 case OMP_CLAUSE_SIMDLEN:
1929 case OMP_CLAUSE_ALIGNED:
1930 case OMP_CLAUSE_DEPEND:
1931 case OMP_CLAUSE_DETACH:
1932 case OMP_CLAUSE_ALLOCATE:
1933 case OMP_CLAUSE__LOOPTEMP_:
1934 case OMP_CLAUSE__REDUCTEMP_:
1935 case OMP_CLAUSE_TO:
1936 case OMP_CLAUSE_FROM:
1937 case OMP_CLAUSE_PRIORITY:
1938 case OMP_CLAUSE_GRAINSIZE:
1939 case OMP_CLAUSE_NUM_TASKS:
1940 case OMP_CLAUSE_THREADS:
1941 case OMP_CLAUSE_SIMD:
1942 case OMP_CLAUSE_NOGROUP:
1943 case OMP_CLAUSE_DEFAULTMAP:
1944 case OMP_CLAUSE_ORDER:
1945 case OMP_CLAUSE_BIND:
1946 case OMP_CLAUSE_USE_DEVICE_PTR:
1947 case OMP_CLAUSE_USE_DEVICE_ADDR:
1948 case OMP_CLAUSE_NONTEMPORAL:
1949 case OMP_CLAUSE_ASYNC:
1950 case OMP_CLAUSE_WAIT:
1951 case OMP_CLAUSE_NUM_GANGS:
1952 case OMP_CLAUSE_NUM_WORKERS:
1953 case OMP_CLAUSE_VECTOR_LENGTH:
1954 case OMP_CLAUSE_GANG:
1955 case OMP_CLAUSE_WORKER:
1956 case OMP_CLAUSE_VECTOR:
1957 case OMP_CLAUSE_INDEPENDENT:
1958 case OMP_CLAUSE_AUTO:
1959 case OMP_CLAUSE_SEQ:
1960 case OMP_CLAUSE_TILE:
1961 case OMP_CLAUSE__SIMT_:
1962 case OMP_CLAUSE_IF_PRESENT:
1963 case OMP_CLAUSE_FINALIZE:
1964 case OMP_CLAUSE_FILTER:
1965 case OMP_CLAUSE__CONDTEMP_:
1966 break;
1968 case OMP_CLAUSE__CACHE_:
1969 case OMP_CLAUSE_NOHOST:
1970 default:
1971 gcc_unreachable ();
1975 gcc_checking_assert (!scan_array_reductions
1976 || !is_gimple_omp_oacc (ctx->stmt));
1977 if (scan_array_reductions)
1979 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1980 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
1981 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
1982 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
1983 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
1985 omp_context *rctx = ctx;
1986 if (is_omp_target (ctx->stmt))
1987 rctx = ctx->outer;
1988 scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c), rctx);
1989 scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), rctx);
1991 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
1992 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
1993 scan_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
1994 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
1995 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
1996 scan_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
2000 /* Create a new name for omp child function. Returns an identifier. */
2002 static tree
2003 create_omp_child_function_name (bool task_copy)
2005 return clone_function_name_numbered (current_function_decl,
2006 task_copy ? "_omp_cpyfn" : "_omp_fn");
2009 /* Return true if CTX may belong to offloaded code: either if current function
2010 is offloaded, or any enclosing context corresponds to a target region. */
2012 static bool
2013 omp_maybe_offloaded_ctx (omp_context *ctx)
2015 if (cgraph_node::get (current_function_decl)->offloadable)
2016 return true;
2017 for (; ctx; ctx = ctx->outer)
2018 if (is_gimple_omp_offloaded (ctx->stmt))
2019 return true;
2020 return false;
2023 /* Build a decl for the omp child function. It'll not contain a body
2024 yet, just the bare decl. */
2026 static void
2027 create_omp_child_function (omp_context *ctx, bool task_copy)
2029 tree decl, type, name, t;
2031 name = create_omp_child_function_name (task_copy);
2032 if (task_copy)
2033 type = build_function_type_list (void_type_node, ptr_type_node,
2034 ptr_type_node, NULL_TREE);
2035 else
2036 type = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
2038 decl = build_decl (gimple_location (ctx->stmt), FUNCTION_DECL, name, type);
2040 gcc_checking_assert (!is_gimple_omp_oacc (ctx->stmt)
2041 || !task_copy);
2042 if (!task_copy)
2043 ctx->cb.dst_fn = decl;
2044 else
2045 gimple_omp_task_set_copy_fn (ctx->stmt, decl);
2047 TREE_STATIC (decl) = 1;
2048 TREE_USED (decl) = 1;
2049 DECL_ARTIFICIAL (decl) = 1;
2050 DECL_IGNORED_P (decl) = 0;
2051 TREE_PUBLIC (decl) = 0;
2052 DECL_UNINLINABLE (decl) = 1;
2053 DECL_EXTERNAL (decl) = 0;
2054 DECL_CONTEXT (decl) = NULL_TREE;
2055 DECL_INITIAL (decl) = make_node (BLOCK);
2056 BLOCK_SUPERCONTEXT (DECL_INITIAL (decl)) = decl;
2057 DECL_ATTRIBUTES (decl) = DECL_ATTRIBUTES (current_function_decl);
2058 /* Remove omp declare simd attribute from the new attributes. */
2059 if (tree a = lookup_attribute ("omp declare simd", DECL_ATTRIBUTES (decl)))
2061 while (tree a2 = lookup_attribute ("omp declare simd", TREE_CHAIN (a)))
2062 a = a2;
2063 a = TREE_CHAIN (a);
2064 for (tree *p = &DECL_ATTRIBUTES (decl); *p != a;)
2065 if (is_attribute_p ("omp declare simd", get_attribute_name (*p)))
2066 *p = TREE_CHAIN (*p);
2067 else
2069 tree chain = TREE_CHAIN (*p);
2070 *p = copy_node (*p);
2071 p = &TREE_CHAIN (*p);
2072 *p = chain;
2075 DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
2076 = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (current_function_decl);
2077 DECL_FUNCTION_SPECIFIC_TARGET (decl)
2078 = DECL_FUNCTION_SPECIFIC_TARGET (current_function_decl);
2079 DECL_FUNCTION_VERSIONED (decl)
2080 = DECL_FUNCTION_VERSIONED (current_function_decl);
2082 if (omp_maybe_offloaded_ctx (ctx))
2084 cgraph_node::get_create (decl)->offloadable = 1;
2085 if (ENABLE_OFFLOADING)
2086 g->have_offload = true;
2089 if (cgraph_node::get_create (decl)->offloadable)
2091 const char *target_attr = (is_gimple_omp_offloaded (ctx->stmt)
2092 ? "omp target entrypoint"
2093 : "omp declare target");
2094 if (lookup_attribute ("omp declare target",
2095 DECL_ATTRIBUTES (current_function_decl)))
2097 if (is_gimple_omp_offloaded (ctx->stmt))
2098 DECL_ATTRIBUTES (decl)
2099 = remove_attribute ("omp declare target",
2100 copy_list (DECL_ATTRIBUTES (decl)));
2101 else
2102 target_attr = NULL;
2104 if (target_attr)
2105 DECL_ATTRIBUTES (decl)
2106 = tree_cons (get_identifier (target_attr),
2107 NULL_TREE, DECL_ATTRIBUTES (decl));
2110 t = build_decl (DECL_SOURCE_LOCATION (decl),
2111 RESULT_DECL, NULL_TREE, void_type_node);
2112 DECL_ARTIFICIAL (t) = 1;
2113 DECL_IGNORED_P (t) = 1;
2114 DECL_CONTEXT (t) = decl;
2115 DECL_RESULT (decl) = t;
2117 tree data_name = get_identifier (".omp_data_i");
2118 t = build_decl (DECL_SOURCE_LOCATION (decl), PARM_DECL, data_name,
2119 ptr_type_node);
2120 DECL_ARTIFICIAL (t) = 1;
2121 DECL_NAMELESS (t) = 1;
2122 DECL_ARG_TYPE (t) = ptr_type_node;
2123 DECL_CONTEXT (t) = current_function_decl;
2124 TREE_USED (t) = 1;
2125 TREE_READONLY (t) = 1;
2126 DECL_ARGUMENTS (decl) = t;
2127 if (!task_copy)
2128 ctx->receiver_decl = t;
2129 else
2131 t = build_decl (DECL_SOURCE_LOCATION (decl),
2132 PARM_DECL, get_identifier (".omp_data_o"),
2133 ptr_type_node);
2134 DECL_ARTIFICIAL (t) = 1;
2135 DECL_NAMELESS (t) = 1;
2136 DECL_ARG_TYPE (t) = ptr_type_node;
2137 DECL_CONTEXT (t) = current_function_decl;
2138 TREE_USED (t) = 1;
2139 TREE_ADDRESSABLE (t) = 1;
2140 DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
2141 DECL_ARGUMENTS (decl) = t;
2144 /* Allocate memory for the function structure. The call to
2145 allocate_struct_function clobbers CFUN, so we need to restore
2146 it afterward. */
2147 push_struct_function (decl);
2148 cfun->function_end_locus = gimple_location (ctx->stmt);
2149 init_tree_ssa (cfun);
2150 pop_cfun ();
2153 /* Callback for walk_gimple_seq. Check if combined parallel
2154 contains gimple_omp_for_combined_into_p OMP_FOR. */
2156 tree
2157 omp_find_combined_for (gimple_stmt_iterator *gsi_p,
2158 bool *handled_ops_p,
2159 struct walk_stmt_info *wi)
2161 gimple *stmt = gsi_stmt (*gsi_p);
2163 *handled_ops_p = true;
2164 switch (gimple_code (stmt))
2166 WALK_SUBSTMTS;
2168 case GIMPLE_OMP_FOR:
2169 if (gimple_omp_for_combined_into_p (stmt)
2170 && gimple_omp_for_kind (stmt)
2171 == *(const enum gf_mask *) (wi->info))
2173 wi->info = stmt;
2174 return integer_zero_node;
2176 break;
2177 default:
2178 break;
2180 return NULL;
2183 /* Add _LOOPTEMP_/_REDUCTEMP_ clauses on OpenMP parallel or task. */
2185 static void
2186 add_taskreg_looptemp_clauses (enum gf_mask msk, gimple *stmt,
2187 omp_context *outer_ctx)
2189 struct walk_stmt_info wi;
2191 memset (&wi, 0, sizeof (wi));
2192 wi.val_only = true;
2193 wi.info = (void *) &msk;
2194 walk_gimple_seq (gimple_omp_body (stmt), omp_find_combined_for, NULL, &wi);
2195 if (wi.info != (void *) &msk)
2197 gomp_for *for_stmt = as_a <gomp_for *> ((gimple *) wi.info);
2198 struct omp_for_data fd;
2199 omp_extract_for_data (for_stmt, &fd, NULL);
2200 /* We need two temporaries with fd.loop.v type (istart/iend)
2201 and then (fd.collapse - 1) temporaries with the same
2202 type for count2 ... countN-1 vars if not constant. */
2203 size_t count = 2, i;
2204 tree type = fd.iter_type;
2205 if (fd.collapse > 1
2206 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
2208 count += fd.collapse - 1;
2209 /* If there are lastprivate clauses on the inner
2210 GIMPLE_OMP_FOR, add one more temporaries for the total number
2211 of iterations (product of count1 ... countN-1). */
2212 if (omp_find_clause (gimple_omp_for_clauses (for_stmt),
2213 OMP_CLAUSE_LASTPRIVATE)
2214 || (msk == GF_OMP_FOR_KIND_FOR
2215 && omp_find_clause (gimple_omp_parallel_clauses (stmt),
2216 OMP_CLAUSE_LASTPRIVATE)))
2218 tree temp = create_tmp_var (type);
2219 tree c = build_omp_clause (UNKNOWN_LOCATION,
2220 OMP_CLAUSE__LOOPTEMP_);
2221 insert_decl_map (&outer_ctx->cb, temp, temp);
2222 OMP_CLAUSE_DECL (c) = temp;
2223 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
2224 gimple_omp_taskreg_set_clauses (stmt, c);
2226 if (fd.non_rect
2227 && fd.last_nonrect == fd.first_nonrect + 1)
2228 if (tree v = gimple_omp_for_index (for_stmt, fd.last_nonrect))
2229 if (!TYPE_UNSIGNED (TREE_TYPE (v)))
2231 v = gimple_omp_for_index (for_stmt, fd.first_nonrect);
2232 tree type2 = TREE_TYPE (v);
2233 count++;
2234 for (i = 0; i < 3; i++)
2236 tree temp = create_tmp_var (type2);
2237 tree c = build_omp_clause (UNKNOWN_LOCATION,
2238 OMP_CLAUSE__LOOPTEMP_);
2239 insert_decl_map (&outer_ctx->cb, temp, temp);
2240 OMP_CLAUSE_DECL (c) = temp;
2241 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
2242 gimple_omp_taskreg_set_clauses (stmt, c);
2246 for (i = 0; i < count; i++)
2248 tree temp = create_tmp_var (type);
2249 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
2250 insert_decl_map (&outer_ctx->cb, temp, temp);
2251 OMP_CLAUSE_DECL (c) = temp;
2252 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
2253 gimple_omp_taskreg_set_clauses (stmt, c);
2256 if (msk == GF_OMP_FOR_KIND_TASKLOOP
2257 && omp_find_clause (gimple_omp_task_clauses (stmt),
2258 OMP_CLAUSE_REDUCTION))
2260 tree type = build_pointer_type (pointer_sized_int_node);
2261 tree temp = create_tmp_var (type);
2262 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
2263 insert_decl_map (&outer_ctx->cb, temp, temp);
2264 OMP_CLAUSE_DECL (c) = temp;
2265 OMP_CLAUSE_CHAIN (c) = gimple_omp_task_clauses (stmt);
2266 gimple_omp_task_set_clauses (stmt, c);
2270 /* Scan an OpenMP parallel directive. */
2272 static void
2273 scan_omp_parallel (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
2275 omp_context *ctx;
2276 tree name;
2277 gomp_parallel *stmt = as_a <gomp_parallel *> (gsi_stmt (*gsi));
2279 /* Ignore parallel directives with empty bodies, unless there
2280 are copyin clauses. */
2281 if (optimize > 0
2282 && empty_body_p (gimple_omp_body (stmt))
2283 && omp_find_clause (gimple_omp_parallel_clauses (stmt),
2284 OMP_CLAUSE_COPYIN) == NULL)
2286 gsi_replace (gsi, gimple_build_nop (), false);
2287 return;
2290 if (gimple_omp_parallel_combined_p (stmt))
2291 add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_FOR, stmt, outer_ctx);
2292 for (tree c = omp_find_clause (gimple_omp_parallel_clauses (stmt),
2293 OMP_CLAUSE_REDUCTION);
2294 c; c = omp_find_clause (OMP_CLAUSE_CHAIN (c), OMP_CLAUSE_REDUCTION))
2295 if (OMP_CLAUSE_REDUCTION_TASK (c))
2297 tree type = build_pointer_type (pointer_sized_int_node);
2298 tree temp = create_tmp_var (type);
2299 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
2300 if (outer_ctx)
2301 insert_decl_map (&outer_ctx->cb, temp, temp);
2302 OMP_CLAUSE_DECL (c) = temp;
2303 OMP_CLAUSE_CHAIN (c) = gimple_omp_parallel_clauses (stmt);
2304 gimple_omp_parallel_set_clauses (stmt, c);
2305 break;
2307 else if (OMP_CLAUSE_CHAIN (c) == NULL_TREE)
2308 break;
2310 ctx = new_omp_context (stmt, outer_ctx);
2311 taskreg_contexts.safe_push (ctx);
2312 if (taskreg_nesting_level > 1)
2313 ctx->is_nested = true;
2314 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2315 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2316 name = create_tmp_var_name (".omp_data_s");
2317 name = build_decl (gimple_location (stmt),
2318 TYPE_DECL, name, ctx->record_type);
2319 DECL_ARTIFICIAL (name) = 1;
2320 DECL_NAMELESS (name) = 1;
2321 TYPE_NAME (ctx->record_type) = name;
2322 TYPE_ARTIFICIAL (ctx->record_type) = 1;
2323 create_omp_child_function (ctx, false);
2324 gimple_omp_parallel_set_child_fn (stmt, ctx->cb.dst_fn);
2326 scan_sharing_clauses (gimple_omp_parallel_clauses (stmt), ctx);
2327 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2329 if (TYPE_FIELDS (ctx->record_type) == NULL)
2330 ctx->record_type = ctx->receiver_decl = NULL;
2333 /* Scan an OpenMP task directive. */
2335 static void
2336 scan_omp_task (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
2338 omp_context *ctx;
2339 tree name, t;
2340 gomp_task *stmt = as_a <gomp_task *> (gsi_stmt (*gsi));
2342 /* Ignore task directives with empty bodies, unless they have depend
2343 clause. */
2344 if (optimize > 0
2345 && gimple_omp_body (stmt)
2346 && empty_body_p (gimple_omp_body (stmt))
2347 && !omp_find_clause (gimple_omp_task_clauses (stmt), OMP_CLAUSE_DEPEND))
2349 gsi_replace (gsi, gimple_build_nop (), false);
2350 return;
2353 if (gimple_omp_task_taskloop_p (stmt))
2354 add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_TASKLOOP, stmt, outer_ctx);
2356 ctx = new_omp_context (stmt, outer_ctx);
2358 if (gimple_omp_task_taskwait_p (stmt))
2360 scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);
2361 return;
2364 taskreg_contexts.safe_push (ctx);
2365 if (taskreg_nesting_level > 1)
2366 ctx->is_nested = true;
2367 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2368 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2369 name = create_tmp_var_name (".omp_data_s");
2370 name = build_decl (gimple_location (stmt),
2371 TYPE_DECL, name, ctx->record_type);
2372 DECL_ARTIFICIAL (name) = 1;
2373 DECL_NAMELESS (name) = 1;
2374 TYPE_NAME (ctx->record_type) = name;
2375 TYPE_ARTIFICIAL (ctx->record_type) = 1;
2376 create_omp_child_function (ctx, false);
2377 gimple_omp_task_set_child_fn (stmt, ctx->cb.dst_fn);
2379 scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);
2381 if (ctx->srecord_type)
2383 name = create_tmp_var_name (".omp_data_a");
2384 name = build_decl (gimple_location (stmt),
2385 TYPE_DECL, name, ctx->srecord_type);
2386 DECL_ARTIFICIAL (name) = 1;
2387 DECL_NAMELESS (name) = 1;
2388 TYPE_NAME (ctx->srecord_type) = name;
2389 TYPE_ARTIFICIAL (ctx->srecord_type) = 1;
2390 create_omp_child_function (ctx, true);
2393 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2395 if (TYPE_FIELDS (ctx->record_type) == NULL)
2397 ctx->record_type = ctx->receiver_decl = NULL;
2398 t = build_int_cst (long_integer_type_node, 0);
2399 gimple_omp_task_set_arg_size (stmt, t);
2400 t = build_int_cst (long_integer_type_node, 1);
2401 gimple_omp_task_set_arg_align (stmt, t);
2405 /* Helper function for finish_taskreg_scan, called through walk_tree.
2406 If maybe_lookup_decl_in_outer_context returns non-NULL for some
2407 tree, replace it in the expression. */
2409 static tree
2410 finish_taskreg_remap (tree *tp, int *walk_subtrees, void *data)
2412 if (VAR_P (*tp))
2414 omp_context *ctx = (omp_context *) data;
2415 tree t = maybe_lookup_decl_in_outer_ctx (*tp, ctx);
2416 if (t != *tp)
2418 if (DECL_HAS_VALUE_EXPR_P (t))
2419 t = unshare_expr (DECL_VALUE_EXPR (t));
2420 *tp = t;
2422 *walk_subtrees = 0;
2424 else if (IS_TYPE_OR_DECL_P (*tp))
2425 *walk_subtrees = 0;
2426 return NULL_TREE;
2429 /* If any decls have been made addressable during scan_omp,
2430 adjust their fields if needed, and layout record types
2431 of parallel/task constructs. */
2433 static void
2434 finish_taskreg_scan (omp_context *ctx)
2436 if (ctx->record_type == NULL_TREE)
2437 return;
2439 /* If any make_addressable_vars were needed, verify all
2440 OMP_CLAUSE_SHARED clauses on GIMPLE_OMP_{PARALLEL,TASK,TEAMS}
2441 statements if use_pointer_for_field hasn't changed
2442 because of that. If it did, update field types now. */
2443 if (make_addressable_vars)
2445 tree c;
2447 for (c = gimple_omp_taskreg_clauses (ctx->stmt);
2448 c; c = OMP_CLAUSE_CHAIN (c))
2449 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
2450 && !OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
2452 tree decl = OMP_CLAUSE_DECL (c);
2454 /* Global variables don't need to be copied,
2455 the receiver side will use them directly. */
2456 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
2457 continue;
2458 if (!bitmap_bit_p (make_addressable_vars, DECL_UID (decl))
2459 || !use_pointer_for_field (decl, ctx))
2460 continue;
2461 tree field = lookup_field (decl, ctx);
2462 if (TREE_CODE (TREE_TYPE (field)) == POINTER_TYPE
2463 && TREE_TYPE (TREE_TYPE (field)) == TREE_TYPE (decl))
2464 continue;
2465 TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
2466 TREE_THIS_VOLATILE (field) = 0;
2467 DECL_USER_ALIGN (field) = 0;
2468 SET_DECL_ALIGN (field, TYPE_ALIGN (TREE_TYPE (field)));
2469 if (TYPE_ALIGN (ctx->record_type) < DECL_ALIGN (field))
2470 SET_TYPE_ALIGN (ctx->record_type, DECL_ALIGN (field));
2471 if (ctx->srecord_type)
2473 tree sfield = lookup_sfield (decl, ctx);
2474 TREE_TYPE (sfield) = TREE_TYPE (field);
2475 TREE_THIS_VOLATILE (sfield) = 0;
2476 DECL_USER_ALIGN (sfield) = 0;
2477 SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
2478 if (TYPE_ALIGN (ctx->srecord_type) < DECL_ALIGN (sfield))
2479 SET_TYPE_ALIGN (ctx->srecord_type, DECL_ALIGN (sfield));
2484 if (gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL)
2486 tree clauses = gimple_omp_parallel_clauses (ctx->stmt);
2487 tree c = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
2488 if (c)
2490 /* Move the _reductemp_ clause first. GOMP_parallel_reductions
2491 expects to find it at the start of data. */
2492 tree f = lookup_field (OMP_CLAUSE_DECL (c), ctx);
2493 tree *p = &TYPE_FIELDS (ctx->record_type);
2494 while (*p)
2495 if (*p == f)
2497 *p = DECL_CHAIN (*p);
2498 break;
2500 else
2501 p = &DECL_CHAIN (*p);
2502 DECL_CHAIN (f) = TYPE_FIELDS (ctx->record_type);
2503 TYPE_FIELDS (ctx->record_type) = f;
2505 layout_type (ctx->record_type);
2506 fixup_child_record_type (ctx);
2508 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
2510 layout_type (ctx->record_type);
2511 fixup_child_record_type (ctx);
2513 else
2515 location_t loc = gimple_location (ctx->stmt);
2516 tree *p, vla_fields = NULL_TREE, *q = &vla_fields;
2517 tree detach_clause
2518 = omp_find_clause (gimple_omp_task_clauses (ctx->stmt),
2519 OMP_CLAUSE_DETACH);
2520 /* Move VLA fields to the end. */
2521 p = &TYPE_FIELDS (ctx->record_type);
2522 while (*p)
2523 if (!TYPE_SIZE_UNIT (TREE_TYPE (*p))
2524 || ! TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (*p))))
2526 *q = *p;
2527 *p = TREE_CHAIN (*p);
2528 TREE_CHAIN (*q) = NULL_TREE;
2529 q = &TREE_CHAIN (*q);
2531 else
2532 p = &DECL_CHAIN (*p);
2533 *p = vla_fields;
2534 if (gimple_omp_task_taskloop_p (ctx->stmt))
2536 /* Move fields corresponding to first and second _looptemp_
2537 clause first. There are filled by GOMP_taskloop
2538 and thus need to be in specific positions. */
2539 tree clauses = gimple_omp_task_clauses (ctx->stmt);
2540 tree c1 = omp_find_clause (clauses, OMP_CLAUSE__LOOPTEMP_);
2541 tree c2 = omp_find_clause (OMP_CLAUSE_CHAIN (c1),
2542 OMP_CLAUSE__LOOPTEMP_);
2543 tree c3 = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
2544 tree f1 = lookup_field (OMP_CLAUSE_DECL (c1), ctx);
2545 tree f2 = lookup_field (OMP_CLAUSE_DECL (c2), ctx);
2546 tree f3 = c3 ? lookup_field (OMP_CLAUSE_DECL (c3), ctx) : NULL_TREE;
2547 p = &TYPE_FIELDS (ctx->record_type);
2548 while (*p)
2549 if (*p == f1 || *p == f2 || *p == f3)
2550 *p = DECL_CHAIN (*p);
2551 else
2552 p = &DECL_CHAIN (*p);
2553 DECL_CHAIN (f1) = f2;
2554 if (c3)
2556 DECL_CHAIN (f2) = f3;
2557 DECL_CHAIN (f3) = TYPE_FIELDS (ctx->record_type);
2559 else
2560 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->record_type);
2561 TYPE_FIELDS (ctx->record_type) = f1;
2562 if (ctx->srecord_type)
2564 f1 = lookup_sfield (OMP_CLAUSE_DECL (c1), ctx);
2565 f2 = lookup_sfield (OMP_CLAUSE_DECL (c2), ctx);
2566 if (c3)
2567 f3 = lookup_sfield (OMP_CLAUSE_DECL (c3), ctx);
2568 p = &TYPE_FIELDS (ctx->srecord_type);
2569 while (*p)
2570 if (*p == f1 || *p == f2 || *p == f3)
2571 *p = DECL_CHAIN (*p);
2572 else
2573 p = &DECL_CHAIN (*p);
2574 DECL_CHAIN (f1) = f2;
2575 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
2576 if (c3)
2578 DECL_CHAIN (f2) = f3;
2579 DECL_CHAIN (f3) = TYPE_FIELDS (ctx->srecord_type);
2581 else
2582 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
2583 TYPE_FIELDS (ctx->srecord_type) = f1;
2586 if (detach_clause)
2588 tree c, field;
2590 /* Look for a firstprivate clause with the detach event handle. */
2591 for (c = gimple_omp_taskreg_clauses (ctx->stmt);
2592 c; c = OMP_CLAUSE_CHAIN (c))
2594 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
2595 continue;
2596 if (maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_DECL (c), ctx)
2597 == OMP_CLAUSE_DECL (detach_clause))
2598 break;
2601 gcc_assert (c);
2602 field = lookup_field (OMP_CLAUSE_DECL (c), ctx);
2604 /* Move field corresponding to the detach clause first.
2605 This is filled by GOMP_task and needs to be in a
2606 specific position. */
2607 p = &TYPE_FIELDS (ctx->record_type);
2608 while (*p)
2609 if (*p == field)
2610 *p = DECL_CHAIN (*p);
2611 else
2612 p = &DECL_CHAIN (*p);
2613 DECL_CHAIN (field) = TYPE_FIELDS (ctx->record_type);
2614 TYPE_FIELDS (ctx->record_type) = field;
2615 if (ctx->srecord_type)
2617 field = lookup_sfield (OMP_CLAUSE_DECL (c), ctx);
2618 p = &TYPE_FIELDS (ctx->srecord_type);
2619 while (*p)
2620 if (*p == field)
2621 *p = DECL_CHAIN (*p);
2622 else
2623 p = &DECL_CHAIN (*p);
2624 DECL_CHAIN (field) = TYPE_FIELDS (ctx->srecord_type);
2625 TYPE_FIELDS (ctx->srecord_type) = field;
2628 layout_type (ctx->record_type);
2629 fixup_child_record_type (ctx);
2630 if (ctx->srecord_type)
2631 layout_type (ctx->srecord_type);
2632 tree t = fold_convert_loc (loc, long_integer_type_node,
2633 TYPE_SIZE_UNIT (ctx->record_type));
2634 if (TREE_CODE (t) != INTEGER_CST)
2636 t = unshare_expr (t);
2637 walk_tree (&t, finish_taskreg_remap, ctx, NULL);
2639 gimple_omp_task_set_arg_size (ctx->stmt, t);
2640 t = build_int_cst (long_integer_type_node,
2641 TYPE_ALIGN_UNIT (ctx->record_type));
2642 gimple_omp_task_set_arg_align (ctx->stmt, t);
2646 /* Find the enclosing offload context. */
2648 static omp_context *
2649 enclosing_target_ctx (omp_context *ctx)
2651 for (; ctx; ctx = ctx->outer)
2652 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET)
2653 break;
2655 return ctx;
2658 /* Return whether CTX's parent compute construct is an OpenACC 'kernels'
2659 construct.
2660 (This doesn't include OpenACC 'kernels' decomposed parts.) */
2662 static bool
2663 ctx_in_oacc_kernels_region (omp_context *ctx)
2665 for (;ctx != NULL; ctx = ctx->outer)
2667 gimple *stmt = ctx->stmt;
2668 if (gimple_code (stmt) == GIMPLE_OMP_TARGET
2669 && gimple_omp_target_kind (stmt) == GF_OMP_TARGET_KIND_OACC_KERNELS)
2670 return true;
2673 return false;
2676 /* Check the parallelism clauses inside a OpenACC 'kernels' region.
2677 (This doesn't include OpenACC 'kernels' decomposed parts.)
2678 Until kernels handling moves to use the same loop indirection
2679 scheme as parallel, we need to do this checking early. */
2681 static unsigned
2682 check_oacc_kernel_gwv (gomp_for *stmt, omp_context *ctx)
2684 bool checking = true;
2685 unsigned outer_mask = 0;
2686 unsigned this_mask = 0;
2687 bool has_seq = false, has_auto = false;
2689 if (ctx->outer)
2690 outer_mask = check_oacc_kernel_gwv (NULL, ctx->outer);
2691 if (!stmt)
2693 checking = false;
2694 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR)
2695 return outer_mask;
2696 stmt = as_a <gomp_for *> (ctx->stmt);
2699 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2701 switch (OMP_CLAUSE_CODE (c))
2703 case OMP_CLAUSE_GANG:
2704 this_mask |= GOMP_DIM_MASK (GOMP_DIM_GANG);
2705 break;
2706 case OMP_CLAUSE_WORKER:
2707 this_mask |= GOMP_DIM_MASK (GOMP_DIM_WORKER);
2708 break;
2709 case OMP_CLAUSE_VECTOR:
2710 this_mask |= GOMP_DIM_MASK (GOMP_DIM_VECTOR);
2711 break;
2712 case OMP_CLAUSE_SEQ:
2713 has_seq = true;
2714 break;
2715 case OMP_CLAUSE_AUTO:
2716 has_auto = true;
2717 break;
2718 default:
2719 break;
2723 if (checking)
2725 if (has_seq && (this_mask || has_auto))
2726 error_at (gimple_location (stmt), "%<seq%> overrides other"
2727 " OpenACC loop specifiers");
2728 else if (has_auto && this_mask)
2729 error_at (gimple_location (stmt), "%<auto%> conflicts with other"
2730 " OpenACC loop specifiers");
2732 if (this_mask & outer_mask)
2733 error_at (gimple_location (stmt), "inner loop uses same"
2734 " OpenACC parallelism as containing loop");
2737 return outer_mask | this_mask;
2740 /* Scan a GIMPLE_OMP_FOR. */
2742 static omp_context *
2743 scan_omp_for (gomp_for *stmt, omp_context *outer_ctx)
2745 omp_context *ctx;
2746 size_t i;
2747 tree clauses = gimple_omp_for_clauses (stmt);
2749 ctx = new_omp_context (stmt, outer_ctx);
2751 if (is_gimple_omp_oacc (stmt))
2753 omp_context *tgt = enclosing_target_ctx (outer_ctx);
2755 if (!(tgt && is_oacc_kernels (tgt)))
2756 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
2758 tree c_op0;
2759 switch (OMP_CLAUSE_CODE (c))
2761 case OMP_CLAUSE_GANG:
2762 c_op0 = OMP_CLAUSE_GANG_EXPR (c);
2763 break;
2765 case OMP_CLAUSE_WORKER:
2766 c_op0 = OMP_CLAUSE_WORKER_EXPR (c);
2767 break;
2769 case OMP_CLAUSE_VECTOR:
2770 c_op0 = OMP_CLAUSE_VECTOR_EXPR (c);
2771 break;
2773 default:
2774 continue;
2777 if (c_op0)
2779 /* By construction, this is impossible for OpenACC 'kernels'
2780 decomposed parts. */
2781 gcc_assert (!(tgt && is_oacc_kernels_decomposed_part (tgt)));
2783 error_at (OMP_CLAUSE_LOCATION (c),
2784 "argument not permitted on %qs clause",
2785 omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
2786 if (tgt)
2787 inform (gimple_location (tgt->stmt),
2788 "enclosing parent compute construct");
2789 else if (oacc_get_fn_attrib (current_function_decl))
2790 inform (DECL_SOURCE_LOCATION (current_function_decl),
2791 "enclosing routine");
2792 else
2793 gcc_unreachable ();
2797 if (tgt && is_oacc_kernels (tgt))
2798 check_oacc_kernel_gwv (stmt, ctx);
2800 /* Collect all variables named in reductions on this loop. Ensure
2801 that, if this loop has a reduction on some variable v, and there is
2802 a reduction on v somewhere in an outer context, then there is a
2803 reduction on v on all intervening loops as well. */
2804 tree local_reduction_clauses = NULL;
2805 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2807 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
2808 local_reduction_clauses
2809 = tree_cons (NULL, c, local_reduction_clauses);
2811 if (ctx->outer_reduction_clauses == NULL && ctx->outer != NULL)
2812 ctx->outer_reduction_clauses
2813 = chainon (unshare_expr (ctx->outer->local_reduction_clauses),
2814 ctx->outer->outer_reduction_clauses);
2815 tree outer_reduction_clauses = ctx->outer_reduction_clauses;
2816 tree local_iter = local_reduction_clauses;
2817 for (; local_iter; local_iter = TREE_CHAIN (local_iter))
2819 tree local_clause = TREE_VALUE (local_iter);
2820 tree local_var = OMP_CLAUSE_DECL (local_clause);
2821 tree_code local_op = OMP_CLAUSE_REDUCTION_CODE (local_clause);
2822 bool have_outer_reduction = false;
2823 tree ctx_iter = outer_reduction_clauses;
2824 for (; ctx_iter; ctx_iter = TREE_CHAIN (ctx_iter))
2826 tree outer_clause = TREE_VALUE (ctx_iter);
2827 tree outer_var = OMP_CLAUSE_DECL (outer_clause);
2828 tree_code outer_op = OMP_CLAUSE_REDUCTION_CODE (outer_clause);
2829 if (outer_var == local_var && outer_op != local_op)
2831 warning_at (OMP_CLAUSE_LOCATION (local_clause), 0,
2832 "conflicting reduction operations for %qE",
2833 local_var);
2834 inform (OMP_CLAUSE_LOCATION (outer_clause),
2835 "location of the previous reduction for %qE",
2836 outer_var);
2838 if (outer_var == local_var)
2840 have_outer_reduction = true;
2841 break;
2844 if (have_outer_reduction)
2846 /* There is a reduction on outer_var both on this loop and on
2847 some enclosing loop. Walk up the context tree until such a
2848 loop with a reduction on outer_var is found, and complain
2849 about all intervening loops that do not have such a
2850 reduction. */
2851 struct omp_context *curr_loop = ctx->outer;
2852 bool found = false;
2853 while (curr_loop != NULL)
2855 tree curr_iter = curr_loop->local_reduction_clauses;
2856 for (; curr_iter; curr_iter = TREE_CHAIN (curr_iter))
2858 tree curr_clause = TREE_VALUE (curr_iter);
2859 tree curr_var = OMP_CLAUSE_DECL (curr_clause);
2860 if (curr_var == local_var)
2862 found = true;
2863 break;
2866 if (!found)
2867 warning_at (gimple_location (curr_loop->stmt), 0,
2868 "nested loop in reduction needs "
2869 "reduction clause for %qE",
2870 local_var);
2871 else
2872 break;
2873 curr_loop = curr_loop->outer;
2877 ctx->local_reduction_clauses = local_reduction_clauses;
2878 ctx->outer_reduction_clauses
2879 = chainon (unshare_expr (ctx->local_reduction_clauses),
2880 ctx->outer_reduction_clauses);
2882 if (tgt && is_oacc_kernels (tgt))
2884 /* Strip out reductions, as they are not handled yet. */
2885 tree *prev_ptr = &clauses;
2887 while (tree probe = *prev_ptr)
2889 tree *next_ptr = &OMP_CLAUSE_CHAIN (probe);
2891 if (OMP_CLAUSE_CODE (probe) == OMP_CLAUSE_REDUCTION)
2892 *prev_ptr = *next_ptr;
2893 else
2894 prev_ptr = next_ptr;
2897 gimple_omp_for_set_clauses (stmt, clauses);
2901 scan_sharing_clauses (clauses, ctx);
2903 scan_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
2904 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
2906 scan_omp_op (gimple_omp_for_index_ptr (stmt, i), ctx);
2907 scan_omp_op (gimple_omp_for_initial_ptr (stmt, i), ctx);
2908 scan_omp_op (gimple_omp_for_final_ptr (stmt, i), ctx);
2909 scan_omp_op (gimple_omp_for_incr_ptr (stmt, i), ctx);
2911 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2912 return ctx;
2915 /* Duplicate #pragma omp simd, one for SIMT, another one for SIMD. */
2917 static void
2918 scan_omp_simd (gimple_stmt_iterator *gsi, gomp_for *stmt,
2919 omp_context *outer_ctx)
2921 gbind *bind = gimple_build_bind (NULL, NULL, NULL);
2922 gsi_replace (gsi, bind, false);
2923 gimple_seq seq = NULL;
2924 gimple *g = gimple_build_call_internal (IFN_GOMP_USE_SIMT, 0);
2925 tree cond = create_tmp_var_raw (integer_type_node);
2926 DECL_CONTEXT (cond) = current_function_decl;
2927 DECL_SEEN_IN_BIND_EXPR_P (cond) = 1;
2928 gimple_bind_set_vars (bind, cond);
2929 gimple_call_set_lhs (g, cond);
2930 gimple_seq_add_stmt (&seq, g);
2931 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
2932 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
2933 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
2934 g = gimple_build_cond (NE_EXPR, cond, integer_zero_node, lab1, lab2);
2935 gimple_seq_add_stmt (&seq, g);
2936 g = gimple_build_label (lab1);
2937 gimple_seq_add_stmt (&seq, g);
2938 gimple_seq new_seq = copy_gimple_seq_and_replace_locals (stmt);
2939 gomp_for *new_stmt = as_a <gomp_for *> (new_seq);
2940 tree clause = build_omp_clause (gimple_location (stmt), OMP_CLAUSE__SIMT_);
2941 OMP_CLAUSE_CHAIN (clause) = gimple_omp_for_clauses (new_stmt);
2942 gimple_omp_for_set_clauses (new_stmt, clause);
2943 gimple_seq_add_stmt (&seq, new_stmt);
2944 g = gimple_build_goto (lab3);
2945 gimple_seq_add_stmt (&seq, g);
2946 g = gimple_build_label (lab2);
2947 gimple_seq_add_stmt (&seq, g);
2948 gimple_seq_add_stmt (&seq, stmt);
2949 g = gimple_build_label (lab3);
2950 gimple_seq_add_stmt (&seq, g);
2951 gimple_bind_set_body (bind, seq);
2952 update_stmt (bind);
2953 scan_omp_for (new_stmt, outer_ctx);
2954 scan_omp_for (stmt, outer_ctx)->simt_stmt = new_stmt;
2957 static tree omp_find_scan (gimple_stmt_iterator *, bool *,
2958 struct walk_stmt_info *);
2959 static omp_context *maybe_lookup_ctx (gimple *);
2961 /* Duplicate #pragma omp simd, one for the scan input phase loop and one
2962 for scan phase loop. */
2964 static void
2965 scan_omp_simd_scan (gimple_stmt_iterator *gsi, gomp_for *stmt,
2966 omp_context *outer_ctx)
2968 /* The only change between inclusive and exclusive scan will be
2969 within the first simd loop, so just use inclusive in the
2970 worksharing loop. */
2971 outer_ctx->scan_inclusive = true;
2972 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_INCLUSIVE);
2973 OMP_CLAUSE_DECL (c) = integer_zero_node;
2975 gomp_scan *input_stmt = gimple_build_omp_scan (NULL, NULL_TREE);
2976 gomp_scan *scan_stmt = gimple_build_omp_scan (NULL, c);
2977 gsi_replace (gsi, input_stmt, false);
2978 gimple_seq input_body = NULL;
2979 gimple_seq_add_stmt (&input_body, stmt);
2980 gsi_insert_after (gsi, scan_stmt, GSI_NEW_STMT);
2982 gimple_stmt_iterator input1_gsi = gsi_none ();
2983 struct walk_stmt_info wi;
2984 memset (&wi, 0, sizeof (wi));
2985 wi.val_only = true;
2986 wi.info = (void *) &input1_gsi;
2987 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), omp_find_scan, NULL, &wi);
2988 gcc_assert (!gsi_end_p (input1_gsi));
2990 gimple *input_stmt1 = gsi_stmt (input1_gsi);
2991 gsi_next (&input1_gsi);
2992 gimple *scan_stmt1 = gsi_stmt (input1_gsi);
2993 gcc_assert (scan_stmt1 && gimple_code (scan_stmt1) == GIMPLE_OMP_SCAN);
2994 c = gimple_omp_scan_clauses (as_a <gomp_scan *> (scan_stmt1));
2995 if (c && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_EXCLUSIVE)
2996 std::swap (input_stmt1, scan_stmt1);
2998 gimple_seq input_body1 = gimple_omp_body (input_stmt1);
2999 gimple_omp_set_body (input_stmt1, NULL);
3001 gimple_seq scan_body = copy_gimple_seq_and_replace_locals (stmt);
3002 gomp_for *new_stmt = as_a <gomp_for *> (scan_body);
3004 gimple_omp_set_body (input_stmt1, input_body1);
3005 gimple_omp_set_body (scan_stmt1, NULL);
3007 gimple_stmt_iterator input2_gsi = gsi_none ();
3008 memset (&wi, 0, sizeof (wi));
3009 wi.val_only = true;
3010 wi.info = (void *) &input2_gsi;
3011 walk_gimple_seq_mod (gimple_omp_body_ptr (new_stmt), omp_find_scan,
3012 NULL, &wi);
3013 gcc_assert (!gsi_end_p (input2_gsi));
3015 gimple *input_stmt2 = gsi_stmt (input2_gsi);
3016 gsi_next (&input2_gsi);
3017 gimple *scan_stmt2 = gsi_stmt (input2_gsi);
3018 gcc_assert (scan_stmt2 && gimple_code (scan_stmt2) == GIMPLE_OMP_SCAN);
3019 if (c && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_EXCLUSIVE)
3020 std::swap (input_stmt2, scan_stmt2);
3022 gimple_omp_set_body (input_stmt2, NULL);
3024 gimple_omp_set_body (input_stmt, input_body);
3025 gimple_omp_set_body (scan_stmt, scan_body);
3027 omp_context *ctx = new_omp_context (input_stmt, outer_ctx);
3028 scan_omp (gimple_omp_body_ptr (input_stmt), ctx);
3030 ctx = new_omp_context (scan_stmt, outer_ctx);
3031 scan_omp (gimple_omp_body_ptr (scan_stmt), ctx);
3033 maybe_lookup_ctx (new_stmt)->for_simd_scan_phase = true;
3036 /* Scan an OpenMP sections directive. */
3038 static void
3039 scan_omp_sections (gomp_sections *stmt, omp_context *outer_ctx)
3041 omp_context *ctx;
3043 ctx = new_omp_context (stmt, outer_ctx);
3044 scan_sharing_clauses (gimple_omp_sections_clauses (stmt), ctx);
3045 scan_omp (gimple_omp_body_ptr (stmt), ctx);
3048 /* Scan an OpenMP single directive. */
3050 static void
3051 scan_omp_single (gomp_single *stmt, omp_context *outer_ctx)
3053 omp_context *ctx;
3054 tree name;
3056 ctx = new_omp_context (stmt, outer_ctx);
3057 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
3058 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
3059 name = create_tmp_var_name (".omp_copy_s");
3060 name = build_decl (gimple_location (stmt),
3061 TYPE_DECL, name, ctx->record_type);
3062 TYPE_NAME (ctx->record_type) = name;
3064 scan_sharing_clauses (gimple_omp_single_clauses (stmt), ctx);
3065 scan_omp (gimple_omp_body_ptr (stmt), ctx);
3067 if (TYPE_FIELDS (ctx->record_type) == NULL)
3068 ctx->record_type = NULL;
3069 else
3070 layout_type (ctx->record_type);
3073 /* Scan a GIMPLE_OMP_TARGET. */
3075 static void
3076 scan_omp_target (gomp_target *stmt, omp_context *outer_ctx)
3078 omp_context *ctx;
3079 tree name;
3080 bool offloaded = is_gimple_omp_offloaded (stmt);
3081 tree clauses = gimple_omp_target_clauses (stmt);
3083 ctx = new_omp_context (stmt, outer_ctx);
3084 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
3085 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
3086 name = create_tmp_var_name (".omp_data_t");
3087 name = build_decl (gimple_location (stmt),
3088 TYPE_DECL, name, ctx->record_type);
3089 DECL_ARTIFICIAL (name) = 1;
3090 DECL_NAMELESS (name) = 1;
3091 TYPE_NAME (ctx->record_type) = name;
3092 TYPE_ARTIFICIAL (ctx->record_type) = 1;
3094 if (offloaded)
3096 create_omp_child_function (ctx, false);
3097 gimple_omp_target_set_child_fn (stmt, ctx->cb.dst_fn);
3100 scan_sharing_clauses (clauses, ctx);
3101 scan_omp (gimple_omp_body_ptr (stmt), ctx);
3103 if (TYPE_FIELDS (ctx->record_type) == NULL)
3104 ctx->record_type = ctx->receiver_decl = NULL;
3105 else
3107 TYPE_FIELDS (ctx->record_type)
3108 = nreverse (TYPE_FIELDS (ctx->record_type));
3109 if (flag_checking)
3111 unsigned int align = DECL_ALIGN (TYPE_FIELDS (ctx->record_type));
3112 for (tree field = TYPE_FIELDS (ctx->record_type);
3113 field;
3114 field = DECL_CHAIN (field))
3115 gcc_assert (DECL_ALIGN (field) == align);
3117 layout_type (ctx->record_type);
3118 if (offloaded)
3119 fixup_child_record_type (ctx);
3122 if (ctx->teams_nested_p && ctx->nonteams_nested_p)
3124 error_at (gimple_location (stmt),
3125 "%<target%> construct with nested %<teams%> construct "
3126 "contains directives outside of the %<teams%> construct");
3127 gimple_omp_set_body (stmt, gimple_build_bind (NULL, NULL, NULL));
3131 /* Scan an OpenMP teams directive. */
3133 static void
3134 scan_omp_teams (gomp_teams *stmt, omp_context *outer_ctx)
3136 omp_context *ctx = new_omp_context (stmt, outer_ctx);
3138 if (!gimple_omp_teams_host (stmt))
3140 scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
3141 scan_omp (gimple_omp_body_ptr (stmt), ctx);
3142 return;
3144 taskreg_contexts.safe_push (ctx);
3145 gcc_assert (taskreg_nesting_level == 1);
3146 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
3147 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
3148 tree name = create_tmp_var_name (".omp_data_s");
3149 name = build_decl (gimple_location (stmt),
3150 TYPE_DECL, name, ctx->record_type);
3151 DECL_ARTIFICIAL (name) = 1;
3152 DECL_NAMELESS (name) = 1;
3153 TYPE_NAME (ctx->record_type) = name;
3154 TYPE_ARTIFICIAL (ctx->record_type) = 1;
3155 create_omp_child_function (ctx, false);
3156 gimple_omp_teams_set_child_fn (stmt, ctx->cb.dst_fn);
3158 scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
3159 scan_omp (gimple_omp_body_ptr (stmt), ctx);
3161 if (TYPE_FIELDS (ctx->record_type) == NULL)
3162 ctx->record_type = ctx->receiver_decl = NULL;
3165 /* Check nesting restrictions. */
3166 static bool
3167 check_omp_nesting_restrictions (gimple *stmt, omp_context *ctx)
3169 tree c;
3171 /* No nesting of non-OpenACC STMT (that is, an OpenMP one, or a GOMP builtin)
3172 inside an OpenACC CTX. */
3173 if (gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
3174 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE)
3175 /* ..., except for the atomic codes that OpenACC shares with OpenMP. */
3177 else if (!(is_gimple_omp (stmt)
3178 && is_gimple_omp_oacc (stmt)))
3180 if (oacc_get_fn_attrib (cfun->decl) != NULL)
3182 error_at (gimple_location (stmt),
3183 "non-OpenACC construct inside of OpenACC routine");
3184 return false;
3186 else
3187 for (omp_context *octx = ctx; octx != NULL; octx = octx->outer)
3188 if (is_gimple_omp (octx->stmt)
3189 && is_gimple_omp_oacc (octx->stmt))
3191 error_at (gimple_location (stmt),
3192 "non-OpenACC construct inside of OpenACC region");
3193 return false;
3197 if (ctx != NULL)
3199 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET
3200 && gimple_omp_target_kind (ctx->stmt) == GF_OMP_TARGET_KIND_REGION)
3202 c = omp_find_clause (gimple_omp_target_clauses (ctx->stmt),
3203 OMP_CLAUSE_DEVICE);
3204 if (c && OMP_CLAUSE_DEVICE_ANCESTOR (c))
3206 error_at (gimple_location (stmt),
3207 "OpenMP constructs are not allowed in target region "
3208 "with %<ancestor%>");
3209 return false;
3212 if (gimple_code (stmt) == GIMPLE_OMP_TEAMS && !ctx->teams_nested_p)
3213 ctx->teams_nested_p = true;
3214 else
3215 ctx->nonteams_nested_p = true;
3217 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SCAN
3218 && ctx->outer
3219 && gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
3220 ctx = ctx->outer;
3221 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
3222 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD
3223 && !ctx->loop_p)
3225 c = NULL_TREE;
3226 if (ctx->order_concurrent
3227 && (gimple_code (stmt) == GIMPLE_OMP_ORDERED
3228 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
3229 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE))
3231 error_at (gimple_location (stmt),
3232 "OpenMP constructs other than %<parallel%>, %<loop%>"
3233 " or %<simd%> may not be nested inside a region with"
3234 " the %<order(concurrent)%> clause");
3235 return false;
3237 if (gimple_code (stmt) == GIMPLE_OMP_ORDERED)
3239 c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
3240 if (omp_find_clause (c, OMP_CLAUSE_SIMD))
3242 if (omp_find_clause (c, OMP_CLAUSE_THREADS)
3243 && (ctx->outer == NULL
3244 || !gimple_omp_for_combined_into_p (ctx->stmt)
3245 || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR
3246 || (gimple_omp_for_kind (ctx->outer->stmt)
3247 != GF_OMP_FOR_KIND_FOR)
3248 || !gimple_omp_for_combined_p (ctx->outer->stmt)))
3250 error_at (gimple_location (stmt),
3251 "%<ordered simd threads%> must be closely "
3252 "nested inside of %<%s simd%> region",
3253 lang_GNU_Fortran () ? "do" : "for");
3254 return false;
3256 return true;
3259 else if (gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
3260 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE
3261 || gimple_code (stmt) == GIMPLE_OMP_SCAN)
3262 return true;
3263 else if (gimple_code (stmt) == GIMPLE_OMP_FOR
3264 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
3265 return true;
3266 error_at (gimple_location (stmt),
3267 "OpenMP constructs other than "
3268 "%<ordered simd%>, %<simd%>, %<loop%> or %<atomic%> may "
3269 "not be nested inside %<simd%> region");
3270 return false;
3272 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
3274 if ((gimple_code (stmt) != GIMPLE_OMP_FOR
3275 || (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_DISTRIBUTE
3276 && omp_find_clause (gimple_omp_for_clauses (stmt),
3277 OMP_CLAUSE_BIND) == NULL_TREE))
3278 && gimple_code (stmt) != GIMPLE_OMP_PARALLEL)
3280 error_at (gimple_location (stmt),
3281 "only %<distribute%>, %<parallel%> or %<loop%> "
3282 "regions are allowed to be strictly nested inside "
3283 "%<teams%> region");
3284 return false;
3287 else if (ctx->order_concurrent
3288 && gimple_code (stmt) != GIMPLE_OMP_PARALLEL
3289 && (gimple_code (stmt) != GIMPLE_OMP_FOR
3290 || gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_SIMD)
3291 && gimple_code (stmt) != GIMPLE_OMP_SCAN)
3293 if (ctx->loop_p)
3294 error_at (gimple_location (stmt),
3295 "OpenMP constructs other than %<parallel%>, %<loop%> or "
3296 "%<simd%> may not be nested inside a %<loop%> region");
3297 else
3298 error_at (gimple_location (stmt),
3299 "OpenMP constructs other than %<parallel%>, %<loop%> or "
3300 "%<simd%> may not be nested inside a region with "
3301 "the %<order(concurrent)%> clause");
3302 return false;
3305 switch (gimple_code (stmt))
3307 case GIMPLE_OMP_FOR:
3308 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_SIMD)
3309 return true;
3310 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_DISTRIBUTE)
3312 if (ctx != NULL && gimple_code (ctx->stmt) != GIMPLE_OMP_TEAMS)
3314 error_at (gimple_location (stmt),
3315 "%<distribute%> region must be strictly nested "
3316 "inside %<teams%> construct");
3317 return false;
3319 return true;
3321 /* We split taskloop into task and nested taskloop in it. */
3322 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP)
3323 return true;
3324 /* For now, hope this will change and loop bind(parallel) will not
3325 be allowed in lots of contexts. */
3326 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR
3327 && omp_find_clause (gimple_omp_for_clauses (stmt), OMP_CLAUSE_BIND))
3328 return true;
3329 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_OACC_LOOP)
3331 bool ok = false;
3333 if (ctx)
3334 switch (gimple_code (ctx->stmt))
3336 case GIMPLE_OMP_FOR:
3337 ok = (gimple_omp_for_kind (ctx->stmt)
3338 == GF_OMP_FOR_KIND_OACC_LOOP);
3339 break;
3341 case GIMPLE_OMP_TARGET:
3342 switch (gimple_omp_target_kind (ctx->stmt))
3344 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
3345 case GF_OMP_TARGET_KIND_OACC_KERNELS:
3346 case GF_OMP_TARGET_KIND_OACC_SERIAL:
3347 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED:
3348 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE:
3349 ok = true;
3350 break;
3352 default:
3353 break;
3356 default:
3357 break;
3359 else if (oacc_get_fn_attrib (current_function_decl))
3360 ok = true;
3361 if (!ok)
3363 error_at (gimple_location (stmt),
3364 "OpenACC loop directive must be associated with"
3365 " an OpenACC compute region");
3366 return false;
3369 /* FALLTHRU */
3370 case GIMPLE_CALL:
3371 if (is_gimple_call (stmt)
3372 && (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3373 == BUILT_IN_GOMP_CANCEL
3374 || DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3375 == BUILT_IN_GOMP_CANCELLATION_POINT))
3377 const char *bad = NULL;
3378 const char *kind = NULL;
3379 const char *construct
3380 = (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3381 == BUILT_IN_GOMP_CANCEL)
3382 ? "cancel"
3383 : "cancellation point";
3384 if (ctx == NULL)
3386 error_at (gimple_location (stmt), "orphaned %qs construct",
3387 construct);
3388 return false;
3390 switch (tree_fits_shwi_p (gimple_call_arg (stmt, 0))
3391 ? tree_to_shwi (gimple_call_arg (stmt, 0))
3392 : 0)
3394 case 1:
3395 if (gimple_code (ctx->stmt) != GIMPLE_OMP_PARALLEL)
3396 bad = "parallel";
3397 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3398 == BUILT_IN_GOMP_CANCEL
3399 && !integer_zerop (gimple_call_arg (stmt, 1)))
3400 ctx->cancellable = true;
3401 kind = "parallel";
3402 break;
3403 case 2:
3404 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
3405 || gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR)
3406 bad = "for";
3407 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3408 == BUILT_IN_GOMP_CANCEL
3409 && !integer_zerop (gimple_call_arg (stmt, 1)))
3411 ctx->cancellable = true;
3412 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3413 OMP_CLAUSE_NOWAIT))
3414 warning_at (gimple_location (stmt), 0,
3415 "%<cancel for%> inside "
3416 "%<nowait%> for construct");
3417 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3418 OMP_CLAUSE_ORDERED))
3419 warning_at (gimple_location (stmt), 0,
3420 "%<cancel for%> inside "
3421 "%<ordered%> for construct");
3423 kind = "for";
3424 break;
3425 case 4:
3426 if (gimple_code (ctx->stmt) != GIMPLE_OMP_SECTIONS
3427 && gimple_code (ctx->stmt) != GIMPLE_OMP_SECTION)
3428 bad = "sections";
3429 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3430 == BUILT_IN_GOMP_CANCEL
3431 && !integer_zerop (gimple_call_arg (stmt, 1)))
3433 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
3435 ctx->cancellable = true;
3436 if (omp_find_clause (gimple_omp_sections_clauses
3437 (ctx->stmt),
3438 OMP_CLAUSE_NOWAIT))
3439 warning_at (gimple_location (stmt), 0,
3440 "%<cancel sections%> inside "
3441 "%<nowait%> sections construct");
3443 else
3445 gcc_assert (ctx->outer
3446 && gimple_code (ctx->outer->stmt)
3447 == GIMPLE_OMP_SECTIONS);
3448 ctx->outer->cancellable = true;
3449 if (omp_find_clause (gimple_omp_sections_clauses
3450 (ctx->outer->stmt),
3451 OMP_CLAUSE_NOWAIT))
3452 warning_at (gimple_location (stmt), 0,
3453 "%<cancel sections%> inside "
3454 "%<nowait%> sections construct");
3457 kind = "sections";
3458 break;
3459 case 8:
3460 if (!is_task_ctx (ctx)
3461 && (!is_taskloop_ctx (ctx)
3462 || ctx->outer == NULL
3463 || !is_task_ctx (ctx->outer)))
3464 bad = "task";
3465 else
3467 for (omp_context *octx = ctx->outer;
3468 octx; octx = octx->outer)
3470 switch (gimple_code (octx->stmt))
3472 case GIMPLE_OMP_TASKGROUP:
3473 break;
3474 case GIMPLE_OMP_TARGET:
3475 if (gimple_omp_target_kind (octx->stmt)
3476 != GF_OMP_TARGET_KIND_REGION)
3477 continue;
3478 /* FALLTHRU */
3479 case GIMPLE_OMP_PARALLEL:
3480 case GIMPLE_OMP_TEAMS:
3481 error_at (gimple_location (stmt),
3482 "%<%s taskgroup%> construct not closely "
3483 "nested inside of %<taskgroup%> region",
3484 construct);
3485 return false;
3486 case GIMPLE_OMP_TASK:
3487 if (gimple_omp_task_taskloop_p (octx->stmt)
3488 && octx->outer
3489 && is_taskloop_ctx (octx->outer))
3491 tree clauses
3492 = gimple_omp_for_clauses (octx->outer->stmt);
3493 if (!omp_find_clause (clauses, OMP_CLAUSE_NOGROUP))
3494 break;
3496 continue;
3497 default:
3498 continue;
3500 break;
3502 ctx->cancellable = true;
3504 kind = "taskgroup";
3505 break;
3506 default:
3507 error_at (gimple_location (stmt), "invalid arguments");
3508 return false;
3510 if (bad)
3512 error_at (gimple_location (stmt),
3513 "%<%s %s%> construct not closely nested inside of %qs",
3514 construct, kind, bad);
3515 return false;
3518 /* FALLTHRU */
3519 case GIMPLE_OMP_SECTIONS:
3520 case GIMPLE_OMP_SINGLE:
3521 for (; ctx != NULL; ctx = ctx->outer)
3522 switch (gimple_code (ctx->stmt))
3524 case GIMPLE_OMP_FOR:
3525 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
3526 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
3527 break;
3528 /* FALLTHRU */
3529 case GIMPLE_OMP_SECTIONS:
3530 case GIMPLE_OMP_SINGLE:
3531 case GIMPLE_OMP_ORDERED:
3532 case GIMPLE_OMP_MASTER:
3533 case GIMPLE_OMP_MASKED:
3534 case GIMPLE_OMP_TASK:
3535 case GIMPLE_OMP_CRITICAL:
3536 if (is_gimple_call (stmt))
3538 if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3539 != BUILT_IN_GOMP_BARRIER)
3540 return true;
3541 error_at (gimple_location (stmt),
3542 "barrier region may not be closely nested inside "
3543 "of work-sharing, %<loop%>, %<critical%>, "
3544 "%<ordered%>, %<master%>, %<masked%>, explicit "
3545 "%<task%> or %<taskloop%> region");
3546 return false;
3548 error_at (gimple_location (stmt),
3549 "work-sharing region may not be closely nested inside "
3550 "of work-sharing, %<loop%>, %<critical%>, %<ordered%>, "
3551 "%<master%>, %<masked%>, explicit %<task%> or "
3552 "%<taskloop%> region");
3553 return false;
3554 case GIMPLE_OMP_PARALLEL:
3555 case GIMPLE_OMP_TEAMS:
3556 return true;
3557 case GIMPLE_OMP_TARGET:
3558 if (gimple_omp_target_kind (ctx->stmt)
3559 == GF_OMP_TARGET_KIND_REGION)
3560 return true;
3561 break;
3562 default:
3563 break;
3565 break;
3566 case GIMPLE_OMP_MASTER:
3567 case GIMPLE_OMP_MASKED:
3568 for (; ctx != NULL; ctx = ctx->outer)
3569 switch (gimple_code (ctx->stmt))
3571 case GIMPLE_OMP_FOR:
3572 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
3573 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
3574 break;
3575 /* FALLTHRU */
3576 case GIMPLE_OMP_SECTIONS:
3577 case GIMPLE_OMP_SINGLE:
3578 case GIMPLE_OMP_TASK:
3579 error_at (gimple_location (stmt),
3580 "%qs region may not be closely nested inside "
3581 "of work-sharing, %<loop%>, explicit %<task%> or "
3582 "%<taskloop%> region",
3583 gimple_code (stmt) == GIMPLE_OMP_MASTER
3584 ? "master" : "masked");
3585 return false;
3586 case GIMPLE_OMP_PARALLEL:
3587 case GIMPLE_OMP_TEAMS:
3588 return true;
3589 case GIMPLE_OMP_TARGET:
3590 if (gimple_omp_target_kind (ctx->stmt)
3591 == GF_OMP_TARGET_KIND_REGION)
3592 return true;
3593 break;
3594 default:
3595 break;
3597 break;
3598 case GIMPLE_OMP_SCOPE:
3599 for (; ctx != NULL; ctx = ctx->outer)
3600 switch (gimple_code (ctx->stmt))
3602 case GIMPLE_OMP_FOR:
3603 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
3604 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
3605 break;
3606 /* FALLTHRU */
3607 case GIMPLE_OMP_SECTIONS:
3608 case GIMPLE_OMP_SINGLE:
3609 case GIMPLE_OMP_TASK:
3610 case GIMPLE_OMP_CRITICAL:
3611 case GIMPLE_OMP_ORDERED:
3612 case GIMPLE_OMP_MASTER:
3613 case GIMPLE_OMP_MASKED:
3614 error_at (gimple_location (stmt),
3615 "%<scope%> region may not be closely nested inside "
3616 "of work-sharing, %<loop%>, explicit %<task%>, "
3617 "%<taskloop%>, %<critical%>, %<ordered%>, %<master%>, "
3618 "or %<masked%> region");
3619 return false;
3620 case GIMPLE_OMP_PARALLEL:
3621 case GIMPLE_OMP_TEAMS:
3622 return true;
3623 case GIMPLE_OMP_TARGET:
3624 if (gimple_omp_target_kind (ctx->stmt)
3625 == GF_OMP_TARGET_KIND_REGION)
3626 return true;
3627 break;
3628 default:
3629 break;
3631 break;
3632 case GIMPLE_OMP_TASK:
3633 for (c = gimple_omp_task_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
3634 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
3635 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
3636 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
3638 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
3639 error_at (OMP_CLAUSE_LOCATION (c),
3640 "%<depend(%s)%> is only allowed in %<omp ordered%>",
3641 kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
3642 return false;
3644 break;
3645 case GIMPLE_OMP_ORDERED:
3646 for (c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
3647 c; c = OMP_CLAUSE_CHAIN (c))
3649 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
3651 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREADS
3652 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SIMD);
3653 continue;
3655 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
3656 if (kind == OMP_CLAUSE_DEPEND_SOURCE
3657 || kind == OMP_CLAUSE_DEPEND_SINK)
3659 tree oclause;
3660 /* Look for containing ordered(N) loop. */
3661 if (ctx == NULL
3662 || gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
3663 || (oclause
3664 = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3665 OMP_CLAUSE_ORDERED)) == NULL_TREE)
3667 error_at (OMP_CLAUSE_LOCATION (c),
3668 "%<ordered%> construct with %<depend%> clause "
3669 "must be closely nested inside an %<ordered%> "
3670 "loop");
3671 return false;
3673 else if (OMP_CLAUSE_ORDERED_EXPR (oclause) == NULL_TREE)
3675 error_at (OMP_CLAUSE_LOCATION (c),
3676 "%<ordered%> construct with %<depend%> clause "
3677 "must be closely nested inside a loop with "
3678 "%<ordered%> clause with a parameter");
3679 return false;
3682 else
3684 error_at (OMP_CLAUSE_LOCATION (c),
3685 "invalid depend kind in omp %<ordered%> %<depend%>");
3686 return false;
3689 c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
3690 if (omp_find_clause (c, OMP_CLAUSE_SIMD))
3692 /* ordered simd must be closely nested inside of simd region,
3693 and simd region must not encounter constructs other than
3694 ordered simd, therefore ordered simd may be either orphaned,
3695 or ctx->stmt must be simd. The latter case is handled already
3696 earlier. */
3697 if (ctx != NULL)
3699 error_at (gimple_location (stmt),
3700 "%<ordered%> %<simd%> must be closely nested inside "
3701 "%<simd%> region");
3702 return false;
3705 for (; ctx != NULL; ctx = ctx->outer)
3706 switch (gimple_code (ctx->stmt))
3708 case GIMPLE_OMP_CRITICAL:
3709 case GIMPLE_OMP_TASK:
3710 case GIMPLE_OMP_ORDERED:
3711 ordered_in_taskloop:
3712 error_at (gimple_location (stmt),
3713 "%<ordered%> region may not be closely nested inside "
3714 "of %<critical%>, %<ordered%>, explicit %<task%> or "
3715 "%<taskloop%> region");
3716 return false;
3717 case GIMPLE_OMP_FOR:
3718 if (gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP)
3719 goto ordered_in_taskloop;
3720 tree o;
3721 o = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3722 OMP_CLAUSE_ORDERED);
3723 if (o == NULL)
3725 error_at (gimple_location (stmt),
3726 "%<ordered%> region must be closely nested inside "
3727 "a loop region with an %<ordered%> clause");
3728 return false;
3730 if (OMP_CLAUSE_ORDERED_EXPR (o) != NULL_TREE
3731 && omp_find_clause (c, OMP_CLAUSE_DEPEND) == NULL_TREE)
3733 error_at (gimple_location (stmt),
3734 "%<ordered%> region without %<depend%> clause may "
3735 "not be closely nested inside a loop region with "
3736 "an %<ordered%> clause with a parameter");
3737 return false;
3739 return true;
3740 case GIMPLE_OMP_TARGET:
3741 if (gimple_omp_target_kind (ctx->stmt)
3742 != GF_OMP_TARGET_KIND_REGION)
3743 break;
3744 /* FALLTHRU */
3745 case GIMPLE_OMP_PARALLEL:
3746 case GIMPLE_OMP_TEAMS:
3747 error_at (gimple_location (stmt),
3748 "%<ordered%> region must be closely nested inside "
3749 "a loop region with an %<ordered%> clause");
3750 return false;
3751 default:
3752 break;
3754 break;
3755 case GIMPLE_OMP_CRITICAL:
3757 tree this_stmt_name
3758 = gimple_omp_critical_name (as_a <gomp_critical *> (stmt));
3759 for (; ctx != NULL; ctx = ctx->outer)
3760 if (gomp_critical *other_crit
3761 = dyn_cast <gomp_critical *> (ctx->stmt))
3762 if (this_stmt_name == gimple_omp_critical_name (other_crit))
3764 error_at (gimple_location (stmt),
3765 "%<critical%> region may not be nested inside "
3766 "a %<critical%> region with the same name");
3767 return false;
3770 break;
3771 case GIMPLE_OMP_TEAMS:
3772 if (ctx == NULL)
3773 break;
3774 else if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET
3775 || (gimple_omp_target_kind (ctx->stmt)
3776 != GF_OMP_TARGET_KIND_REGION))
3778 /* Teams construct can appear either strictly nested inside of
3779 target construct with no intervening stmts, or can be encountered
3780 only by initial task (so must not appear inside any OpenMP
3781 construct. */
3782 error_at (gimple_location (stmt),
3783 "%<teams%> construct must be closely nested inside of "
3784 "%<target%> construct or not nested in any OpenMP "
3785 "construct");
3786 return false;
3788 break;
3789 case GIMPLE_OMP_TARGET:
3790 for (c = gimple_omp_target_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
3791 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
3792 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
3793 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
3795 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
3796 error_at (OMP_CLAUSE_LOCATION (c),
3797 "%<depend(%s)%> is only allowed in %<omp ordered%>",
3798 kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
3799 return false;
3801 if (is_gimple_omp_offloaded (stmt)
3802 && oacc_get_fn_attrib (cfun->decl) != NULL)
3804 error_at (gimple_location (stmt),
3805 "OpenACC region inside of OpenACC routine, nested "
3806 "parallelism not supported yet");
3807 return false;
3809 for (; ctx != NULL; ctx = ctx->outer)
3811 if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET)
3813 if (is_gimple_omp (stmt)
3814 && is_gimple_omp_oacc (stmt)
3815 && is_gimple_omp (ctx->stmt))
3817 error_at (gimple_location (stmt),
3818 "OpenACC construct inside of non-OpenACC region");
3819 return false;
3821 continue;
3824 const char *stmt_name, *ctx_stmt_name;
3825 switch (gimple_omp_target_kind (stmt))
3827 case GF_OMP_TARGET_KIND_REGION: stmt_name = "target"; break;
3828 case GF_OMP_TARGET_KIND_DATA: stmt_name = "target data"; break;
3829 case GF_OMP_TARGET_KIND_UPDATE: stmt_name = "target update"; break;
3830 case GF_OMP_TARGET_KIND_ENTER_DATA:
3831 stmt_name = "target enter data"; break;
3832 case GF_OMP_TARGET_KIND_EXIT_DATA:
3833 stmt_name = "target exit data"; break;
3834 case GF_OMP_TARGET_KIND_OACC_PARALLEL: stmt_name = "parallel"; break;
3835 case GF_OMP_TARGET_KIND_OACC_KERNELS: stmt_name = "kernels"; break;
3836 case GF_OMP_TARGET_KIND_OACC_SERIAL: stmt_name = "serial"; break;
3837 case GF_OMP_TARGET_KIND_OACC_DATA: stmt_name = "data"; break;
3838 case GF_OMP_TARGET_KIND_OACC_UPDATE: stmt_name = "update"; break;
3839 case GF_OMP_TARGET_KIND_OACC_ENTER_DATA:
3840 stmt_name = "enter data"; break;
3841 case GF_OMP_TARGET_KIND_OACC_EXIT_DATA:
3842 stmt_name = "exit data"; break;
3843 case GF_OMP_TARGET_KIND_OACC_DECLARE: stmt_name = "declare"; break;
3844 case GF_OMP_TARGET_KIND_OACC_HOST_DATA: stmt_name = "host_data";
3845 break;
3846 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED:
3847 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE:
3848 case GF_OMP_TARGET_KIND_OACC_DATA_KERNELS:
3849 /* OpenACC 'kernels' decomposed parts. */
3850 stmt_name = "kernels"; break;
3851 default: gcc_unreachable ();
3853 switch (gimple_omp_target_kind (ctx->stmt))
3855 case GF_OMP_TARGET_KIND_REGION: ctx_stmt_name = "target"; break;
3856 case GF_OMP_TARGET_KIND_DATA: ctx_stmt_name = "target data"; break;
3857 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
3858 ctx_stmt_name = "parallel"; break;
3859 case GF_OMP_TARGET_KIND_OACC_KERNELS:
3860 ctx_stmt_name = "kernels"; break;
3861 case GF_OMP_TARGET_KIND_OACC_SERIAL:
3862 ctx_stmt_name = "serial"; break;
3863 case GF_OMP_TARGET_KIND_OACC_DATA: ctx_stmt_name = "data"; break;
3864 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
3865 ctx_stmt_name = "host_data"; break;
3866 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED:
3867 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE:
3868 case GF_OMP_TARGET_KIND_OACC_DATA_KERNELS:
3869 /* OpenACC 'kernels' decomposed parts. */
3870 ctx_stmt_name = "kernels"; break;
3871 default: gcc_unreachable ();
3874 /* OpenACC/OpenMP mismatch? */
3875 if (is_gimple_omp_oacc (stmt)
3876 != is_gimple_omp_oacc (ctx->stmt))
3878 error_at (gimple_location (stmt),
3879 "%s %qs construct inside of %s %qs region",
3880 (is_gimple_omp_oacc (stmt)
3881 ? "OpenACC" : "OpenMP"), stmt_name,
3882 (is_gimple_omp_oacc (ctx->stmt)
3883 ? "OpenACC" : "OpenMP"), ctx_stmt_name);
3884 return false;
3886 if (is_gimple_omp_offloaded (ctx->stmt))
3888 /* No GIMPLE_OMP_TARGET inside offloaded OpenACC CTX. */
3889 if (is_gimple_omp_oacc (ctx->stmt))
3891 error_at (gimple_location (stmt),
3892 "%qs construct inside of %qs region",
3893 stmt_name, ctx_stmt_name);
3894 return false;
3896 else
3898 if ((gimple_omp_target_kind (ctx->stmt)
3899 == GF_OMP_TARGET_KIND_REGION)
3900 && (gimple_omp_target_kind (stmt)
3901 == GF_OMP_TARGET_KIND_REGION))
3903 c = omp_find_clause (gimple_omp_target_clauses (stmt),
3904 OMP_CLAUSE_DEVICE);
3905 if (c && OMP_CLAUSE_DEVICE_ANCESTOR (c))
3906 break;
3908 warning_at (gimple_location (stmt), 0,
3909 "%qs construct inside of %qs region",
3910 stmt_name, ctx_stmt_name);
3914 break;
3915 default:
3916 break;
3918 return true;
3922 /* Helper function scan_omp.
3924 Callback for walk_tree or operators in walk_gimple_stmt used to
3925 scan for OMP directives in TP. */
3927 static tree
3928 scan_omp_1_op (tree *tp, int *walk_subtrees, void *data)
3930 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
3931 omp_context *ctx = (omp_context *) wi->info;
3932 tree t = *tp;
3934 switch (TREE_CODE (t))
3936 case VAR_DECL:
3937 case PARM_DECL:
3938 case LABEL_DECL:
3939 case RESULT_DECL:
3940 if (ctx)
3942 tree repl = remap_decl (t, &ctx->cb);
3943 gcc_checking_assert (TREE_CODE (repl) != ERROR_MARK);
3944 *tp = repl;
3946 break;
3948 default:
3949 if (ctx && TYPE_P (t))
3950 *tp = remap_type (t, &ctx->cb);
3951 else if (!DECL_P (t))
3953 *walk_subtrees = 1;
3954 if (ctx)
3956 tree tem = remap_type (TREE_TYPE (t), &ctx->cb);
3957 if (tem != TREE_TYPE (t))
3959 if (TREE_CODE (t) == INTEGER_CST)
3960 *tp = wide_int_to_tree (tem, wi::to_wide (t));
3961 else
3962 TREE_TYPE (t) = tem;
3966 break;
3969 return NULL_TREE;
3972 /* Return true if FNDECL is a setjmp or a longjmp. */
3974 static bool
3975 setjmp_or_longjmp_p (const_tree fndecl)
3977 if (fndecl_built_in_p (fndecl, BUILT_IN_SETJMP)
3978 || fndecl_built_in_p (fndecl, BUILT_IN_LONGJMP))
3979 return true;
3981 tree declname = DECL_NAME (fndecl);
3982 if (!declname
3983 || (DECL_CONTEXT (fndecl) != NULL_TREE
3984 && TREE_CODE (DECL_CONTEXT (fndecl)) != TRANSLATION_UNIT_DECL)
3985 || !TREE_PUBLIC (fndecl))
3986 return false;
3988 const char *name = IDENTIFIER_POINTER (declname);
3989 return !strcmp (name, "setjmp") || !strcmp (name, "longjmp");
3992 /* Return true if FNDECL is an omp_* runtime API call. */
3994 static bool
3995 omp_runtime_api_call (const_tree fndecl)
3997 tree declname = DECL_NAME (fndecl);
3998 if (!declname
3999 || (DECL_CONTEXT (fndecl) != NULL_TREE
4000 && TREE_CODE (DECL_CONTEXT (fndecl)) != TRANSLATION_UNIT_DECL)
4001 || !TREE_PUBLIC (fndecl))
4002 return false;
4004 const char *name = IDENTIFIER_POINTER (declname);
4005 if (!startswith (name, "omp_"))
4006 return false;
4008 static const char *omp_runtime_apis[] =
4010 /* This array has 3 sections. First omp_* calls that don't
4011 have any suffixes. */
4012 "aligned_alloc",
4013 "aligned_calloc",
4014 "alloc",
4015 "calloc",
4016 "free",
4017 "get_mapped_ptr",
4018 "realloc",
4019 "target_alloc",
4020 "target_associate_ptr",
4021 "target_disassociate_ptr",
4022 "target_free",
4023 "target_is_accessible",
4024 "target_is_present",
4025 "target_memcpy",
4026 "target_memcpy_async",
4027 "target_memcpy_rect",
4028 "target_memcpy_rect_async",
4029 NULL,
4030 /* Now omp_* calls that are available as omp_* and omp_*_; however, the
4031 DECL_NAME is always omp_* without tailing underscore. */
4032 "capture_affinity",
4033 "destroy_allocator",
4034 "destroy_lock",
4035 "destroy_nest_lock",
4036 "display_affinity",
4037 "fulfill_event",
4038 "get_active_level",
4039 "get_affinity_format",
4040 "get_cancellation",
4041 "get_default_allocator",
4042 "get_default_device",
4043 "get_device_num",
4044 "get_dynamic",
4045 "get_initial_device",
4046 "get_level",
4047 "get_max_active_levels",
4048 "get_max_task_priority",
4049 "get_max_teams",
4050 "get_max_threads",
4051 "get_nested",
4052 "get_num_devices",
4053 "get_num_places",
4054 "get_num_procs",
4055 "get_num_teams",
4056 "get_num_threads",
4057 "get_partition_num_places",
4058 "get_place_num",
4059 "get_proc_bind",
4060 "get_supported_active_levels",
4061 "get_team_num",
4062 "get_teams_thread_limit",
4063 "get_thread_limit",
4064 "get_thread_num",
4065 "get_wtick",
4066 "get_wtime",
4067 "in_final",
4068 "in_parallel",
4069 "init_lock",
4070 "init_nest_lock",
4071 "is_initial_device",
4072 "pause_resource",
4073 "pause_resource_all",
4074 "set_affinity_format",
4075 "set_default_allocator",
4076 "set_lock",
4077 "set_nest_lock",
4078 "test_lock",
4079 "test_nest_lock",
4080 "unset_lock",
4081 "unset_nest_lock",
4082 NULL,
4083 /* And finally calls available as omp_*, omp_*_ and omp_*_8_; however,
4084 as DECL_NAME only omp_* and omp_*_8 appear. */
4085 "display_env",
4086 "get_ancestor_thread_num",
4087 "init_allocator",
4088 "get_partition_place_nums",
4089 "get_place_num_procs",
4090 "get_place_proc_ids",
4091 "get_schedule",
4092 "get_team_size",
4093 "set_default_device",
4094 "set_dynamic",
4095 "set_max_active_levels",
4096 "set_nested",
4097 "set_num_teams",
4098 "set_num_threads",
4099 "set_schedule",
4100 "set_teams_thread_limit"
4103 int mode = 0;
4104 for (unsigned i = 0; i < ARRAY_SIZE (omp_runtime_apis); i++)
4106 if (omp_runtime_apis[i] == NULL)
4108 mode++;
4109 continue;
4111 size_t len = strlen (omp_runtime_apis[i]);
4112 if (strncmp (name + 4, omp_runtime_apis[i], len) == 0
4113 && (name[4 + len] == '\0'
4114 || (mode > 1 && strcmp (name + 4 + len, "_8") == 0)))
4115 return true;
4117 return false;
4120 /* Helper function for scan_omp.
4122 Callback for walk_gimple_stmt used to scan for OMP directives in
4123 the current statement in GSI. */
4125 static tree
4126 scan_omp_1_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
4127 struct walk_stmt_info *wi)
4129 gimple *stmt = gsi_stmt (*gsi);
4130 omp_context *ctx = (omp_context *) wi->info;
4132 if (gimple_has_location (stmt))
4133 input_location = gimple_location (stmt);
4135 /* Check the nesting restrictions. */
4136 bool remove = false;
4137 if (is_gimple_omp (stmt))
4138 remove = !check_omp_nesting_restrictions (stmt, ctx);
4139 else if (is_gimple_call (stmt))
4141 tree fndecl = gimple_call_fndecl (stmt);
4142 if (fndecl)
4144 if (ctx
4145 && gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
4146 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD
4147 && setjmp_or_longjmp_p (fndecl)
4148 && !ctx->loop_p)
4150 remove = true;
4151 error_at (gimple_location (stmt),
4152 "setjmp/longjmp inside %<simd%> construct");
4154 else if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
4155 switch (DECL_FUNCTION_CODE (fndecl))
4157 case BUILT_IN_GOMP_BARRIER:
4158 case BUILT_IN_GOMP_CANCEL:
4159 case BUILT_IN_GOMP_CANCELLATION_POINT:
4160 case BUILT_IN_GOMP_TASKYIELD:
4161 case BUILT_IN_GOMP_TASKWAIT:
4162 case BUILT_IN_GOMP_TASKGROUP_START:
4163 case BUILT_IN_GOMP_TASKGROUP_END:
4164 remove = !check_omp_nesting_restrictions (stmt, ctx);
4165 break;
4166 default:
4167 break;
4169 else if (ctx)
4171 omp_context *octx = ctx;
4172 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SCAN && ctx->outer)
4173 octx = ctx->outer;
4174 if (octx->order_concurrent && omp_runtime_api_call (fndecl))
4176 remove = true;
4177 error_at (gimple_location (stmt),
4178 "OpenMP runtime API call %qD in a region with "
4179 "%<order(concurrent)%> clause", fndecl);
4181 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
4182 && omp_runtime_api_call (fndecl)
4183 && ((IDENTIFIER_LENGTH (DECL_NAME (fndecl))
4184 != strlen ("omp_get_num_teams"))
4185 || strcmp (IDENTIFIER_POINTER (DECL_NAME (fndecl)),
4186 "omp_get_num_teams") != 0)
4187 && ((IDENTIFIER_LENGTH (DECL_NAME (fndecl))
4188 != strlen ("omp_get_team_num"))
4189 || strcmp (IDENTIFIER_POINTER (DECL_NAME (fndecl)),
4190 "omp_get_team_num") != 0))
4192 remove = true;
4193 error_at (gimple_location (stmt),
4194 "OpenMP runtime API call %qD strictly nested in a "
4195 "%<teams%> region", fndecl);
4197 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET
4198 && (gimple_omp_target_kind (ctx->stmt)
4199 == GF_OMP_TARGET_KIND_REGION)
4200 && omp_runtime_api_call (fndecl))
4202 tree tgt_clauses = gimple_omp_target_clauses (ctx->stmt);
4203 tree c = omp_find_clause (tgt_clauses, OMP_CLAUSE_DEVICE);
4204 if (c && OMP_CLAUSE_DEVICE_ANCESTOR (c))
4205 error_at (gimple_location (stmt),
4206 "OpenMP runtime API call %qD in a region with "
4207 "%<device(ancestor)%> clause", fndecl);
4212 if (remove)
4214 stmt = gimple_build_nop ();
4215 gsi_replace (gsi, stmt, false);
4218 *handled_ops_p = true;
4220 switch (gimple_code (stmt))
4222 case GIMPLE_OMP_PARALLEL:
4223 taskreg_nesting_level++;
4224 scan_omp_parallel (gsi, ctx);
4225 taskreg_nesting_level--;
4226 break;
4228 case GIMPLE_OMP_TASK:
4229 taskreg_nesting_level++;
4230 scan_omp_task (gsi, ctx);
4231 taskreg_nesting_level--;
4232 break;
4234 case GIMPLE_OMP_FOR:
4235 if ((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
4236 == GF_OMP_FOR_KIND_SIMD)
4237 && gimple_omp_for_combined_into_p (stmt)
4238 && gimple_code (ctx->stmt) != GIMPLE_OMP_SCAN)
4240 tree clauses = gimple_omp_for_clauses (as_a <gomp_for *> (stmt));
4241 tree c = omp_find_clause (clauses, OMP_CLAUSE_REDUCTION);
4242 if (c && OMP_CLAUSE_REDUCTION_INSCAN (c) && !seen_error ())
4244 scan_omp_simd_scan (gsi, as_a <gomp_for *> (stmt), ctx);
4245 break;
4248 if ((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
4249 == GF_OMP_FOR_KIND_SIMD)
4250 && omp_maybe_offloaded_ctx (ctx)
4251 && omp_max_simt_vf ()
4252 && gimple_omp_for_collapse (stmt) == 1)
4253 scan_omp_simd (gsi, as_a <gomp_for *> (stmt), ctx);
4254 else
4255 scan_omp_for (as_a <gomp_for *> (stmt), ctx);
4256 break;
4258 case GIMPLE_OMP_SCOPE:
4259 ctx = new_omp_context (stmt, ctx);
4260 scan_sharing_clauses (gimple_omp_scope_clauses (stmt), ctx);
4261 scan_omp (gimple_omp_body_ptr (stmt), ctx);
4262 break;
4264 case GIMPLE_OMP_SECTIONS:
4265 scan_omp_sections (as_a <gomp_sections *> (stmt), ctx);
4266 break;
4268 case GIMPLE_OMP_SINGLE:
4269 scan_omp_single (as_a <gomp_single *> (stmt), ctx);
4270 break;
4272 case GIMPLE_OMP_SCAN:
4273 if (tree clauses = gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt)))
4275 if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_INCLUSIVE)
4276 ctx->scan_inclusive = true;
4277 else if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_EXCLUSIVE)
4278 ctx->scan_exclusive = true;
4280 /* FALLTHRU */
4281 case GIMPLE_OMP_SECTION:
4282 case GIMPLE_OMP_MASTER:
4283 case GIMPLE_OMP_ORDERED:
4284 case GIMPLE_OMP_CRITICAL:
4285 ctx = new_omp_context (stmt, ctx);
4286 scan_omp (gimple_omp_body_ptr (stmt), ctx);
4287 break;
4289 case GIMPLE_OMP_MASKED:
4290 ctx = new_omp_context (stmt, ctx);
4291 scan_sharing_clauses (gimple_omp_masked_clauses (stmt), ctx);
4292 scan_omp (gimple_omp_body_ptr (stmt), ctx);
4293 break;
4295 case GIMPLE_OMP_TASKGROUP:
4296 ctx = new_omp_context (stmt, ctx);
4297 scan_sharing_clauses (gimple_omp_taskgroup_clauses (stmt), ctx);
4298 scan_omp (gimple_omp_body_ptr (stmt), ctx);
4299 break;
4301 case GIMPLE_OMP_TARGET:
4302 if (is_gimple_omp_offloaded (stmt))
4304 taskreg_nesting_level++;
4305 scan_omp_target (as_a <gomp_target *> (stmt), ctx);
4306 taskreg_nesting_level--;
4308 else
4309 scan_omp_target (as_a <gomp_target *> (stmt), ctx);
4310 break;
4312 case GIMPLE_OMP_TEAMS:
4313 if (gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
4315 taskreg_nesting_level++;
4316 scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
4317 taskreg_nesting_level--;
4319 else
4320 scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
4321 break;
4323 case GIMPLE_BIND:
4325 tree var;
4327 *handled_ops_p = false;
4328 if (ctx)
4329 for (var = gimple_bind_vars (as_a <gbind *> (stmt));
4330 var ;
4331 var = DECL_CHAIN (var))
4332 insert_decl_map (&ctx->cb, var, var);
4334 break;
4335 default:
4336 *handled_ops_p = false;
4337 break;
4340 return NULL_TREE;
4344 /* Scan all the statements starting at the current statement. CTX
4345 contains context information about the OMP directives and
4346 clauses found during the scan. */
4348 static void
4349 scan_omp (gimple_seq *body_p, omp_context *ctx)
4351 location_t saved_location;
4352 struct walk_stmt_info wi;
4354 memset (&wi, 0, sizeof (wi));
4355 wi.info = ctx;
4356 wi.want_locations = true;
4358 saved_location = input_location;
4359 walk_gimple_seq_mod (body_p, scan_omp_1_stmt, scan_omp_1_op, &wi);
4360 input_location = saved_location;
4363 /* Re-gimplification and code generation routines. */
4365 /* Remove omp_member_access_dummy_var variables from gimple_bind_vars
4366 of BIND if in a method. */
4368 static void
4369 maybe_remove_omp_member_access_dummy_vars (gbind *bind)
4371 if (DECL_ARGUMENTS (current_function_decl)
4372 && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
4373 && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
4374 == POINTER_TYPE))
4376 tree vars = gimple_bind_vars (bind);
4377 for (tree *pvar = &vars; *pvar; )
4378 if (omp_member_access_dummy_var (*pvar))
4379 *pvar = DECL_CHAIN (*pvar);
4380 else
4381 pvar = &DECL_CHAIN (*pvar);
4382 gimple_bind_set_vars (bind, vars);
4386 /* Remove omp_member_access_dummy_var variables from BLOCK_VARS of
4387 block and its subblocks. */
4389 static void
4390 remove_member_access_dummy_vars (tree block)
4392 for (tree *pvar = &BLOCK_VARS (block); *pvar; )
4393 if (omp_member_access_dummy_var (*pvar))
4394 *pvar = DECL_CHAIN (*pvar);
4395 else
4396 pvar = &DECL_CHAIN (*pvar);
4398 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
4399 remove_member_access_dummy_vars (block);
4402 /* If a context was created for STMT when it was scanned, return it. */
4404 static omp_context *
4405 maybe_lookup_ctx (gimple *stmt)
4407 splay_tree_node n;
4408 n = splay_tree_lookup (all_contexts, (splay_tree_key) stmt);
4409 return n ? (omp_context *) n->value : NULL;
4413 /* Find the mapping for DECL in CTX or the immediately enclosing
4414 context that has a mapping for DECL.
4416 If CTX is a nested parallel directive, we may have to use the decl
4417 mappings created in CTX's parent context. Suppose that we have the
4418 following parallel nesting (variable UIDs showed for clarity):
4420 iD.1562 = 0;
4421 #omp parallel shared(iD.1562) -> outer parallel
4422 iD.1562 = iD.1562 + 1;
4424 #omp parallel shared (iD.1562) -> inner parallel
4425 iD.1562 = iD.1562 - 1;
4427 Each parallel structure will create a distinct .omp_data_s structure
4428 for copying iD.1562 in/out of the directive:
4430 outer parallel .omp_data_s.1.i -> iD.1562
4431 inner parallel .omp_data_s.2.i -> iD.1562
4433 A shared variable mapping will produce a copy-out operation before
4434 the parallel directive and a copy-in operation after it. So, in
4435 this case we would have:
4437 iD.1562 = 0;
4438 .omp_data_o.1.i = iD.1562;
4439 #omp parallel shared(iD.1562) -> outer parallel
4440 .omp_data_i.1 = &.omp_data_o.1
4441 .omp_data_i.1->i = .omp_data_i.1->i + 1;
4443 .omp_data_o.2.i = iD.1562; -> **
4444 #omp parallel shared(iD.1562) -> inner parallel
4445 .omp_data_i.2 = &.omp_data_o.2
4446 .omp_data_i.2->i = .omp_data_i.2->i - 1;
4449 ** This is a problem. The symbol iD.1562 cannot be referenced
4450 inside the body of the outer parallel region. But since we are
4451 emitting this copy operation while expanding the inner parallel
4452 directive, we need to access the CTX structure of the outer
4453 parallel directive to get the correct mapping:
4455 .omp_data_o.2.i = .omp_data_i.1->i
4457 Since there may be other workshare or parallel directives enclosing
4458 the parallel directive, it may be necessary to walk up the context
4459 parent chain. This is not a problem in general because nested
4460 parallelism happens only rarely. */
4462 static tree
4463 lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
4465 tree t;
4466 omp_context *up;
4468 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
4469 t = maybe_lookup_decl (decl, up);
4471 gcc_assert (!ctx->is_nested || t || is_global_var (decl));
4473 return t ? t : decl;
4477 /* Similar to lookup_decl_in_outer_ctx, but return DECL if not found
4478 in outer contexts. */
4480 static tree
4481 maybe_lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
4483 tree t = NULL;
4484 omp_context *up;
4486 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
4487 t = maybe_lookup_decl (decl, up);
4489 return t ? t : decl;
4493 /* Construct the initialization value for reduction operation OP. */
4495 tree
4496 omp_reduction_init_op (location_t loc, enum tree_code op, tree type)
4498 switch (op)
4500 case PLUS_EXPR:
4501 case MINUS_EXPR:
4502 case BIT_IOR_EXPR:
4503 case BIT_XOR_EXPR:
4504 case TRUTH_OR_EXPR:
4505 case TRUTH_ORIF_EXPR:
4506 case TRUTH_XOR_EXPR:
4507 case NE_EXPR:
4508 return build_zero_cst (type);
4510 case MULT_EXPR:
4511 case TRUTH_AND_EXPR:
4512 case TRUTH_ANDIF_EXPR:
4513 case EQ_EXPR:
4514 return fold_convert_loc (loc, type, integer_one_node);
4516 case BIT_AND_EXPR:
4517 return fold_convert_loc (loc, type, integer_minus_one_node);
4519 case MAX_EXPR:
4520 if (SCALAR_FLOAT_TYPE_P (type))
4522 REAL_VALUE_TYPE max, min;
4523 if (HONOR_INFINITIES (type))
4525 real_inf (&max);
4526 real_arithmetic (&min, NEGATE_EXPR, &max, NULL);
4528 else
4529 real_maxval (&min, 1, TYPE_MODE (type));
4530 return build_real (type, min);
4532 else if (POINTER_TYPE_P (type))
4534 wide_int min
4535 = wi::min_value (TYPE_PRECISION (type), TYPE_SIGN (type));
4536 return wide_int_to_tree (type, min);
4538 else
4540 gcc_assert (INTEGRAL_TYPE_P (type));
4541 return TYPE_MIN_VALUE (type);
4544 case MIN_EXPR:
4545 if (SCALAR_FLOAT_TYPE_P (type))
4547 REAL_VALUE_TYPE max;
4548 if (HONOR_INFINITIES (type))
4549 real_inf (&max);
4550 else
4551 real_maxval (&max, 0, TYPE_MODE (type));
4552 return build_real (type, max);
4554 else if (POINTER_TYPE_P (type))
4556 wide_int max
4557 = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
4558 return wide_int_to_tree (type, max);
4560 else
4562 gcc_assert (INTEGRAL_TYPE_P (type));
4563 return TYPE_MAX_VALUE (type);
4566 default:
4567 gcc_unreachable ();
4571 /* Construct the initialization value for reduction CLAUSE. */
4573 tree
4574 omp_reduction_init (tree clause, tree type)
4576 return omp_reduction_init_op (OMP_CLAUSE_LOCATION (clause),
4577 OMP_CLAUSE_REDUCTION_CODE (clause), type);
4580 /* Return alignment to be assumed for var in CLAUSE, which should be
4581 OMP_CLAUSE_ALIGNED. */
4583 static tree
4584 omp_clause_aligned_alignment (tree clause)
4586 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
4587 return OMP_CLAUSE_ALIGNED_ALIGNMENT (clause);
4589 /* Otherwise return implementation defined alignment. */
4590 unsigned int al = 1;
4591 opt_scalar_mode mode_iter;
4592 auto_vector_modes modes;
4593 targetm.vectorize.autovectorize_vector_modes (&modes, true);
4594 static enum mode_class classes[]
4595 = { MODE_INT, MODE_VECTOR_INT, MODE_FLOAT, MODE_VECTOR_FLOAT };
4596 for (int i = 0; i < 4; i += 2)
4597 /* The for loop above dictates that we only walk through scalar classes. */
4598 FOR_EACH_MODE_IN_CLASS (mode_iter, classes[i])
4600 scalar_mode mode = mode_iter.require ();
4601 machine_mode vmode = targetm.vectorize.preferred_simd_mode (mode);
4602 if (GET_MODE_CLASS (vmode) != classes[i + 1])
4603 continue;
4604 machine_mode alt_vmode;
4605 for (unsigned int j = 0; j < modes.length (); ++j)
4606 if (related_vector_mode (modes[j], mode).exists (&alt_vmode)
4607 && known_ge (GET_MODE_SIZE (alt_vmode), GET_MODE_SIZE (vmode)))
4608 vmode = alt_vmode;
4610 tree type = lang_hooks.types.type_for_mode (mode, 1);
4611 if (type == NULL_TREE || TYPE_MODE (type) != mode)
4612 continue;
4613 type = build_vector_type_for_mode (type, vmode);
4614 if (TYPE_MODE (type) != vmode)
4615 continue;
4616 if (TYPE_ALIGN_UNIT (type) > al)
4617 al = TYPE_ALIGN_UNIT (type);
4619 return build_int_cst (integer_type_node, al);
4623 /* This structure is part of the interface between lower_rec_simd_input_clauses
4624 and lower_rec_input_clauses. */
4626 class omplow_simd_context {
4627 public:
4628 omplow_simd_context () { memset (this, 0, sizeof (*this)); }
4629 tree idx;
4630 tree lane;
4631 tree lastlane;
4632 vec<tree, va_heap> simt_eargs;
4633 gimple_seq simt_dlist;
4634 poly_uint64_pod max_vf;
4635 bool is_simt;
4638 /* Helper function of lower_rec_input_clauses, used for #pragma omp simd
4639 privatization. */
4641 static bool
4642 lower_rec_simd_input_clauses (tree new_var, omp_context *ctx,
4643 omplow_simd_context *sctx, tree &ivar,
4644 tree &lvar, tree *rvar = NULL,
4645 tree *rvar2 = NULL)
4647 if (known_eq (sctx->max_vf, 0U))
4649 sctx->max_vf = sctx->is_simt ? omp_max_simt_vf () : omp_max_vf ();
4650 if (maybe_gt (sctx->max_vf, 1U))
4652 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
4653 OMP_CLAUSE_SAFELEN);
4654 if (c)
4656 poly_uint64 safe_len;
4657 if (!poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
4658 || maybe_lt (safe_len, 1U))
4659 sctx->max_vf = 1;
4660 else
4661 sctx->max_vf = lower_bound (sctx->max_vf, safe_len);
4664 if (sctx->is_simt && !known_eq (sctx->max_vf, 1U))
4666 for (tree c = gimple_omp_for_clauses (ctx->stmt); c;
4667 c = OMP_CLAUSE_CHAIN (c))
4669 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
4670 continue;
4672 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4674 /* UDR reductions are not supported yet for SIMT, disable
4675 SIMT. */
4676 sctx->max_vf = 1;
4677 break;
4680 if (truth_value_p (OMP_CLAUSE_REDUCTION_CODE (c))
4681 && !INTEGRAL_TYPE_P (TREE_TYPE (new_var)))
4683 /* Doing boolean operations on non-integral types is
4684 for conformance only, it's not worth supporting this
4685 for SIMT. */
4686 sctx->max_vf = 1;
4687 break;
4691 if (maybe_gt (sctx->max_vf, 1U))
4693 sctx->idx = create_tmp_var (unsigned_type_node);
4694 sctx->lane = create_tmp_var (unsigned_type_node);
4697 if (known_eq (sctx->max_vf, 1U))
4698 return false;
4700 if (sctx->is_simt)
4702 if (is_gimple_reg (new_var))
4704 ivar = lvar = new_var;
4705 return true;
4707 tree type = TREE_TYPE (new_var), ptype = build_pointer_type (type);
4708 ivar = lvar = create_tmp_var (type);
4709 TREE_ADDRESSABLE (ivar) = 1;
4710 DECL_ATTRIBUTES (ivar) = tree_cons (get_identifier ("omp simt private"),
4711 NULL, DECL_ATTRIBUTES (ivar));
4712 sctx->simt_eargs.safe_push (build1 (ADDR_EXPR, ptype, ivar));
4713 tree clobber = build_clobber (type);
4714 gimple *g = gimple_build_assign (ivar, clobber);
4715 gimple_seq_add_stmt (&sctx->simt_dlist, g);
4717 else
4719 tree atype = build_array_type_nelts (TREE_TYPE (new_var), sctx->max_vf);
4720 tree avar = create_tmp_var_raw (atype);
4721 if (TREE_ADDRESSABLE (new_var))
4722 TREE_ADDRESSABLE (avar) = 1;
4723 DECL_ATTRIBUTES (avar)
4724 = tree_cons (get_identifier ("omp simd array"), NULL,
4725 DECL_ATTRIBUTES (avar));
4726 gimple_add_tmp_var (avar);
4727 tree iavar = avar;
4728 if (rvar && !ctx->for_simd_scan_phase)
4730 /* For inscan reductions, create another array temporary,
4731 which will hold the reduced value. */
4732 iavar = create_tmp_var_raw (atype);
4733 if (TREE_ADDRESSABLE (new_var))
4734 TREE_ADDRESSABLE (iavar) = 1;
4735 DECL_ATTRIBUTES (iavar)
4736 = tree_cons (get_identifier ("omp simd array"), NULL,
4737 tree_cons (get_identifier ("omp simd inscan"), NULL,
4738 DECL_ATTRIBUTES (iavar)));
4739 gimple_add_tmp_var (iavar);
4740 ctx->cb.decl_map->put (avar, iavar);
4741 if (sctx->lastlane == NULL_TREE)
4742 sctx->lastlane = create_tmp_var (unsigned_type_node);
4743 *rvar = build4 (ARRAY_REF, TREE_TYPE (new_var), iavar,
4744 sctx->lastlane, NULL_TREE, NULL_TREE);
4745 TREE_THIS_NOTRAP (*rvar) = 1;
4747 if (ctx->scan_exclusive)
4749 /* And for exclusive scan yet another one, which will
4750 hold the value during the scan phase. */
4751 tree savar = create_tmp_var_raw (atype);
4752 if (TREE_ADDRESSABLE (new_var))
4753 TREE_ADDRESSABLE (savar) = 1;
4754 DECL_ATTRIBUTES (savar)
4755 = tree_cons (get_identifier ("omp simd array"), NULL,
4756 tree_cons (get_identifier ("omp simd inscan "
4757 "exclusive"), NULL,
4758 DECL_ATTRIBUTES (savar)));
4759 gimple_add_tmp_var (savar);
4760 ctx->cb.decl_map->put (iavar, savar);
4761 *rvar2 = build4 (ARRAY_REF, TREE_TYPE (new_var), savar,
4762 sctx->idx, NULL_TREE, NULL_TREE);
4763 TREE_THIS_NOTRAP (*rvar2) = 1;
4766 ivar = build4 (ARRAY_REF, TREE_TYPE (new_var), iavar, sctx->idx,
4767 NULL_TREE, NULL_TREE);
4768 lvar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, sctx->lane,
4769 NULL_TREE, NULL_TREE);
4770 TREE_THIS_NOTRAP (ivar) = 1;
4771 TREE_THIS_NOTRAP (lvar) = 1;
4773 if (DECL_P (new_var))
4775 SET_DECL_VALUE_EXPR (new_var, lvar);
4776 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4778 return true;
4781 /* Helper function of lower_rec_input_clauses. For a reference
4782 in simd reduction, add an underlying variable it will reference. */
4784 static void
4785 handle_simd_reference (location_t loc, tree new_vard, gimple_seq *ilist)
4787 tree z = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_vard)));
4788 if (TREE_CONSTANT (z))
4790 z = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_vard)),
4791 get_name (new_vard));
4792 gimple_add_tmp_var (z);
4793 TREE_ADDRESSABLE (z) = 1;
4794 z = build_fold_addr_expr_loc (loc, z);
4795 gimplify_assign (new_vard, z, ilist);
4799 /* Helper function for lower_rec_input_clauses. Emit into ilist sequence
4800 code to emit (type) (tskred_temp[idx]). */
4802 static tree
4803 task_reduction_read (gimple_seq *ilist, tree tskred_temp, tree type,
4804 unsigned idx)
4806 unsigned HOST_WIDE_INT sz
4807 = tree_to_uhwi (TYPE_SIZE_UNIT (pointer_sized_int_node));
4808 tree r = build2 (MEM_REF, pointer_sized_int_node,
4809 tskred_temp, build_int_cst (TREE_TYPE (tskred_temp),
4810 idx * sz));
4811 tree v = create_tmp_var (pointer_sized_int_node);
4812 gimple *g = gimple_build_assign (v, r);
4813 gimple_seq_add_stmt (ilist, g);
4814 if (!useless_type_conversion_p (type, pointer_sized_int_node))
4816 v = create_tmp_var (type);
4817 g = gimple_build_assign (v, NOP_EXPR, gimple_assign_lhs (g));
4818 gimple_seq_add_stmt (ilist, g);
4820 return v;
4823 /* Lower early initialization of privatized variable NEW_VAR
4824 if it needs an allocator (has allocate clause). */
4826 static bool
4827 lower_private_allocate (tree var, tree new_var, tree &allocator,
4828 tree &allocate_ptr, gimple_seq *ilist,
4829 omp_context *ctx, bool is_ref, tree size)
4831 if (allocator)
4832 return false;
4833 gcc_assert (allocate_ptr == NULL_TREE);
4834 if (ctx->allocate_map
4835 && (DECL_P (new_var) || (TYPE_P (new_var) && size)))
4836 if (tree *allocatorp = ctx->allocate_map->get (var))
4837 allocator = *allocatorp;
4838 if (allocator == NULL_TREE)
4839 return false;
4840 if (!is_ref && omp_privatize_by_reference (var))
4842 allocator = NULL_TREE;
4843 return false;
4846 unsigned HOST_WIDE_INT ialign = 0;
4847 if (TREE_CODE (allocator) == TREE_LIST)
4849 ialign = tree_to_uhwi (TREE_VALUE (allocator));
4850 allocator = TREE_PURPOSE (allocator);
4852 if (TREE_CODE (allocator) != INTEGER_CST)
4853 allocator = build_outer_var_ref (allocator, ctx, OMP_CLAUSE_ALLOCATE);
4854 allocator = fold_convert (pointer_sized_int_node, allocator);
4855 if (TREE_CODE (allocator) != INTEGER_CST)
4857 tree var = create_tmp_var (TREE_TYPE (allocator));
4858 gimplify_assign (var, allocator, ilist);
4859 allocator = var;
4862 tree ptr_type, align, sz = size;
4863 if (TYPE_P (new_var))
4865 ptr_type = build_pointer_type (new_var);
4866 ialign = MAX (ialign, TYPE_ALIGN_UNIT (new_var));
4868 else if (is_ref)
4870 ptr_type = build_pointer_type (TREE_TYPE (TREE_TYPE (new_var)));
4871 ialign = MAX (ialign, TYPE_ALIGN_UNIT (TREE_TYPE (ptr_type)));
4873 else
4875 ptr_type = build_pointer_type (TREE_TYPE (new_var));
4876 ialign = MAX (ialign, DECL_ALIGN_UNIT (new_var));
4877 if (sz == NULL_TREE)
4878 sz = fold_convert (size_type_node, DECL_SIZE_UNIT (new_var));
4880 align = build_int_cst (size_type_node, ialign);
4881 if (TREE_CODE (sz) != INTEGER_CST)
4883 tree szvar = create_tmp_var (size_type_node);
4884 gimplify_assign (szvar, sz, ilist);
4885 sz = szvar;
4887 allocate_ptr = create_tmp_var (ptr_type);
4888 tree a = builtin_decl_explicit (BUILT_IN_GOMP_ALLOC);
4889 gimple *g = gimple_build_call (a, 3, align, sz, allocator);
4890 gimple_call_set_lhs (g, allocate_ptr);
4891 gimple_seq_add_stmt (ilist, g);
4892 if (!is_ref)
4894 tree x = build_simple_mem_ref (allocate_ptr);
4895 TREE_THIS_NOTRAP (x) = 1;
4896 SET_DECL_VALUE_EXPR (new_var, x);
4897 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4899 return true;
4902 /* Generate code to implement the input clauses, FIRSTPRIVATE and COPYIN,
4903 from the receiver (aka child) side and initializers for REFERENCE_TYPE
4904 private variables. Initialization statements go in ILIST, while calls
4905 to destructors go in DLIST. */
4907 static void
4908 lower_rec_input_clauses (tree clauses, gimple_seq *ilist, gimple_seq *dlist,
4909 omp_context *ctx, struct omp_for_data *fd)
4911 tree c, copyin_seq, x, ptr;
4912 bool copyin_by_ref = false;
4913 bool lastprivate_firstprivate = false;
4914 bool reduction_omp_orig_ref = false;
4915 int pass;
4916 bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
4917 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD);
4918 omplow_simd_context sctx = omplow_simd_context ();
4919 tree simt_lane = NULL_TREE, simtrec = NULL_TREE;
4920 tree ivar = NULL_TREE, lvar = NULL_TREE, uid = NULL_TREE;
4921 gimple_seq llist[4] = { };
4922 tree nonconst_simd_if = NULL_TREE;
4924 copyin_seq = NULL;
4925 sctx.is_simt = is_simd && omp_find_clause (clauses, OMP_CLAUSE__SIMT_);
4927 /* Set max_vf=1 (which will later enforce safelen=1) in simd loops
4928 with data sharing clauses referencing variable sized vars. That
4929 is unnecessarily hard to support and very unlikely to result in
4930 vectorized code anyway. */
4931 if (is_simd)
4932 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
4933 switch (OMP_CLAUSE_CODE (c))
4935 case OMP_CLAUSE_LINEAR:
4936 if (OMP_CLAUSE_LINEAR_ARRAY (c))
4937 sctx.max_vf = 1;
4938 /* FALLTHRU */
4939 case OMP_CLAUSE_PRIVATE:
4940 case OMP_CLAUSE_FIRSTPRIVATE:
4941 case OMP_CLAUSE_LASTPRIVATE:
4942 if (is_variable_sized (OMP_CLAUSE_DECL (c)))
4943 sctx.max_vf = 1;
4944 else if (omp_privatize_by_reference (OMP_CLAUSE_DECL (c)))
4946 tree rtype = TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c)));
4947 if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype)))
4948 sctx.max_vf = 1;
4950 break;
4951 case OMP_CLAUSE_REDUCTION:
4952 case OMP_CLAUSE_IN_REDUCTION:
4953 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
4954 || is_variable_sized (OMP_CLAUSE_DECL (c)))
4955 sctx.max_vf = 1;
4956 else if (omp_privatize_by_reference (OMP_CLAUSE_DECL (c)))
4958 tree rtype = TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c)));
4959 if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype)))
4960 sctx.max_vf = 1;
4962 break;
4963 case OMP_CLAUSE_IF:
4964 if (integer_zerop (OMP_CLAUSE_IF_EXPR (c)))
4965 sctx.max_vf = 1;
4966 else if (TREE_CODE (OMP_CLAUSE_IF_EXPR (c)) != INTEGER_CST)
4967 nonconst_simd_if = OMP_CLAUSE_IF_EXPR (c);
4968 break;
4969 case OMP_CLAUSE_SIMDLEN:
4970 if (integer_onep (OMP_CLAUSE_SIMDLEN_EXPR (c)))
4971 sctx.max_vf = 1;
4972 break;
4973 case OMP_CLAUSE__CONDTEMP_:
4974 /* FIXME: lastprivate(conditional:) not handled for SIMT yet. */
4975 if (sctx.is_simt)
4976 sctx.max_vf = 1;
4977 break;
4978 default:
4979 continue;
4982 /* Add a placeholder for simduid. */
4983 if (sctx.is_simt && maybe_ne (sctx.max_vf, 1U))
4984 sctx.simt_eargs.safe_push (NULL_TREE);
4986 unsigned task_reduction_cnt = 0;
4987 unsigned task_reduction_cntorig = 0;
4988 unsigned task_reduction_cnt_full = 0;
4989 unsigned task_reduction_cntorig_full = 0;
4990 unsigned task_reduction_other_cnt = 0;
4991 tree tskred_atype = NULL_TREE, tskred_avar = NULL_TREE;
4992 tree tskred_base = NULL_TREE, tskred_temp = NULL_TREE;
4993 /* Do all the fixed sized types in the first pass, and the variable sized
4994 types in the second pass. This makes sure that the scalar arguments to
4995 the variable sized types are processed before we use them in the
4996 variable sized operations. For task reductions we use 4 passes, in the
4997 first two we ignore them, in the third one gather arguments for
4998 GOMP_task_reduction_remap call and in the last pass actually handle
4999 the task reductions. */
5000 for (pass = 0; pass < ((task_reduction_cnt || task_reduction_other_cnt)
5001 ? 4 : 2); ++pass)
5003 if (pass == 2 && task_reduction_cnt)
5005 tskred_atype
5006 = build_array_type_nelts (ptr_type_node, task_reduction_cnt
5007 + task_reduction_cntorig);
5008 tskred_avar = create_tmp_var_raw (tskred_atype);
5009 gimple_add_tmp_var (tskred_avar);
5010 TREE_ADDRESSABLE (tskred_avar) = 1;
5011 task_reduction_cnt_full = task_reduction_cnt;
5012 task_reduction_cntorig_full = task_reduction_cntorig;
5014 else if (pass == 3 && task_reduction_cnt)
5016 x = builtin_decl_explicit (BUILT_IN_GOMP_TASK_REDUCTION_REMAP);
5017 gimple *g
5018 = gimple_build_call (x, 3, size_int (task_reduction_cnt),
5019 size_int (task_reduction_cntorig),
5020 build_fold_addr_expr (tskred_avar));
5021 gimple_seq_add_stmt (ilist, g);
5023 if (pass == 3 && task_reduction_other_cnt)
5025 /* For reduction clauses, build
5026 tskred_base = (void *) tskred_temp[2]
5027 + omp_get_thread_num () * tskred_temp[1]
5028 or if tskred_temp[1] is known to be constant, that constant
5029 directly. This is the start of the private reduction copy block
5030 for the current thread. */
5031 tree v = create_tmp_var (integer_type_node);
5032 x = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
5033 gimple *g = gimple_build_call (x, 0);
5034 gimple_call_set_lhs (g, v);
5035 gimple_seq_add_stmt (ilist, g);
5036 c = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
5037 tskred_temp = OMP_CLAUSE_DECL (c);
5038 if (is_taskreg_ctx (ctx))
5039 tskred_temp = lookup_decl (tskred_temp, ctx);
5040 tree v2 = create_tmp_var (sizetype);
5041 g = gimple_build_assign (v2, NOP_EXPR, v);
5042 gimple_seq_add_stmt (ilist, g);
5043 if (ctx->task_reductions[0])
5044 v = fold_convert (sizetype, ctx->task_reductions[0]);
5045 else
5046 v = task_reduction_read (ilist, tskred_temp, sizetype, 1);
5047 tree v3 = create_tmp_var (sizetype);
5048 g = gimple_build_assign (v3, MULT_EXPR, v2, v);
5049 gimple_seq_add_stmt (ilist, g);
5050 v = task_reduction_read (ilist, tskred_temp, ptr_type_node, 2);
5051 tskred_base = create_tmp_var (ptr_type_node);
5052 g = gimple_build_assign (tskred_base, POINTER_PLUS_EXPR, v, v3);
5053 gimple_seq_add_stmt (ilist, g);
5055 task_reduction_cnt = 0;
5056 task_reduction_cntorig = 0;
5057 task_reduction_other_cnt = 0;
5058 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
5060 enum omp_clause_code c_kind = OMP_CLAUSE_CODE (c);
5061 tree var, new_var;
5062 bool by_ref;
5063 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
5064 bool task_reduction_p = false;
5065 bool task_reduction_needs_orig_p = false;
5066 tree cond = NULL_TREE;
5067 tree allocator, allocate_ptr;
5069 switch (c_kind)
5071 case OMP_CLAUSE_PRIVATE:
5072 if (OMP_CLAUSE_PRIVATE_DEBUG (c))
5073 continue;
5074 break;
5075 case OMP_CLAUSE_SHARED:
5076 /* Ignore shared directives in teams construct inside
5077 of target construct. */
5078 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
5079 && !is_host_teams_ctx (ctx))
5080 continue;
5081 if (maybe_lookup_decl (OMP_CLAUSE_DECL (c), ctx) == NULL)
5083 gcc_assert (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c)
5084 || is_global_var (OMP_CLAUSE_DECL (c)));
5085 continue;
5087 case OMP_CLAUSE_FIRSTPRIVATE:
5088 case OMP_CLAUSE_COPYIN:
5089 break;
5090 case OMP_CLAUSE_LINEAR:
5091 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
5092 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
5093 lastprivate_firstprivate = true;
5094 break;
5095 case OMP_CLAUSE_REDUCTION:
5096 case OMP_CLAUSE_IN_REDUCTION:
5097 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
5098 || is_task_ctx (ctx)
5099 || OMP_CLAUSE_REDUCTION_TASK (c))
5101 task_reduction_p = true;
5102 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
5104 task_reduction_other_cnt++;
5105 if (pass == 2)
5106 continue;
5108 else
5109 task_reduction_cnt++;
5110 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
5112 var = OMP_CLAUSE_DECL (c);
5113 /* If var is a global variable that isn't privatized
5114 in outer contexts, we don't need to look up the
5115 original address, it is always the address of the
5116 global variable itself. */
5117 if (!DECL_P (var)
5118 || omp_privatize_by_reference (var)
5119 || !is_global_var
5120 (maybe_lookup_decl_in_outer_ctx (var, ctx)))
5122 task_reduction_needs_orig_p = true;
5123 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5124 task_reduction_cntorig++;
5128 else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
5129 reduction_omp_orig_ref = true;
5130 break;
5131 case OMP_CLAUSE__REDUCTEMP_:
5132 if (!is_taskreg_ctx (ctx))
5133 continue;
5134 /* FALLTHRU */
5135 case OMP_CLAUSE__LOOPTEMP_:
5136 /* Handle _looptemp_/_reductemp_ clauses only on
5137 parallel/task. */
5138 if (fd)
5139 continue;
5140 break;
5141 case OMP_CLAUSE_LASTPRIVATE:
5142 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
5144 lastprivate_firstprivate = true;
5145 if (pass != 0 || is_taskloop_ctx (ctx))
5146 continue;
5148 /* Even without corresponding firstprivate, if
5149 decl is Fortran allocatable, it needs outer var
5150 reference. */
5151 else if (pass == 0
5152 && lang_hooks.decls.omp_private_outer_ref
5153 (OMP_CLAUSE_DECL (c)))
5154 lastprivate_firstprivate = true;
5155 break;
5156 case OMP_CLAUSE_ALIGNED:
5157 if (pass != 1)
5158 continue;
5159 var = OMP_CLAUSE_DECL (c);
5160 if (TREE_CODE (TREE_TYPE (var)) == POINTER_TYPE
5161 && !is_global_var (var))
5163 new_var = maybe_lookup_decl (var, ctx);
5164 if (new_var == NULL_TREE)
5165 new_var = maybe_lookup_decl_in_outer_ctx (var, ctx);
5166 x = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
5167 tree alarg = omp_clause_aligned_alignment (c);
5168 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
5169 x = build_call_expr_loc (clause_loc, x, 2, new_var, alarg);
5170 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
5171 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
5172 gimplify_and_add (x, ilist);
5174 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
5175 && is_global_var (var))
5177 tree ptype = build_pointer_type (TREE_TYPE (var)), t, t2;
5178 new_var = lookup_decl (var, ctx);
5179 t = maybe_lookup_decl_in_outer_ctx (var, ctx);
5180 t = build_fold_addr_expr_loc (clause_loc, t);
5181 t2 = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
5182 tree alarg = omp_clause_aligned_alignment (c);
5183 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
5184 t = build_call_expr_loc (clause_loc, t2, 2, t, alarg);
5185 t = fold_convert_loc (clause_loc, ptype, t);
5186 x = create_tmp_var (ptype);
5187 t = build2 (MODIFY_EXPR, ptype, x, t);
5188 gimplify_and_add (t, ilist);
5189 t = build_simple_mem_ref_loc (clause_loc, x);
5190 SET_DECL_VALUE_EXPR (new_var, t);
5191 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5193 continue;
5194 case OMP_CLAUSE__CONDTEMP_:
5195 if (is_parallel_ctx (ctx)
5196 || (is_simd && !OMP_CLAUSE__CONDTEMP__ITER (c)))
5197 break;
5198 continue;
5199 default:
5200 continue;
5203 if (task_reduction_p != (pass >= 2))
5204 continue;
5206 allocator = NULL_TREE;
5207 allocate_ptr = NULL_TREE;
5208 new_var = var = OMP_CLAUSE_DECL (c);
5209 if ((c_kind == OMP_CLAUSE_REDUCTION
5210 || c_kind == OMP_CLAUSE_IN_REDUCTION)
5211 && TREE_CODE (var) == MEM_REF)
5213 var = TREE_OPERAND (var, 0);
5214 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
5215 var = TREE_OPERAND (var, 0);
5216 if (TREE_CODE (var) == INDIRECT_REF
5217 || TREE_CODE (var) == ADDR_EXPR)
5218 var = TREE_OPERAND (var, 0);
5219 if (is_variable_sized (var))
5221 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
5222 var = DECL_VALUE_EXPR (var);
5223 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
5224 var = TREE_OPERAND (var, 0);
5225 gcc_assert (DECL_P (var));
5227 new_var = var;
5229 if (c_kind == OMP_CLAUSE_IN_REDUCTION && is_omp_target (ctx->stmt))
5231 splay_tree_key key = (splay_tree_key) &DECL_CONTEXT (var);
5232 new_var = (tree) splay_tree_lookup (ctx->field_map, key)->value;
5234 else if (c_kind != OMP_CLAUSE_COPYIN)
5235 new_var = lookup_decl (var, ctx);
5237 if (c_kind == OMP_CLAUSE_SHARED || c_kind == OMP_CLAUSE_COPYIN)
5239 if (pass != 0)
5240 continue;
5242 /* C/C++ array section reductions. */
5243 else if ((c_kind == OMP_CLAUSE_REDUCTION
5244 || c_kind == OMP_CLAUSE_IN_REDUCTION)
5245 && var != OMP_CLAUSE_DECL (c))
5247 if (pass == 0)
5248 continue;
5250 tree bias = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
5251 tree orig_var = TREE_OPERAND (OMP_CLAUSE_DECL (c), 0);
5253 if (TREE_CODE (orig_var) == POINTER_PLUS_EXPR)
5255 tree b = TREE_OPERAND (orig_var, 1);
5256 if (is_omp_target (ctx->stmt))
5257 b = NULL_TREE;
5258 else
5259 b = maybe_lookup_decl (b, ctx);
5260 if (b == NULL)
5262 b = TREE_OPERAND (orig_var, 1);
5263 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
5265 if (integer_zerop (bias))
5266 bias = b;
5267 else
5269 bias = fold_convert_loc (clause_loc,
5270 TREE_TYPE (b), bias);
5271 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
5272 TREE_TYPE (b), b, bias);
5274 orig_var = TREE_OPERAND (orig_var, 0);
5276 if (pass == 2)
5278 tree out = maybe_lookup_decl_in_outer_ctx (var, ctx);
5279 if (is_global_var (out)
5280 && TREE_CODE (TREE_TYPE (out)) != POINTER_TYPE
5281 && (TREE_CODE (TREE_TYPE (out)) != REFERENCE_TYPE
5282 || (TREE_CODE (TREE_TYPE (TREE_TYPE (out)))
5283 != POINTER_TYPE)))
5284 x = var;
5285 else if (is_omp_target (ctx->stmt))
5286 x = out;
5287 else
5289 bool by_ref = use_pointer_for_field (var, NULL);
5290 x = build_receiver_ref (var, by_ref, ctx);
5291 if (TREE_CODE (TREE_TYPE (var)) == REFERENCE_TYPE
5292 && (TREE_CODE (TREE_TYPE (TREE_TYPE (var)))
5293 == POINTER_TYPE))
5294 x = build_fold_addr_expr (x);
5296 if (TREE_CODE (orig_var) == INDIRECT_REF)
5297 x = build_simple_mem_ref (x);
5298 else if (TREE_CODE (orig_var) == ADDR_EXPR)
5300 if (var == TREE_OPERAND (orig_var, 0))
5301 x = build_fold_addr_expr (x);
5303 bias = fold_convert (sizetype, bias);
5304 x = fold_convert (ptr_type_node, x);
5305 x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
5306 TREE_TYPE (x), x, bias);
5307 unsigned cnt = task_reduction_cnt - 1;
5308 if (!task_reduction_needs_orig_p)
5309 cnt += (task_reduction_cntorig_full
5310 - task_reduction_cntorig);
5311 else
5312 cnt = task_reduction_cntorig - 1;
5313 tree r = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5314 size_int (cnt), NULL_TREE, NULL_TREE);
5315 gimplify_assign (r, x, ilist);
5316 continue;
5319 if (TREE_CODE (orig_var) == INDIRECT_REF
5320 || TREE_CODE (orig_var) == ADDR_EXPR)
5321 orig_var = TREE_OPERAND (orig_var, 0);
5322 tree d = OMP_CLAUSE_DECL (c);
5323 tree type = TREE_TYPE (d);
5324 gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
5325 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
5326 tree sz = v;
5327 const char *name = get_name (orig_var);
5328 if (pass != 3 && !TREE_CONSTANT (v))
5330 tree t;
5331 if (is_omp_target (ctx->stmt))
5332 t = NULL_TREE;
5333 else
5334 t = maybe_lookup_decl (v, ctx);
5335 if (t)
5336 v = t;
5337 else
5338 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
5339 gimplify_expr (&v, ilist, NULL, is_gimple_val, fb_rvalue);
5340 t = fold_build2_loc (clause_loc, PLUS_EXPR,
5341 TREE_TYPE (v), v,
5342 build_int_cst (TREE_TYPE (v), 1));
5343 sz = fold_build2_loc (clause_loc, MULT_EXPR,
5344 TREE_TYPE (v), t,
5345 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5347 if (pass == 3)
5349 tree xv = create_tmp_var (ptr_type_node);
5350 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5352 unsigned cnt = task_reduction_cnt - 1;
5353 if (!task_reduction_needs_orig_p)
5354 cnt += (task_reduction_cntorig_full
5355 - task_reduction_cntorig);
5356 else
5357 cnt = task_reduction_cntorig - 1;
5358 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5359 size_int (cnt), NULL_TREE, NULL_TREE);
5361 gimple *g = gimple_build_assign (xv, x);
5362 gimple_seq_add_stmt (ilist, g);
5364 else
5366 unsigned int idx = *ctx->task_reduction_map->get (c);
5367 tree off;
5368 if (ctx->task_reductions[1 + idx])
5369 off = fold_convert (sizetype,
5370 ctx->task_reductions[1 + idx]);
5371 else
5372 off = task_reduction_read (ilist, tskred_temp, sizetype,
5373 7 + 3 * idx + 1);
5374 gimple *g = gimple_build_assign (xv, POINTER_PLUS_EXPR,
5375 tskred_base, off);
5376 gimple_seq_add_stmt (ilist, g);
5378 x = fold_convert (build_pointer_type (boolean_type_node),
5379 xv);
5380 if (TREE_CONSTANT (v))
5381 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (x), x,
5382 TYPE_SIZE_UNIT (type));
5383 else
5385 tree t;
5386 if (is_omp_target (ctx->stmt))
5387 t = NULL_TREE;
5388 else
5389 t = maybe_lookup_decl (v, ctx);
5390 if (t)
5391 v = t;
5392 else
5393 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
5394 gimplify_expr (&v, ilist, NULL, is_gimple_val,
5395 fb_rvalue);
5396 t = fold_build2_loc (clause_loc, PLUS_EXPR,
5397 TREE_TYPE (v), v,
5398 build_int_cst (TREE_TYPE (v), 1));
5399 t = fold_build2_loc (clause_loc, MULT_EXPR,
5400 TREE_TYPE (v), t,
5401 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5402 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (x), x, t);
5404 cond = create_tmp_var (TREE_TYPE (x));
5405 gimplify_assign (cond, x, ilist);
5406 x = xv;
5408 else if (lower_private_allocate (var, type, allocator,
5409 allocate_ptr, ilist, ctx,
5410 true,
5411 TREE_CONSTANT (v)
5412 ? TYPE_SIZE_UNIT (type)
5413 : sz))
5414 x = allocate_ptr;
5415 else if (TREE_CONSTANT (v))
5417 x = create_tmp_var_raw (type, name);
5418 gimple_add_tmp_var (x);
5419 TREE_ADDRESSABLE (x) = 1;
5420 x = build_fold_addr_expr_loc (clause_loc, x);
5422 else
5424 tree atmp
5425 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
5426 tree al = size_int (TYPE_ALIGN (TREE_TYPE (type)));
5427 x = build_call_expr_loc (clause_loc, atmp, 2, sz, al);
5430 tree ptype = build_pointer_type (TREE_TYPE (type));
5431 x = fold_convert_loc (clause_loc, ptype, x);
5432 tree y = create_tmp_var (ptype, name);
5433 gimplify_assign (y, x, ilist);
5434 x = y;
5435 tree yb = y;
5437 if (!integer_zerop (bias))
5439 bias = fold_convert_loc (clause_loc, pointer_sized_int_node,
5440 bias);
5441 yb = fold_convert_loc (clause_loc, pointer_sized_int_node,
5443 yb = fold_build2_loc (clause_loc, MINUS_EXPR,
5444 pointer_sized_int_node, yb, bias);
5445 x = fold_convert_loc (clause_loc, TREE_TYPE (x), yb);
5446 yb = create_tmp_var (ptype, name);
5447 gimplify_assign (yb, x, ilist);
5448 x = yb;
5451 d = TREE_OPERAND (d, 0);
5452 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
5453 d = TREE_OPERAND (d, 0);
5454 if (TREE_CODE (d) == ADDR_EXPR)
5456 if (orig_var != var)
5458 gcc_assert (is_variable_sized (orig_var));
5459 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var),
5461 gimplify_assign (new_var, x, ilist);
5462 tree new_orig_var = lookup_decl (orig_var, ctx);
5463 tree t = build_fold_indirect_ref (new_var);
5464 DECL_IGNORED_P (new_var) = 0;
5465 TREE_THIS_NOTRAP (t) = 1;
5466 SET_DECL_VALUE_EXPR (new_orig_var, t);
5467 DECL_HAS_VALUE_EXPR_P (new_orig_var) = 1;
5469 else
5471 x = build2 (MEM_REF, TREE_TYPE (new_var), x,
5472 build_int_cst (ptype, 0));
5473 SET_DECL_VALUE_EXPR (new_var, x);
5474 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5477 else
5479 gcc_assert (orig_var == var);
5480 if (TREE_CODE (d) == INDIRECT_REF)
5482 x = create_tmp_var (ptype, name);
5483 TREE_ADDRESSABLE (x) = 1;
5484 gimplify_assign (x, yb, ilist);
5485 x = build_fold_addr_expr_loc (clause_loc, x);
5487 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
5488 gimplify_assign (new_var, x, ilist);
5490 /* GOMP_taskgroup_reduction_register memsets the whole
5491 array to zero. If the initializer is zero, we don't
5492 need to initialize it again, just mark it as ever
5493 used unconditionally, i.e. cond = true. */
5494 if (cond
5495 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) == NULL_TREE
5496 && initializer_zerop (omp_reduction_init (c,
5497 TREE_TYPE (type))))
5499 gimple *g = gimple_build_assign (build_simple_mem_ref (cond),
5500 boolean_true_node);
5501 gimple_seq_add_stmt (ilist, g);
5502 continue;
5504 tree end = create_artificial_label (UNKNOWN_LOCATION);
5505 if (cond)
5507 gimple *g;
5508 if (!is_parallel_ctx (ctx))
5510 tree condv = create_tmp_var (boolean_type_node);
5511 g = gimple_build_assign (condv,
5512 build_simple_mem_ref (cond));
5513 gimple_seq_add_stmt (ilist, g);
5514 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
5515 g = gimple_build_cond (NE_EXPR, condv,
5516 boolean_false_node, end, lab1);
5517 gimple_seq_add_stmt (ilist, g);
5518 gimple_seq_add_stmt (ilist, gimple_build_label (lab1));
5520 g = gimple_build_assign (build_simple_mem_ref (cond),
5521 boolean_true_node);
5522 gimple_seq_add_stmt (ilist, g);
5525 tree y1 = create_tmp_var (ptype);
5526 gimplify_assign (y1, y, ilist);
5527 tree i2 = NULL_TREE, y2 = NULL_TREE;
5528 tree body2 = NULL_TREE, end2 = NULL_TREE;
5529 tree y3 = NULL_TREE, y4 = NULL_TREE;
5530 if (task_reduction_needs_orig_p)
5532 y3 = create_tmp_var (ptype);
5533 tree ref;
5534 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5535 ref = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5536 size_int (task_reduction_cnt_full
5537 + task_reduction_cntorig - 1),
5538 NULL_TREE, NULL_TREE);
5539 else
5541 unsigned int idx = *ctx->task_reduction_map->get (c);
5542 ref = task_reduction_read (ilist, tskred_temp, ptype,
5543 7 + 3 * idx);
5545 gimplify_assign (y3, ref, ilist);
5547 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) || is_simd)
5549 if (pass != 3)
5551 y2 = create_tmp_var (ptype);
5552 gimplify_assign (y2, y, ilist);
5554 if (is_simd || OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
5556 tree ref = build_outer_var_ref (var, ctx);
5557 /* For ref build_outer_var_ref already performs this. */
5558 if (TREE_CODE (d) == INDIRECT_REF)
5559 gcc_assert (omp_privatize_by_reference (var));
5560 else if (TREE_CODE (d) == ADDR_EXPR)
5561 ref = build_fold_addr_expr (ref);
5562 else if (omp_privatize_by_reference (var))
5563 ref = build_fold_addr_expr (ref);
5564 ref = fold_convert_loc (clause_loc, ptype, ref);
5565 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
5566 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
5568 y3 = create_tmp_var (ptype);
5569 gimplify_assign (y3, unshare_expr (ref), ilist);
5571 if (is_simd)
5573 y4 = create_tmp_var (ptype);
5574 gimplify_assign (y4, ref, dlist);
5578 tree i = create_tmp_var (TREE_TYPE (v));
5579 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), ilist);
5580 tree body = create_artificial_label (UNKNOWN_LOCATION);
5581 gimple_seq_add_stmt (ilist, gimple_build_label (body));
5582 if (y2)
5584 i2 = create_tmp_var (TREE_TYPE (v));
5585 gimplify_assign (i2, build_int_cst (TREE_TYPE (v), 0), dlist);
5586 body2 = create_artificial_label (UNKNOWN_LOCATION);
5587 end2 = create_artificial_label (UNKNOWN_LOCATION);
5588 gimple_seq_add_stmt (dlist, gimple_build_label (body2));
5590 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5592 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
5593 tree decl_placeholder
5594 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
5595 SET_DECL_VALUE_EXPR (decl_placeholder,
5596 build_simple_mem_ref (y1));
5597 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
5598 SET_DECL_VALUE_EXPR (placeholder,
5599 y3 ? build_simple_mem_ref (y3)
5600 : error_mark_node);
5601 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
5602 x = lang_hooks.decls.omp_clause_default_ctor
5603 (c, build_simple_mem_ref (y1),
5604 y3 ? build_simple_mem_ref (y3) : NULL_TREE);
5605 if (x)
5606 gimplify_and_add (x, ilist);
5607 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
5609 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
5610 lower_omp (&tseq, ctx);
5611 gimple_seq_add_seq (ilist, tseq);
5613 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
5614 if (is_simd)
5616 SET_DECL_VALUE_EXPR (decl_placeholder,
5617 build_simple_mem_ref (y2));
5618 SET_DECL_VALUE_EXPR (placeholder,
5619 build_simple_mem_ref (y4));
5620 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
5621 lower_omp (&tseq, ctx);
5622 gimple_seq_add_seq (dlist, tseq);
5623 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5625 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
5626 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 0;
5627 if (y2)
5629 x = lang_hooks.decls.omp_clause_dtor
5630 (c, build_simple_mem_ref (y2));
5631 if (x)
5632 gimplify_and_add (x, dlist);
5635 else
5637 x = omp_reduction_init (c, TREE_TYPE (type));
5638 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
5640 /* reduction(-:var) sums up the partial results, so it
5641 acts identically to reduction(+:var). */
5642 if (code == MINUS_EXPR)
5643 code = PLUS_EXPR;
5645 gimplify_assign (build_simple_mem_ref (y1), x, ilist);
5646 if (is_simd)
5648 x = build2 (code, TREE_TYPE (type),
5649 build_simple_mem_ref (y4),
5650 build_simple_mem_ref (y2));
5651 gimplify_assign (build_simple_mem_ref (y4), x, dlist);
5654 gimple *g
5655 = gimple_build_assign (y1, POINTER_PLUS_EXPR, y1,
5656 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5657 gimple_seq_add_stmt (ilist, g);
5658 if (y3)
5660 g = gimple_build_assign (y3, POINTER_PLUS_EXPR, y3,
5661 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5662 gimple_seq_add_stmt (ilist, g);
5664 g = gimple_build_assign (i, PLUS_EXPR, i,
5665 build_int_cst (TREE_TYPE (i), 1));
5666 gimple_seq_add_stmt (ilist, g);
5667 g = gimple_build_cond (LE_EXPR, i, v, body, end);
5668 gimple_seq_add_stmt (ilist, g);
5669 gimple_seq_add_stmt (ilist, gimple_build_label (end));
5670 if (y2)
5672 g = gimple_build_assign (y2, POINTER_PLUS_EXPR, y2,
5673 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5674 gimple_seq_add_stmt (dlist, g);
5675 if (y4)
5677 g = gimple_build_assign
5678 (y4, POINTER_PLUS_EXPR, y4,
5679 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5680 gimple_seq_add_stmt (dlist, g);
5682 g = gimple_build_assign (i2, PLUS_EXPR, i2,
5683 build_int_cst (TREE_TYPE (i2), 1));
5684 gimple_seq_add_stmt (dlist, g);
5685 g = gimple_build_cond (LE_EXPR, i2, v, body2, end2);
5686 gimple_seq_add_stmt (dlist, g);
5687 gimple_seq_add_stmt (dlist, gimple_build_label (end2));
5689 if (allocator)
5691 tree f = builtin_decl_explicit (BUILT_IN_GOMP_FREE);
5692 g = gimple_build_call (f, 2, allocate_ptr, allocator);
5693 gimple_seq_add_stmt (dlist, g);
5695 continue;
5697 else if (pass == 2)
5699 tree out = maybe_lookup_decl_in_outer_ctx (var, ctx);
5700 if (is_global_var (out))
5701 x = var;
5702 else if (is_omp_target (ctx->stmt))
5703 x = out;
5704 else
5706 bool by_ref = use_pointer_for_field (var, ctx);
5707 x = build_receiver_ref (var, by_ref, ctx);
5709 if (!omp_privatize_by_reference (var))
5710 x = build_fold_addr_expr (x);
5711 x = fold_convert (ptr_type_node, x);
5712 unsigned cnt = task_reduction_cnt - 1;
5713 if (!task_reduction_needs_orig_p)
5714 cnt += task_reduction_cntorig_full - task_reduction_cntorig;
5715 else
5716 cnt = task_reduction_cntorig - 1;
5717 tree r = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5718 size_int (cnt), NULL_TREE, NULL_TREE);
5719 gimplify_assign (r, x, ilist);
5720 continue;
5722 else if (pass == 3)
5724 tree type = TREE_TYPE (new_var);
5725 if (!omp_privatize_by_reference (var))
5726 type = build_pointer_type (type);
5727 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5729 unsigned cnt = task_reduction_cnt - 1;
5730 if (!task_reduction_needs_orig_p)
5731 cnt += (task_reduction_cntorig_full
5732 - task_reduction_cntorig);
5733 else
5734 cnt = task_reduction_cntorig - 1;
5735 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5736 size_int (cnt), NULL_TREE, NULL_TREE);
5738 else
5740 unsigned int idx = *ctx->task_reduction_map->get (c);
5741 tree off;
5742 if (ctx->task_reductions[1 + idx])
5743 off = fold_convert (sizetype,
5744 ctx->task_reductions[1 + idx]);
5745 else
5746 off = task_reduction_read (ilist, tskred_temp, sizetype,
5747 7 + 3 * idx + 1);
5748 x = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node,
5749 tskred_base, off);
5751 x = fold_convert (type, x);
5752 tree t;
5753 if (omp_privatize_by_reference (var))
5755 gimplify_assign (new_var, x, ilist);
5756 t = new_var;
5757 new_var = build_simple_mem_ref (new_var);
5759 else
5761 t = create_tmp_var (type);
5762 gimplify_assign (t, x, ilist);
5763 SET_DECL_VALUE_EXPR (new_var, build_simple_mem_ref (t));
5764 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5766 t = fold_convert (build_pointer_type (boolean_type_node), t);
5767 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t,
5768 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5769 cond = create_tmp_var (TREE_TYPE (t));
5770 gimplify_assign (cond, t, ilist);
5772 else if (is_variable_sized (var))
5774 /* For variable sized types, we need to allocate the
5775 actual storage here. Call alloca and store the
5776 result in the pointer decl that we created elsewhere. */
5777 if (pass == 0)
5778 continue;
5780 if (c_kind != OMP_CLAUSE_FIRSTPRIVATE || !is_task_ctx (ctx))
5782 tree tmp;
5784 ptr = DECL_VALUE_EXPR (new_var);
5785 gcc_assert (TREE_CODE (ptr) == INDIRECT_REF);
5786 ptr = TREE_OPERAND (ptr, 0);
5787 gcc_assert (DECL_P (ptr));
5788 x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
5790 if (lower_private_allocate (var, new_var, allocator,
5791 allocate_ptr, ilist, ctx,
5792 false, x))
5793 tmp = allocate_ptr;
5794 else
5796 /* void *tmp = __builtin_alloca */
5797 tree atmp
5798 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
5799 gcall *stmt
5800 = gimple_build_call (atmp, 2, x,
5801 size_int (DECL_ALIGN (var)));
5802 cfun->calls_alloca = 1;
5803 tmp = create_tmp_var_raw (ptr_type_node);
5804 gimple_add_tmp_var (tmp);
5805 gimple_call_set_lhs (stmt, tmp);
5807 gimple_seq_add_stmt (ilist, stmt);
5810 x = fold_convert_loc (clause_loc, TREE_TYPE (ptr), tmp);
5811 gimplify_assign (ptr, x, ilist);
5814 else if (omp_privatize_by_reference (var)
5815 && (c_kind != OMP_CLAUSE_FIRSTPRIVATE
5816 || !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c)))
5818 /* For references that are being privatized for Fortran,
5819 allocate new backing storage for the new pointer
5820 variable. This allows us to avoid changing all the
5821 code that expects a pointer to something that expects
5822 a direct variable. */
5823 if (pass == 0)
5824 continue;
5826 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
5827 if (c_kind == OMP_CLAUSE_FIRSTPRIVATE && is_task_ctx (ctx))
5829 x = build_receiver_ref (var, false, ctx);
5830 if (ctx->allocate_map)
5831 if (tree *allocatep = ctx->allocate_map->get (var))
5833 allocator = *allocatep;
5834 if (TREE_CODE (allocator) == TREE_LIST)
5835 allocator = TREE_PURPOSE (allocator);
5836 if (TREE_CODE (allocator) != INTEGER_CST)
5837 allocator = build_outer_var_ref (allocator, ctx);
5838 allocator = fold_convert (pointer_sized_int_node,
5839 allocator);
5840 allocate_ptr = unshare_expr (x);
5842 if (allocator == NULL_TREE)
5843 x = build_fold_addr_expr_loc (clause_loc, x);
5845 else if (lower_private_allocate (var, new_var, allocator,
5846 allocate_ptr,
5847 ilist, ctx, true, x))
5848 x = allocate_ptr;
5849 else if (TREE_CONSTANT (x))
5851 /* For reduction in SIMD loop, defer adding the
5852 initialization of the reference, because if we decide
5853 to use SIMD array for it, the initilization could cause
5854 expansion ICE. Ditto for other privatization clauses. */
5855 if (is_simd)
5856 x = NULL_TREE;
5857 else
5859 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
5860 get_name (var));
5861 gimple_add_tmp_var (x);
5862 TREE_ADDRESSABLE (x) = 1;
5863 x = build_fold_addr_expr_loc (clause_loc, x);
5866 else
5868 tree atmp
5869 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
5870 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
5871 tree al = size_int (TYPE_ALIGN (rtype));
5872 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
5875 if (x)
5877 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
5878 gimplify_assign (new_var, x, ilist);
5881 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
5883 else if ((c_kind == OMP_CLAUSE_REDUCTION
5884 || c_kind == OMP_CLAUSE_IN_REDUCTION)
5885 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5887 if (pass == 0)
5888 continue;
5890 else if (pass != 0)
5891 continue;
5893 switch (OMP_CLAUSE_CODE (c))
5895 case OMP_CLAUSE_SHARED:
5896 /* Ignore shared directives in teams construct inside
5897 target construct. */
5898 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
5899 && !is_host_teams_ctx (ctx))
5900 continue;
5901 /* Shared global vars are just accessed directly. */
5902 if (is_global_var (new_var))
5903 break;
5904 /* For taskloop firstprivate/lastprivate, represented
5905 as firstprivate and shared clause on the task, new_var
5906 is the firstprivate var. */
5907 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
5908 break;
5909 /* Set up the DECL_VALUE_EXPR for shared variables now. This
5910 needs to be delayed until after fixup_child_record_type so
5911 that we get the correct type during the dereference. */
5912 by_ref = use_pointer_for_field (var, ctx);
5913 x = build_receiver_ref (var, by_ref, ctx);
5914 SET_DECL_VALUE_EXPR (new_var, x);
5915 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5917 /* ??? If VAR is not passed by reference, and the variable
5918 hasn't been initialized yet, then we'll get a warning for
5919 the store into the omp_data_s structure. Ideally, we'd be
5920 able to notice this and not store anything at all, but
5921 we're generating code too early. Suppress the warning. */
5922 if (!by_ref)
5923 suppress_warning (var, OPT_Wuninitialized);
5924 break;
5926 case OMP_CLAUSE__CONDTEMP_:
5927 if (is_parallel_ctx (ctx))
5929 x = build_receiver_ref (var, false, ctx);
5930 SET_DECL_VALUE_EXPR (new_var, x);
5931 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5933 else if (is_simd && !OMP_CLAUSE__CONDTEMP__ITER (c))
5935 x = build_zero_cst (TREE_TYPE (var));
5936 goto do_private;
5938 break;
5940 case OMP_CLAUSE_LASTPRIVATE:
5941 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
5942 break;
5943 /* FALLTHRU */
5945 case OMP_CLAUSE_PRIVATE:
5946 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE)
5947 x = build_outer_var_ref (var, ctx);
5948 else if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
5950 if (is_task_ctx (ctx))
5951 x = build_receiver_ref (var, false, ctx);
5952 else
5953 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_PRIVATE);
5955 else
5956 x = NULL;
5957 do_private:
5958 tree nx;
5959 bool copy_ctor;
5960 copy_ctor = false;
5961 lower_private_allocate (var, new_var, allocator, allocate_ptr,
5962 ilist, ctx, false, NULL_TREE);
5963 nx = unshare_expr (new_var);
5964 if (is_simd
5965 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5966 && OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c))
5967 copy_ctor = true;
5968 if (copy_ctor)
5969 nx = lang_hooks.decls.omp_clause_copy_ctor (c, nx, x);
5970 else
5971 nx = lang_hooks.decls.omp_clause_default_ctor (c, nx, x);
5972 if (is_simd)
5974 tree y = lang_hooks.decls.omp_clause_dtor (c, new_var);
5975 if ((TREE_ADDRESSABLE (new_var) || nx || y
5976 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5977 && (gimple_omp_for_collapse (ctx->stmt) != 1
5978 || (gimple_omp_for_index (ctx->stmt, 0)
5979 != new_var)))
5980 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE__CONDTEMP_
5981 || omp_privatize_by_reference (var))
5982 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
5983 ivar, lvar))
5985 if (omp_privatize_by_reference (var))
5987 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5988 tree new_vard = TREE_OPERAND (new_var, 0);
5989 gcc_assert (DECL_P (new_vard));
5990 SET_DECL_VALUE_EXPR (new_vard,
5991 build_fold_addr_expr (lvar));
5992 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
5995 if (nx)
5997 tree iv = unshare_expr (ivar);
5998 if (copy_ctor)
5999 x = lang_hooks.decls.omp_clause_copy_ctor (c, iv,
6001 else
6002 x = lang_hooks.decls.omp_clause_default_ctor (c,
6006 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE__CONDTEMP_)
6008 x = build2 (MODIFY_EXPR, TREE_TYPE (ivar),
6009 unshare_expr (ivar), x);
6010 nx = x;
6012 if (nx && x)
6013 gimplify_and_add (x, &llist[0]);
6014 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6015 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
6017 tree v = new_var;
6018 if (!DECL_P (v))
6020 gcc_assert (TREE_CODE (v) == MEM_REF);
6021 v = TREE_OPERAND (v, 0);
6022 gcc_assert (DECL_P (v));
6024 v = *ctx->lastprivate_conditional_map->get (v);
6025 tree t = create_tmp_var (TREE_TYPE (v));
6026 tree z = build_zero_cst (TREE_TYPE (v));
6027 tree orig_v
6028 = build_outer_var_ref (var, ctx,
6029 OMP_CLAUSE_LASTPRIVATE);
6030 gimple_seq_add_stmt (dlist,
6031 gimple_build_assign (t, z));
6032 gcc_assert (DECL_HAS_VALUE_EXPR_P (v));
6033 tree civar = DECL_VALUE_EXPR (v);
6034 gcc_assert (TREE_CODE (civar) == ARRAY_REF);
6035 civar = unshare_expr (civar);
6036 TREE_OPERAND (civar, 1) = sctx.idx;
6037 x = build2 (MODIFY_EXPR, TREE_TYPE (t), t,
6038 unshare_expr (civar));
6039 x = build2 (COMPOUND_EXPR, TREE_TYPE (orig_v), x,
6040 build2 (MODIFY_EXPR, TREE_TYPE (orig_v),
6041 orig_v, unshare_expr (ivar)));
6042 tree cond = build2 (LT_EXPR, boolean_type_node, t,
6043 civar);
6044 x = build3 (COND_EXPR, void_type_node, cond, x,
6045 void_node);
6046 gimple_seq tseq = NULL;
6047 gimplify_and_add (x, &tseq);
6048 if (ctx->outer)
6049 lower_omp (&tseq, ctx->outer);
6050 gimple_seq_add_seq (&llist[1], tseq);
6052 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6053 && ctx->for_simd_scan_phase)
6055 x = unshare_expr (ivar);
6056 tree orig_v
6057 = build_outer_var_ref (var, ctx,
6058 OMP_CLAUSE_LASTPRIVATE);
6059 x = lang_hooks.decls.omp_clause_assign_op (c, x,
6060 orig_v);
6061 gimplify_and_add (x, &llist[0]);
6063 if (y)
6065 y = lang_hooks.decls.omp_clause_dtor (c, ivar);
6066 if (y)
6067 gimplify_and_add (y, &llist[1]);
6069 break;
6071 if (omp_privatize_by_reference (var))
6073 gcc_assert (TREE_CODE (new_var) == MEM_REF);
6074 tree new_vard = TREE_OPERAND (new_var, 0);
6075 gcc_assert (DECL_P (new_vard));
6076 tree type = TREE_TYPE (TREE_TYPE (new_vard));
6077 x = TYPE_SIZE_UNIT (type);
6078 if (TREE_CONSTANT (x))
6080 x = create_tmp_var_raw (type, get_name (var));
6081 gimple_add_tmp_var (x);
6082 TREE_ADDRESSABLE (x) = 1;
6083 x = build_fold_addr_expr_loc (clause_loc, x);
6084 x = fold_convert_loc (clause_loc,
6085 TREE_TYPE (new_vard), x);
6086 gimplify_assign (new_vard, x, ilist);
6090 if (nx)
6091 gimplify_and_add (nx, ilist);
6092 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6093 && is_simd
6094 && ctx->for_simd_scan_phase)
6096 tree orig_v = build_outer_var_ref (var, ctx,
6097 OMP_CLAUSE_LASTPRIVATE);
6098 x = lang_hooks.decls.omp_clause_assign_op (c, new_var,
6099 orig_v);
6100 gimplify_and_add (x, ilist);
6102 /* FALLTHRU */
6104 do_dtor:
6105 x = lang_hooks.decls.omp_clause_dtor (c, new_var);
6106 if (x)
6107 gimplify_and_add (x, dlist);
6108 if (allocator)
6110 if (!is_gimple_val (allocator))
6112 tree avar = create_tmp_var (TREE_TYPE (allocator));
6113 gimplify_assign (avar, allocator, dlist);
6114 allocator = avar;
6116 if (!is_gimple_val (allocate_ptr))
6118 tree apvar = create_tmp_var (TREE_TYPE (allocate_ptr));
6119 gimplify_assign (apvar, allocate_ptr, dlist);
6120 allocate_ptr = apvar;
6122 tree f = builtin_decl_explicit (BUILT_IN_GOMP_FREE);
6123 gimple *g
6124 = gimple_build_call (f, 2, allocate_ptr, allocator);
6125 gimple_seq_add_stmt (dlist, g);
6127 break;
6129 case OMP_CLAUSE_LINEAR:
6130 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
6131 goto do_firstprivate;
6132 if (OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
6133 x = NULL;
6134 else
6135 x = build_outer_var_ref (var, ctx);
6136 goto do_private;
6138 case OMP_CLAUSE_FIRSTPRIVATE:
6139 if (is_task_ctx (ctx))
6141 if ((omp_privatize_by_reference (var)
6142 && !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c))
6143 || is_variable_sized (var))
6144 goto do_dtor;
6145 else if (is_global_var (maybe_lookup_decl_in_outer_ctx (var,
6146 ctx))
6147 || use_pointer_for_field (var, NULL))
6149 x = build_receiver_ref (var, false, ctx);
6150 if (ctx->allocate_map)
6151 if (tree *allocatep = ctx->allocate_map->get (var))
6153 allocator = *allocatep;
6154 if (TREE_CODE (allocator) == TREE_LIST)
6155 allocator = TREE_PURPOSE (allocator);
6156 if (TREE_CODE (allocator) != INTEGER_CST)
6157 allocator = build_outer_var_ref (allocator, ctx);
6158 allocator = fold_convert (pointer_sized_int_node,
6159 allocator);
6160 allocate_ptr = unshare_expr (x);
6161 x = build_simple_mem_ref (x);
6162 TREE_THIS_NOTRAP (x) = 1;
6164 SET_DECL_VALUE_EXPR (new_var, x);
6165 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
6166 goto do_dtor;
6169 if (OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c)
6170 && omp_privatize_by_reference (var))
6172 x = build_outer_var_ref (var, ctx);
6173 gcc_assert (TREE_CODE (x) == MEM_REF
6174 && integer_zerop (TREE_OPERAND (x, 1)));
6175 x = TREE_OPERAND (x, 0);
6176 x = lang_hooks.decls.omp_clause_copy_ctor
6177 (c, unshare_expr (new_var), x);
6178 gimplify_and_add (x, ilist);
6179 goto do_dtor;
6181 do_firstprivate:
6182 lower_private_allocate (var, new_var, allocator, allocate_ptr,
6183 ilist, ctx, false, NULL_TREE);
6184 x = build_outer_var_ref (var, ctx);
6185 if (is_simd)
6187 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
6188 && gimple_omp_for_combined_into_p (ctx->stmt))
6190 tree t = OMP_CLAUSE_LINEAR_STEP (c);
6191 tree stept = TREE_TYPE (t);
6192 tree ct = omp_find_clause (clauses,
6193 OMP_CLAUSE__LOOPTEMP_);
6194 gcc_assert (ct);
6195 tree l = OMP_CLAUSE_DECL (ct);
6196 tree n1 = fd->loop.n1;
6197 tree step = fd->loop.step;
6198 tree itype = TREE_TYPE (l);
6199 if (POINTER_TYPE_P (itype))
6200 itype = signed_type_for (itype);
6201 l = fold_build2 (MINUS_EXPR, itype, l, n1);
6202 if (TYPE_UNSIGNED (itype)
6203 && fd->loop.cond_code == GT_EXPR)
6204 l = fold_build2 (TRUNC_DIV_EXPR, itype,
6205 fold_build1 (NEGATE_EXPR, itype, l),
6206 fold_build1 (NEGATE_EXPR,
6207 itype, step));
6208 else
6209 l = fold_build2 (TRUNC_DIV_EXPR, itype, l, step);
6210 t = fold_build2 (MULT_EXPR, stept,
6211 fold_convert (stept, l), t);
6213 if (OMP_CLAUSE_LINEAR_ARRAY (c))
6215 if (omp_privatize_by_reference (var))
6217 gcc_assert (TREE_CODE (new_var) == MEM_REF);
6218 tree new_vard = TREE_OPERAND (new_var, 0);
6219 gcc_assert (DECL_P (new_vard));
6220 tree type = TREE_TYPE (TREE_TYPE (new_vard));
6221 nx = TYPE_SIZE_UNIT (type);
6222 if (TREE_CONSTANT (nx))
6224 nx = create_tmp_var_raw (type,
6225 get_name (var));
6226 gimple_add_tmp_var (nx);
6227 TREE_ADDRESSABLE (nx) = 1;
6228 nx = build_fold_addr_expr_loc (clause_loc,
6229 nx);
6230 nx = fold_convert_loc (clause_loc,
6231 TREE_TYPE (new_vard),
6232 nx);
6233 gimplify_assign (new_vard, nx, ilist);
6237 x = lang_hooks.decls.omp_clause_linear_ctor
6238 (c, new_var, x, t);
6239 gimplify_and_add (x, ilist);
6240 goto do_dtor;
6243 if (POINTER_TYPE_P (TREE_TYPE (x)))
6244 x = fold_build2 (POINTER_PLUS_EXPR,
6245 TREE_TYPE (x), x, t);
6246 else
6247 x = fold_build2 (PLUS_EXPR, TREE_TYPE (x), x, t);
6250 if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LINEAR
6251 || TREE_ADDRESSABLE (new_var)
6252 || omp_privatize_by_reference (var))
6253 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
6254 ivar, lvar))
6256 if (omp_privatize_by_reference (var))
6258 gcc_assert (TREE_CODE (new_var) == MEM_REF);
6259 tree new_vard = TREE_OPERAND (new_var, 0);
6260 gcc_assert (DECL_P (new_vard));
6261 SET_DECL_VALUE_EXPR (new_vard,
6262 build_fold_addr_expr (lvar));
6263 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
6265 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR)
6267 tree iv = create_tmp_var (TREE_TYPE (new_var));
6268 x = lang_hooks.decls.omp_clause_copy_ctor (c, iv, x);
6269 gimplify_and_add (x, ilist);
6270 gimple_stmt_iterator gsi
6271 = gsi_start (*gimple_omp_body_ptr (ctx->stmt));
6272 gassign *g
6273 = gimple_build_assign (unshare_expr (lvar), iv);
6274 gsi_insert_before_without_update (&gsi, g,
6275 GSI_SAME_STMT);
6276 tree t = OMP_CLAUSE_LINEAR_STEP (c);
6277 enum tree_code code = PLUS_EXPR;
6278 if (POINTER_TYPE_P (TREE_TYPE (new_var)))
6279 code = POINTER_PLUS_EXPR;
6280 g = gimple_build_assign (iv, code, iv, t);
6281 gsi_insert_before_without_update (&gsi, g,
6282 GSI_SAME_STMT);
6283 break;
6285 x = lang_hooks.decls.omp_clause_copy_ctor
6286 (c, unshare_expr (ivar), x);
6287 gimplify_and_add (x, &llist[0]);
6288 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
6289 if (x)
6290 gimplify_and_add (x, &llist[1]);
6291 break;
6293 if (omp_privatize_by_reference (var))
6295 gcc_assert (TREE_CODE (new_var) == MEM_REF);
6296 tree new_vard = TREE_OPERAND (new_var, 0);
6297 gcc_assert (DECL_P (new_vard));
6298 tree type = TREE_TYPE (TREE_TYPE (new_vard));
6299 nx = TYPE_SIZE_UNIT (type);
6300 if (TREE_CONSTANT (nx))
6302 nx = create_tmp_var_raw (type, get_name (var));
6303 gimple_add_tmp_var (nx);
6304 TREE_ADDRESSABLE (nx) = 1;
6305 nx = build_fold_addr_expr_loc (clause_loc, nx);
6306 nx = fold_convert_loc (clause_loc,
6307 TREE_TYPE (new_vard), nx);
6308 gimplify_assign (new_vard, nx, ilist);
6312 x = lang_hooks.decls.omp_clause_copy_ctor
6313 (c, unshare_expr (new_var), x);
6314 gimplify_and_add (x, ilist);
6315 goto do_dtor;
6317 case OMP_CLAUSE__LOOPTEMP_:
6318 case OMP_CLAUSE__REDUCTEMP_:
6319 gcc_assert (is_taskreg_ctx (ctx));
6320 x = build_outer_var_ref (var, ctx);
6321 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
6322 gimplify_and_add (x, ilist);
6323 break;
6325 case OMP_CLAUSE_COPYIN:
6326 by_ref = use_pointer_for_field (var, NULL);
6327 x = build_receiver_ref (var, by_ref, ctx);
6328 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, x);
6329 append_to_statement_list (x, &copyin_seq);
6330 copyin_by_ref |= by_ref;
6331 break;
6333 case OMP_CLAUSE_REDUCTION:
6334 case OMP_CLAUSE_IN_REDUCTION:
6335 /* OpenACC reductions are initialized using the
6336 GOACC_REDUCTION internal function. */
6337 if (is_gimple_omp_oacc (ctx->stmt))
6338 break;
6339 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
6341 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
6342 gimple *tseq;
6343 tree ptype = TREE_TYPE (placeholder);
6344 if (cond)
6346 x = error_mark_node;
6347 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c)
6348 && !task_reduction_needs_orig_p)
6349 x = var;
6350 else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
6352 tree pptype = build_pointer_type (ptype);
6353 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
6354 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
6355 size_int (task_reduction_cnt_full
6356 + task_reduction_cntorig - 1),
6357 NULL_TREE, NULL_TREE);
6358 else
6360 unsigned int idx
6361 = *ctx->task_reduction_map->get (c);
6362 x = task_reduction_read (ilist, tskred_temp,
6363 pptype, 7 + 3 * idx);
6365 x = fold_convert (pptype, x);
6366 x = build_simple_mem_ref (x);
6369 else
6371 lower_private_allocate (var, new_var, allocator,
6372 allocate_ptr, ilist, ctx, false,
6373 NULL_TREE);
6374 x = build_outer_var_ref (var, ctx);
6376 if (omp_privatize_by_reference (var)
6377 && !useless_type_conversion_p (ptype, TREE_TYPE (x)))
6378 x = build_fold_addr_expr_loc (clause_loc, x);
6380 SET_DECL_VALUE_EXPR (placeholder, x);
6381 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
6382 tree new_vard = new_var;
6383 if (omp_privatize_by_reference (var))
6385 gcc_assert (TREE_CODE (new_var) == MEM_REF);
6386 new_vard = TREE_OPERAND (new_var, 0);
6387 gcc_assert (DECL_P (new_vard));
6389 tree rvar = NULL_TREE, *rvarp = NULL, rvar2 = NULL_TREE;
6390 if (is_simd
6391 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6392 && OMP_CLAUSE_REDUCTION_INSCAN (c))
6393 rvarp = &rvar;
6394 if (is_simd
6395 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
6396 ivar, lvar, rvarp,
6397 &rvar2))
6399 if (new_vard == new_var)
6401 gcc_assert (DECL_VALUE_EXPR (new_var) == lvar);
6402 SET_DECL_VALUE_EXPR (new_var, ivar);
6404 else
6406 SET_DECL_VALUE_EXPR (new_vard,
6407 build_fold_addr_expr (ivar));
6408 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
6410 x = lang_hooks.decls.omp_clause_default_ctor
6411 (c, unshare_expr (ivar),
6412 build_outer_var_ref (var, ctx));
6413 if (rvarp && ctx->for_simd_scan_phase)
6415 if (x)
6416 gimplify_and_add (x, &llist[0]);
6417 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
6418 if (x)
6419 gimplify_and_add (x, &llist[1]);
6420 break;
6422 else if (rvarp)
6424 if (x)
6426 gimplify_and_add (x, &llist[0]);
6428 tree ivar2 = unshare_expr (lvar);
6429 TREE_OPERAND (ivar2, 1) = sctx.idx;
6430 x = lang_hooks.decls.omp_clause_default_ctor
6431 (c, ivar2, build_outer_var_ref (var, ctx));
6432 gimplify_and_add (x, &llist[0]);
6434 if (rvar2)
6436 x = lang_hooks.decls.omp_clause_default_ctor
6437 (c, unshare_expr (rvar2),
6438 build_outer_var_ref (var, ctx));
6439 gimplify_and_add (x, &llist[0]);
6442 /* For types that need construction, add another
6443 private var which will be default constructed
6444 and optionally initialized with
6445 OMP_CLAUSE_REDUCTION_GIMPLE_INIT, as in the
6446 loop we want to assign this value instead of
6447 constructing and destructing it in each
6448 iteration. */
6449 tree nv = create_tmp_var_raw (TREE_TYPE (ivar));
6450 gimple_add_tmp_var (nv);
6451 ctx->cb.decl_map->put (TREE_OPERAND (rvar2
6452 ? rvar2
6453 : ivar, 0),
6454 nv);
6455 x = lang_hooks.decls.omp_clause_default_ctor
6456 (c, nv, build_outer_var_ref (var, ctx));
6457 gimplify_and_add (x, ilist);
6459 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
6461 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
6462 x = DECL_VALUE_EXPR (new_vard);
6463 tree vexpr = nv;
6464 if (new_vard != new_var)
6465 vexpr = build_fold_addr_expr (nv);
6466 SET_DECL_VALUE_EXPR (new_vard, vexpr);
6467 lower_omp (&tseq, ctx);
6468 SET_DECL_VALUE_EXPR (new_vard, x);
6469 gimple_seq_add_seq (ilist, tseq);
6470 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
6473 x = lang_hooks.decls.omp_clause_dtor (c, nv);
6474 if (x)
6475 gimplify_and_add (x, dlist);
6478 tree ref = build_outer_var_ref (var, ctx);
6479 x = unshare_expr (ivar);
6480 x = lang_hooks.decls.omp_clause_assign_op (c, x,
6481 ref);
6482 gimplify_and_add (x, &llist[0]);
6484 ref = build_outer_var_ref (var, ctx);
6485 x = lang_hooks.decls.omp_clause_assign_op (c, ref,
6486 rvar);
6487 gimplify_and_add (x, &llist[3]);
6489 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
6490 if (new_vard == new_var)
6491 SET_DECL_VALUE_EXPR (new_var, lvar);
6492 else
6493 SET_DECL_VALUE_EXPR (new_vard,
6494 build_fold_addr_expr (lvar));
6496 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
6497 if (x)
6498 gimplify_and_add (x, &llist[1]);
6500 tree ivar2 = unshare_expr (lvar);
6501 TREE_OPERAND (ivar2, 1) = sctx.idx;
6502 x = lang_hooks.decls.omp_clause_dtor (c, ivar2);
6503 if (x)
6504 gimplify_and_add (x, &llist[1]);
6506 if (rvar2)
6508 x = lang_hooks.decls.omp_clause_dtor (c, rvar2);
6509 if (x)
6510 gimplify_and_add (x, &llist[1]);
6512 break;
6514 if (x)
6515 gimplify_and_add (x, &llist[0]);
6516 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
6518 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
6519 lower_omp (&tseq, ctx);
6520 gimple_seq_add_seq (&llist[0], tseq);
6522 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
6523 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
6524 lower_omp (&tseq, ctx);
6525 gimple_seq_add_seq (&llist[1], tseq);
6526 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
6527 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
6528 if (new_vard == new_var)
6529 SET_DECL_VALUE_EXPR (new_var, lvar);
6530 else
6531 SET_DECL_VALUE_EXPR (new_vard,
6532 build_fold_addr_expr (lvar));
6533 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
6534 if (x)
6535 gimplify_and_add (x, &llist[1]);
6536 break;
6538 /* If this is a reference to constant size reduction var
6539 with placeholder, we haven't emitted the initializer
6540 for it because it is undesirable if SIMD arrays are used.
6541 But if they aren't used, we need to emit the deferred
6542 initialization now. */
6543 else if (omp_privatize_by_reference (var) && is_simd)
6544 handle_simd_reference (clause_loc, new_vard, ilist);
6546 tree lab2 = NULL_TREE;
6547 if (cond)
6549 gimple *g;
6550 if (!is_parallel_ctx (ctx))
6552 tree condv = create_tmp_var (boolean_type_node);
6553 tree m = build_simple_mem_ref (cond);
6554 g = gimple_build_assign (condv, m);
6555 gimple_seq_add_stmt (ilist, g);
6556 tree lab1
6557 = create_artificial_label (UNKNOWN_LOCATION);
6558 lab2 = create_artificial_label (UNKNOWN_LOCATION);
6559 g = gimple_build_cond (NE_EXPR, condv,
6560 boolean_false_node,
6561 lab2, lab1);
6562 gimple_seq_add_stmt (ilist, g);
6563 gimple_seq_add_stmt (ilist,
6564 gimple_build_label (lab1));
6566 g = gimple_build_assign (build_simple_mem_ref (cond),
6567 boolean_true_node);
6568 gimple_seq_add_stmt (ilist, g);
6570 x = lang_hooks.decls.omp_clause_default_ctor
6571 (c, unshare_expr (new_var),
6572 cond ? NULL_TREE
6573 : build_outer_var_ref (var, ctx));
6574 if (x)
6575 gimplify_and_add (x, ilist);
6577 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6578 && OMP_CLAUSE_REDUCTION_INSCAN (c))
6580 if (ctx->for_simd_scan_phase)
6581 goto do_dtor;
6582 if (x || (!is_simd
6583 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c)))
6585 tree nv = create_tmp_var_raw (TREE_TYPE (new_var));
6586 gimple_add_tmp_var (nv);
6587 ctx->cb.decl_map->put (new_vard, nv);
6588 x = lang_hooks.decls.omp_clause_default_ctor
6589 (c, nv, build_outer_var_ref (var, ctx));
6590 if (x)
6591 gimplify_and_add (x, ilist);
6592 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
6594 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
6595 tree vexpr = nv;
6596 if (new_vard != new_var)
6597 vexpr = build_fold_addr_expr (nv);
6598 SET_DECL_VALUE_EXPR (new_vard, vexpr);
6599 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
6600 lower_omp (&tseq, ctx);
6601 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
6602 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
6603 gimple_seq_add_seq (ilist, tseq);
6605 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
6606 if (is_simd && ctx->scan_exclusive)
6608 tree nv2
6609 = create_tmp_var_raw (TREE_TYPE (new_var));
6610 gimple_add_tmp_var (nv2);
6611 ctx->cb.decl_map->put (nv, nv2);
6612 x = lang_hooks.decls.omp_clause_default_ctor
6613 (c, nv2, build_outer_var_ref (var, ctx));
6614 gimplify_and_add (x, ilist);
6615 x = lang_hooks.decls.omp_clause_dtor (c, nv2);
6616 if (x)
6617 gimplify_and_add (x, dlist);
6619 x = lang_hooks.decls.omp_clause_dtor (c, nv);
6620 if (x)
6621 gimplify_and_add (x, dlist);
6623 else if (is_simd
6624 && ctx->scan_exclusive
6625 && TREE_ADDRESSABLE (TREE_TYPE (new_var)))
6627 tree nv2 = create_tmp_var_raw (TREE_TYPE (new_var));
6628 gimple_add_tmp_var (nv2);
6629 ctx->cb.decl_map->put (new_vard, nv2);
6630 x = lang_hooks.decls.omp_clause_dtor (c, nv2);
6631 if (x)
6632 gimplify_and_add (x, dlist);
6634 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
6635 goto do_dtor;
6638 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
6640 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
6641 if (c_kind == OMP_CLAUSE_IN_REDUCTION
6642 && is_omp_target (ctx->stmt))
6644 tree d = maybe_lookup_decl_in_outer_ctx (var, ctx);
6645 tree oldv = NULL_TREE;
6646 gcc_assert (d);
6647 if (DECL_HAS_VALUE_EXPR_P (d))
6648 oldv = DECL_VALUE_EXPR (d);
6649 SET_DECL_VALUE_EXPR (d, new_vard);
6650 DECL_HAS_VALUE_EXPR_P (d) = 1;
6651 lower_omp (&tseq, ctx);
6652 if (oldv)
6653 SET_DECL_VALUE_EXPR (d, oldv);
6654 else
6656 SET_DECL_VALUE_EXPR (d, NULL_TREE);
6657 DECL_HAS_VALUE_EXPR_P (d) = 0;
6660 else
6661 lower_omp (&tseq, ctx);
6662 gimple_seq_add_seq (ilist, tseq);
6664 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
6665 if (is_simd)
6667 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
6668 lower_omp (&tseq, ctx);
6669 gimple_seq_add_seq (dlist, tseq);
6670 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
6672 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
6673 if (cond)
6675 if (lab2)
6676 gimple_seq_add_stmt (ilist, gimple_build_label (lab2));
6677 break;
6679 goto do_dtor;
6681 else
6683 x = omp_reduction_init (c, TREE_TYPE (new_var));
6684 gcc_assert (TREE_CODE (TREE_TYPE (new_var)) != ARRAY_TYPE);
6685 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
6687 if (cond)
6689 gimple *g;
6690 tree lab2 = NULL_TREE;
6691 /* GOMP_taskgroup_reduction_register memsets the whole
6692 array to zero. If the initializer is zero, we don't
6693 need to initialize it again, just mark it as ever
6694 used unconditionally, i.e. cond = true. */
6695 if (initializer_zerop (x))
6697 g = gimple_build_assign (build_simple_mem_ref (cond),
6698 boolean_true_node);
6699 gimple_seq_add_stmt (ilist, g);
6700 break;
6703 /* Otherwise, emit
6704 if (!cond) { cond = true; new_var = x; } */
6705 if (!is_parallel_ctx (ctx))
6707 tree condv = create_tmp_var (boolean_type_node);
6708 tree m = build_simple_mem_ref (cond);
6709 g = gimple_build_assign (condv, m);
6710 gimple_seq_add_stmt (ilist, g);
6711 tree lab1
6712 = create_artificial_label (UNKNOWN_LOCATION);
6713 lab2 = create_artificial_label (UNKNOWN_LOCATION);
6714 g = gimple_build_cond (NE_EXPR, condv,
6715 boolean_false_node,
6716 lab2, lab1);
6717 gimple_seq_add_stmt (ilist, g);
6718 gimple_seq_add_stmt (ilist,
6719 gimple_build_label (lab1));
6721 g = gimple_build_assign (build_simple_mem_ref (cond),
6722 boolean_true_node);
6723 gimple_seq_add_stmt (ilist, g);
6724 gimplify_assign (new_var, x, ilist);
6725 if (lab2)
6726 gimple_seq_add_stmt (ilist, gimple_build_label (lab2));
6727 break;
6730 /* reduction(-:var) sums up the partial results, so it
6731 acts identically to reduction(+:var). */
6732 if (code == MINUS_EXPR)
6733 code = PLUS_EXPR;
6735 bool is_truth_op
6736 = (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR);
6737 tree new_vard = new_var;
6738 if (is_simd && omp_privatize_by_reference (var))
6740 gcc_assert (TREE_CODE (new_var) == MEM_REF);
6741 new_vard = TREE_OPERAND (new_var, 0);
6742 gcc_assert (DECL_P (new_vard));
6744 tree rvar = NULL_TREE, *rvarp = NULL, rvar2 = NULL_TREE;
6745 if (is_simd
6746 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6747 && OMP_CLAUSE_REDUCTION_INSCAN (c))
6748 rvarp = &rvar;
6749 if (is_simd
6750 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
6751 ivar, lvar, rvarp,
6752 &rvar2))
6754 if (new_vard != new_var)
6756 SET_DECL_VALUE_EXPR (new_vard,
6757 build_fold_addr_expr (lvar));
6758 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
6761 tree ref = build_outer_var_ref (var, ctx);
6763 if (rvarp)
6765 if (ctx->for_simd_scan_phase)
6766 break;
6767 gimplify_assign (ivar, ref, &llist[0]);
6768 ref = build_outer_var_ref (var, ctx);
6769 gimplify_assign (ref, rvar, &llist[3]);
6770 break;
6773 gimplify_assign (unshare_expr (ivar), x, &llist[0]);
6775 if (sctx.is_simt)
6777 if (!simt_lane)
6778 simt_lane = create_tmp_var (unsigned_type_node);
6779 x = build_call_expr_internal_loc
6780 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_BFLY,
6781 TREE_TYPE (ivar), 2, ivar, simt_lane);
6782 /* Make sure x is evaluated unconditionally. */
6783 tree bfly_var = create_tmp_var (TREE_TYPE (ivar));
6784 gimplify_assign (bfly_var, x, &llist[2]);
6785 x = build2 (code, TREE_TYPE (ivar), ivar, bfly_var);
6786 gimplify_assign (ivar, x, &llist[2]);
6788 tree ivar2 = ivar;
6789 tree ref2 = ref;
6790 if (is_truth_op)
6792 tree zero = build_zero_cst (TREE_TYPE (ivar));
6793 ivar2 = fold_build2_loc (clause_loc, NE_EXPR,
6794 boolean_type_node, ivar,
6795 zero);
6796 ref2 = fold_build2_loc (clause_loc, NE_EXPR,
6797 boolean_type_node, ref,
6798 zero);
6800 x = build2 (code, TREE_TYPE (ref), ref2, ivar2);
6801 if (is_truth_op)
6802 x = fold_convert (TREE_TYPE (ref), x);
6803 ref = build_outer_var_ref (var, ctx);
6804 gimplify_assign (ref, x, &llist[1]);
6807 else
6809 lower_private_allocate (var, new_var, allocator,
6810 allocate_ptr, ilist, ctx,
6811 false, NULL_TREE);
6812 if (omp_privatize_by_reference (var) && is_simd)
6813 handle_simd_reference (clause_loc, new_vard, ilist);
6814 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6815 && OMP_CLAUSE_REDUCTION_INSCAN (c))
6816 break;
6817 gimplify_assign (new_var, x, ilist);
6818 if (is_simd)
6820 tree ref = build_outer_var_ref (var, ctx);
6821 tree new_var2 = new_var;
6822 tree ref2 = ref;
6823 if (is_truth_op)
6825 tree zero = build_zero_cst (TREE_TYPE (new_var));
6826 new_var2
6827 = fold_build2_loc (clause_loc, NE_EXPR,
6828 boolean_type_node, new_var,
6829 zero);
6830 ref2 = fold_build2_loc (clause_loc, NE_EXPR,
6831 boolean_type_node, ref,
6832 zero);
6834 x = build2 (code, TREE_TYPE (ref2), ref2, new_var2);
6835 if (is_truth_op)
6836 x = fold_convert (TREE_TYPE (new_var), x);
6837 ref = build_outer_var_ref (var, ctx);
6838 gimplify_assign (ref, x, dlist);
6840 if (allocator)
6841 goto do_dtor;
6844 break;
6846 default:
6847 gcc_unreachable ();
6851 if (tskred_avar)
6853 tree clobber = build_clobber (TREE_TYPE (tskred_avar));
6854 gimple_seq_add_stmt (ilist, gimple_build_assign (tskred_avar, clobber));
6857 if (known_eq (sctx.max_vf, 1U))
6859 sctx.is_simt = false;
6860 if (ctx->lastprivate_conditional_map)
6862 if (gimple_omp_for_combined_into_p (ctx->stmt))
6864 /* Signal to lower_omp_1 that it should use parent context. */
6865 ctx->combined_into_simd_safelen1 = true;
6866 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
6867 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6868 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
6870 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
6871 omp_context *outer = ctx->outer;
6872 if (gimple_code (outer->stmt) == GIMPLE_OMP_SCAN)
6873 outer = outer->outer;
6874 tree *v = ctx->lastprivate_conditional_map->get (o);
6875 tree po = lookup_decl (OMP_CLAUSE_DECL (c), outer);
6876 tree *pv = outer->lastprivate_conditional_map->get (po);
6877 *v = *pv;
6880 else
6882 /* When not vectorized, treat lastprivate(conditional:) like
6883 normal lastprivate, as there will be just one simd lane
6884 writing the privatized variable. */
6885 delete ctx->lastprivate_conditional_map;
6886 ctx->lastprivate_conditional_map = NULL;
6891 if (nonconst_simd_if)
6893 if (sctx.lane == NULL_TREE)
6895 sctx.idx = create_tmp_var (unsigned_type_node);
6896 sctx.lane = create_tmp_var (unsigned_type_node);
6898 /* FIXME: For now. */
6899 sctx.is_simt = false;
6902 if (sctx.lane || sctx.is_simt)
6904 uid = create_tmp_var (ptr_type_node, "simduid");
6905 /* Don't want uninit warnings on simduid, it is always uninitialized,
6906 but we use it not for the value, but for the DECL_UID only. */
6907 suppress_warning (uid, OPT_Wuninitialized);
6908 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SIMDUID_);
6909 OMP_CLAUSE__SIMDUID__DECL (c) = uid;
6910 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
6911 gimple_omp_for_set_clauses (ctx->stmt, c);
6913 /* Emit calls denoting privatized variables and initializing a pointer to
6914 structure that holds private variables as fields after ompdevlow pass. */
6915 if (sctx.is_simt)
6917 sctx.simt_eargs[0] = uid;
6918 gimple *g
6919 = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, sctx.simt_eargs);
6920 gimple_call_set_lhs (g, uid);
6921 gimple_seq_add_stmt (ilist, g);
6922 sctx.simt_eargs.release ();
6924 simtrec = create_tmp_var (ptr_type_node, ".omp_simt");
6925 g = gimple_build_call_internal (IFN_GOMP_SIMT_ENTER_ALLOC, 1, uid);
6926 gimple_call_set_lhs (g, simtrec);
6927 gimple_seq_add_stmt (ilist, g);
6929 if (sctx.lane)
6931 gimple *g = gimple_build_call_internal (IFN_GOMP_SIMD_LANE,
6932 2 + (nonconst_simd_if != NULL),
6933 uid, integer_zero_node,
6934 nonconst_simd_if);
6935 gimple_call_set_lhs (g, sctx.lane);
6936 gimple_stmt_iterator gsi = gsi_start (*gimple_omp_body_ptr (ctx->stmt));
6937 gsi_insert_before_without_update (&gsi, g, GSI_SAME_STMT);
6938 g = gimple_build_assign (sctx.lane, INTEGER_CST,
6939 build_int_cst (unsigned_type_node, 0));
6940 gimple_seq_add_stmt (ilist, g);
6941 if (sctx.lastlane)
6943 g = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
6944 2, uid, sctx.lane);
6945 gimple_call_set_lhs (g, sctx.lastlane);
6946 gimple_seq_add_stmt (dlist, g);
6947 gimple_seq_add_seq (dlist, llist[3]);
6949 /* Emit reductions across SIMT lanes in log_2(simt_vf) steps. */
6950 if (llist[2])
6952 tree simt_vf = create_tmp_var (unsigned_type_node);
6953 g = gimple_build_call_internal (IFN_GOMP_SIMT_VF, 0);
6954 gimple_call_set_lhs (g, simt_vf);
6955 gimple_seq_add_stmt (dlist, g);
6957 tree t = build_int_cst (unsigned_type_node, 1);
6958 g = gimple_build_assign (simt_lane, INTEGER_CST, t);
6959 gimple_seq_add_stmt (dlist, g);
6961 t = build_int_cst (unsigned_type_node, 0);
6962 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
6963 gimple_seq_add_stmt (dlist, g);
6965 tree body = create_artificial_label (UNKNOWN_LOCATION);
6966 tree header = create_artificial_label (UNKNOWN_LOCATION);
6967 tree end = create_artificial_label (UNKNOWN_LOCATION);
6968 gimple_seq_add_stmt (dlist, gimple_build_goto (header));
6969 gimple_seq_add_stmt (dlist, gimple_build_label (body));
6971 gimple_seq_add_seq (dlist, llist[2]);
6973 g = gimple_build_assign (simt_lane, LSHIFT_EXPR, simt_lane, integer_one_node);
6974 gimple_seq_add_stmt (dlist, g);
6976 gimple_seq_add_stmt (dlist, gimple_build_label (header));
6977 g = gimple_build_cond (LT_EXPR, simt_lane, simt_vf, body, end);
6978 gimple_seq_add_stmt (dlist, g);
6980 gimple_seq_add_stmt (dlist, gimple_build_label (end));
6982 for (int i = 0; i < 2; i++)
6983 if (llist[i])
6985 tree vf = create_tmp_var (unsigned_type_node);
6986 g = gimple_build_call_internal (IFN_GOMP_SIMD_VF, 1, uid);
6987 gimple_call_set_lhs (g, vf);
6988 gimple_seq *seq = i == 0 ? ilist : dlist;
6989 gimple_seq_add_stmt (seq, g);
6990 tree t = build_int_cst (unsigned_type_node, 0);
6991 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
6992 gimple_seq_add_stmt (seq, g);
6993 tree body = create_artificial_label (UNKNOWN_LOCATION);
6994 tree header = create_artificial_label (UNKNOWN_LOCATION);
6995 tree end = create_artificial_label (UNKNOWN_LOCATION);
6996 gimple_seq_add_stmt (seq, gimple_build_goto (header));
6997 gimple_seq_add_stmt (seq, gimple_build_label (body));
6998 gimple_seq_add_seq (seq, llist[i]);
6999 t = build_int_cst (unsigned_type_node, 1);
7000 g = gimple_build_assign (sctx.idx, PLUS_EXPR, sctx.idx, t);
7001 gimple_seq_add_stmt (seq, g);
7002 gimple_seq_add_stmt (seq, gimple_build_label (header));
7003 g = gimple_build_cond (LT_EXPR, sctx.idx, vf, body, end);
7004 gimple_seq_add_stmt (seq, g);
7005 gimple_seq_add_stmt (seq, gimple_build_label (end));
7008 if (sctx.is_simt)
7010 gimple_seq_add_seq (dlist, sctx.simt_dlist);
7011 gimple *g
7012 = gimple_build_call_internal (IFN_GOMP_SIMT_EXIT, 1, simtrec);
7013 gimple_seq_add_stmt (dlist, g);
7016 /* The copyin sequence is not to be executed by the main thread, since
7017 that would result in self-copies. Perhaps not visible to scalars,
7018 but it certainly is to C++ operator=. */
7019 if (copyin_seq)
7021 x = build_call_expr (builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM),
7023 x = build2 (NE_EXPR, boolean_type_node, x,
7024 build_int_cst (TREE_TYPE (x), 0));
7025 x = build3 (COND_EXPR, void_type_node, x, copyin_seq, NULL);
7026 gimplify_and_add (x, ilist);
7029 /* If any copyin variable is passed by reference, we must ensure the
7030 master thread doesn't modify it before it is copied over in all
7031 threads. Similarly for variables in both firstprivate and
7032 lastprivate clauses we need to ensure the lastprivate copying
7033 happens after firstprivate copying in all threads. And similarly
7034 for UDRs if initializer expression refers to omp_orig. */
7035 if (copyin_by_ref || lastprivate_firstprivate
7036 || (reduction_omp_orig_ref
7037 && !ctx->scan_inclusive
7038 && !ctx->scan_exclusive))
7040 /* Don't add any barrier for #pragma omp simd or
7041 #pragma omp distribute. */
7042 if (!is_task_ctx (ctx)
7043 && (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
7044 || gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_FOR))
7045 gimple_seq_add_stmt (ilist, omp_build_barrier (NULL_TREE));
7048 /* If max_vf is non-zero, then we can use only a vectorization factor
7049 up to the max_vf we chose. So stick it into the safelen clause. */
7050 if (maybe_ne (sctx.max_vf, 0U))
7052 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
7053 OMP_CLAUSE_SAFELEN);
7054 poly_uint64 safe_len;
7055 if (c == NULL_TREE
7056 || (poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
7057 && maybe_gt (safe_len, sctx.max_vf)))
7059 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN);
7060 OMP_CLAUSE_SAFELEN_EXPR (c) = build_int_cst (integer_type_node,
7061 sctx.max_vf);
7062 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
7063 gimple_omp_for_set_clauses (ctx->stmt, c);
7068 /* Create temporary variables for lastprivate(conditional:) implementation
7069 in context CTX with CLAUSES. */
7071 static void
7072 lower_lastprivate_conditional_clauses (tree *clauses, omp_context *ctx)
7074 tree iter_type = NULL_TREE;
7075 tree cond_ptr = NULL_TREE;
7076 tree iter_var = NULL_TREE;
7077 bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
7078 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD);
7079 tree next = *clauses;
7080 for (tree c = *clauses; c; c = OMP_CLAUSE_CHAIN (c))
7081 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7082 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
7084 if (is_simd)
7086 tree cc = omp_find_clause (next, OMP_CLAUSE__CONDTEMP_);
7087 gcc_assert (cc);
7088 if (iter_type == NULL_TREE)
7090 iter_type = TREE_TYPE (OMP_CLAUSE_DECL (cc));
7091 iter_var = create_tmp_var_raw (iter_type);
7092 DECL_CONTEXT (iter_var) = current_function_decl;
7093 DECL_SEEN_IN_BIND_EXPR_P (iter_var) = 1;
7094 DECL_CHAIN (iter_var) = ctx->block_vars;
7095 ctx->block_vars = iter_var;
7096 tree c3
7097 = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
7098 OMP_CLAUSE__CONDTEMP__ITER (c3) = 1;
7099 OMP_CLAUSE_DECL (c3) = iter_var;
7100 OMP_CLAUSE_CHAIN (c3) = *clauses;
7101 *clauses = c3;
7102 ctx->lastprivate_conditional_map = new hash_map<tree, tree>;
7104 next = OMP_CLAUSE_CHAIN (cc);
7105 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
7106 tree v = lookup_decl (OMP_CLAUSE_DECL (cc), ctx);
7107 ctx->lastprivate_conditional_map->put (o, v);
7108 continue;
7110 if (iter_type == NULL)
7112 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR)
7114 struct omp_for_data fd;
7115 omp_extract_for_data (as_a <gomp_for *> (ctx->stmt), &fd,
7116 NULL);
7117 iter_type = unsigned_type_for (fd.iter_type);
7119 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
7120 iter_type = unsigned_type_node;
7121 tree c2 = omp_find_clause (*clauses, OMP_CLAUSE__CONDTEMP_);
7122 if (c2)
7124 cond_ptr
7125 = lookup_decl_in_outer_ctx (OMP_CLAUSE_DECL (c2), ctx);
7126 OMP_CLAUSE_DECL (c2) = cond_ptr;
7128 else
7130 cond_ptr = create_tmp_var_raw (build_pointer_type (iter_type));
7131 DECL_CONTEXT (cond_ptr) = current_function_decl;
7132 DECL_SEEN_IN_BIND_EXPR_P (cond_ptr) = 1;
7133 DECL_CHAIN (cond_ptr) = ctx->block_vars;
7134 ctx->block_vars = cond_ptr;
7135 c2 = build_omp_clause (UNKNOWN_LOCATION,
7136 OMP_CLAUSE__CONDTEMP_);
7137 OMP_CLAUSE_DECL (c2) = cond_ptr;
7138 OMP_CLAUSE_CHAIN (c2) = *clauses;
7139 *clauses = c2;
7141 iter_var = create_tmp_var_raw (iter_type);
7142 DECL_CONTEXT (iter_var) = current_function_decl;
7143 DECL_SEEN_IN_BIND_EXPR_P (iter_var) = 1;
7144 DECL_CHAIN (iter_var) = ctx->block_vars;
7145 ctx->block_vars = iter_var;
7146 tree c3
7147 = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
7148 OMP_CLAUSE__CONDTEMP__ITER (c3) = 1;
7149 OMP_CLAUSE_DECL (c3) = iter_var;
7150 OMP_CLAUSE_CHAIN (c3) = OMP_CLAUSE_CHAIN (c2);
7151 OMP_CLAUSE_CHAIN (c2) = c3;
7152 ctx->lastprivate_conditional_map = new hash_map<tree, tree>;
7154 tree v = create_tmp_var_raw (iter_type);
7155 DECL_CONTEXT (v) = current_function_decl;
7156 DECL_SEEN_IN_BIND_EXPR_P (v) = 1;
7157 DECL_CHAIN (v) = ctx->block_vars;
7158 ctx->block_vars = v;
7159 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
7160 ctx->lastprivate_conditional_map->put (o, v);
7165 /* Generate code to implement the LASTPRIVATE clauses. This is used for
7166 both parallel and workshare constructs. PREDICATE may be NULL if it's
7167 always true. BODY_P is the sequence to insert early initialization
7168 if needed, STMT_LIST is where the non-conditional lastprivate handling
7169 goes into and CSTMT_LIST is a sequence that needs to be run in a critical
7170 section. */
7172 static void
7173 lower_lastprivate_clauses (tree clauses, tree predicate, gimple_seq *body_p,
7174 gimple_seq *stmt_list, gimple_seq *cstmt_list,
7175 omp_context *ctx)
7177 tree x, c, label = NULL, orig_clauses = clauses;
7178 bool par_clauses = false;
7179 tree simduid = NULL, lastlane = NULL, simtcond = NULL, simtlast = NULL;
7180 unsigned HOST_WIDE_INT conditional_off = 0;
7181 gimple_seq post_stmt_list = NULL;
7183 /* Early exit if there are no lastprivate or linear clauses. */
7184 for (; clauses ; clauses = OMP_CLAUSE_CHAIN (clauses))
7185 if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LASTPRIVATE
7186 || (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LINEAR
7187 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (clauses)))
7188 break;
7189 if (clauses == NULL)
7191 /* If this was a workshare clause, see if it had been combined
7192 with its parallel. In that case, look for the clauses on the
7193 parallel statement itself. */
7194 if (is_parallel_ctx (ctx))
7195 return;
7197 ctx = ctx->outer;
7198 if (ctx == NULL || !is_parallel_ctx (ctx))
7199 return;
7201 clauses = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
7202 OMP_CLAUSE_LASTPRIVATE);
7203 if (clauses == NULL)
7204 return;
7205 par_clauses = true;
7208 bool maybe_simt = false;
7209 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
7210 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
7212 maybe_simt = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMT_);
7213 simduid = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMDUID_);
7214 if (simduid)
7215 simduid = OMP_CLAUSE__SIMDUID__DECL (simduid);
7218 if (predicate)
7220 gcond *stmt;
7221 tree label_true, arm1, arm2;
7222 enum tree_code pred_code = TREE_CODE (predicate);
7224 label = create_artificial_label (UNKNOWN_LOCATION);
7225 label_true = create_artificial_label (UNKNOWN_LOCATION);
7226 if (TREE_CODE_CLASS (pred_code) == tcc_comparison)
7228 arm1 = TREE_OPERAND (predicate, 0);
7229 arm2 = TREE_OPERAND (predicate, 1);
7230 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
7231 gimplify_expr (&arm2, stmt_list, NULL, is_gimple_val, fb_rvalue);
7233 else
7235 arm1 = predicate;
7236 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
7237 arm2 = boolean_false_node;
7238 pred_code = NE_EXPR;
7240 if (maybe_simt)
7242 c = build2 (pred_code, boolean_type_node, arm1, arm2);
7243 c = fold_convert (integer_type_node, c);
7244 simtcond = create_tmp_var (integer_type_node);
7245 gimplify_assign (simtcond, c, stmt_list);
7246 gcall *g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY,
7247 1, simtcond);
7248 c = create_tmp_var (integer_type_node);
7249 gimple_call_set_lhs (g, c);
7250 gimple_seq_add_stmt (stmt_list, g);
7251 stmt = gimple_build_cond (NE_EXPR, c, integer_zero_node,
7252 label_true, label);
7254 else
7255 stmt = gimple_build_cond (pred_code, arm1, arm2, label_true, label);
7256 gimple_seq_add_stmt (stmt_list, stmt);
7257 gimple_seq_add_stmt (stmt_list, gimple_build_label (label_true));
7260 tree cond_ptr = NULL_TREE;
7261 for (c = clauses; c ;)
7263 tree var, new_var;
7264 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
7265 gimple_seq *this_stmt_list = stmt_list;
7266 tree lab2 = NULL_TREE;
7268 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7269 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
7270 && ctx->lastprivate_conditional_map
7271 && !ctx->combined_into_simd_safelen1)
7273 gcc_assert (body_p);
7274 if (simduid)
7275 goto next;
7276 if (cond_ptr == NULL_TREE)
7278 cond_ptr = omp_find_clause (orig_clauses, OMP_CLAUSE__CONDTEMP_);
7279 cond_ptr = OMP_CLAUSE_DECL (cond_ptr);
7281 tree type = TREE_TYPE (TREE_TYPE (cond_ptr));
7282 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
7283 tree v = *ctx->lastprivate_conditional_map->get (o);
7284 gimplify_assign (v, build_zero_cst (type), body_p);
7285 this_stmt_list = cstmt_list;
7286 tree mem;
7287 if (POINTER_TYPE_P (TREE_TYPE (cond_ptr)))
7289 mem = build2 (MEM_REF, type, cond_ptr,
7290 build_int_cst (TREE_TYPE (cond_ptr),
7291 conditional_off));
7292 conditional_off += tree_to_uhwi (TYPE_SIZE_UNIT (type));
7294 else
7295 mem = build4 (ARRAY_REF, type, cond_ptr,
7296 size_int (conditional_off++), NULL_TREE, NULL_TREE);
7297 tree mem2 = copy_node (mem);
7298 gimple_seq seq = NULL;
7299 mem = force_gimple_operand (mem, &seq, true, NULL_TREE);
7300 gimple_seq_add_seq (this_stmt_list, seq);
7301 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
7302 lab2 = create_artificial_label (UNKNOWN_LOCATION);
7303 gimple *g = gimple_build_cond (GT_EXPR, v, mem, lab1, lab2);
7304 gimple_seq_add_stmt (this_stmt_list, g);
7305 gimple_seq_add_stmt (this_stmt_list, gimple_build_label (lab1));
7306 gimplify_assign (mem2, v, this_stmt_list);
7308 else if (predicate
7309 && ctx->combined_into_simd_safelen1
7310 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7311 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
7312 && ctx->lastprivate_conditional_map)
7313 this_stmt_list = &post_stmt_list;
7315 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7316 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
7317 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
7319 var = OMP_CLAUSE_DECL (c);
7320 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7321 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
7322 && is_taskloop_ctx (ctx))
7324 gcc_checking_assert (ctx->outer && is_task_ctx (ctx->outer));
7325 new_var = lookup_decl (var, ctx->outer);
7327 else
7329 new_var = lookup_decl (var, ctx);
7330 /* Avoid uninitialized warnings for lastprivate and
7331 for linear iterators. */
7332 if (predicate
7333 && (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7334 || OMP_CLAUSE_LINEAR_NO_COPYIN (c)))
7335 suppress_warning (new_var, OPT_Wuninitialized);
7338 if (!maybe_simt && simduid && DECL_HAS_VALUE_EXPR_P (new_var))
7340 tree val = DECL_VALUE_EXPR (new_var);
7341 if (TREE_CODE (val) == ARRAY_REF
7342 && VAR_P (TREE_OPERAND (val, 0))
7343 && lookup_attribute ("omp simd array",
7344 DECL_ATTRIBUTES (TREE_OPERAND (val,
7345 0))))
7347 if (lastlane == NULL)
7349 lastlane = create_tmp_var (unsigned_type_node);
7350 gcall *g
7351 = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
7352 2, simduid,
7353 TREE_OPERAND (val, 1));
7354 gimple_call_set_lhs (g, lastlane);
7355 gimple_seq_add_stmt (this_stmt_list, g);
7357 new_var = build4 (ARRAY_REF, TREE_TYPE (val),
7358 TREE_OPERAND (val, 0), lastlane,
7359 NULL_TREE, NULL_TREE);
7360 TREE_THIS_NOTRAP (new_var) = 1;
7363 else if (maybe_simt)
7365 tree val = (DECL_HAS_VALUE_EXPR_P (new_var)
7366 ? DECL_VALUE_EXPR (new_var)
7367 : new_var);
7368 if (simtlast == NULL)
7370 simtlast = create_tmp_var (unsigned_type_node);
7371 gcall *g = gimple_build_call_internal
7372 (IFN_GOMP_SIMT_LAST_LANE, 1, simtcond);
7373 gimple_call_set_lhs (g, simtlast);
7374 gimple_seq_add_stmt (this_stmt_list, g);
7376 x = build_call_expr_internal_loc
7377 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_IDX,
7378 TREE_TYPE (val), 2, val, simtlast);
7379 new_var = unshare_expr (new_var);
7380 gimplify_assign (new_var, x, this_stmt_list);
7381 new_var = unshare_expr (new_var);
7384 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7385 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
7387 lower_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
7388 gimple_seq_add_seq (this_stmt_list,
7389 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
7390 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) = NULL;
7392 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
7393 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
7395 lower_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
7396 gimple_seq_add_seq (this_stmt_list,
7397 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
7398 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) = NULL;
7401 x = NULL_TREE;
7402 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7403 && OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c)
7404 && is_taskloop_ctx (ctx))
7406 tree ovar = maybe_lookup_decl_in_outer_ctx (var,
7407 ctx->outer->outer);
7408 if (is_global_var (ovar))
7409 x = ovar;
7411 if (!x)
7412 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_LASTPRIVATE);
7413 if (omp_privatize_by_reference (var))
7414 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
7415 x = lang_hooks.decls.omp_clause_assign_op (c, x, new_var);
7416 gimplify_and_add (x, this_stmt_list);
7418 if (lab2)
7419 gimple_seq_add_stmt (this_stmt_list, gimple_build_label (lab2));
7422 next:
7423 c = OMP_CLAUSE_CHAIN (c);
7424 if (c == NULL && !par_clauses)
7426 /* If this was a workshare clause, see if it had been combined
7427 with its parallel. In that case, continue looking for the
7428 clauses also on the parallel statement itself. */
7429 if (is_parallel_ctx (ctx))
7430 break;
7432 ctx = ctx->outer;
7433 if (ctx == NULL || !is_parallel_ctx (ctx))
7434 break;
7436 c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
7437 OMP_CLAUSE_LASTPRIVATE);
7438 par_clauses = true;
7442 if (label)
7443 gimple_seq_add_stmt (stmt_list, gimple_build_label (label));
7444 gimple_seq_add_seq (stmt_list, post_stmt_list);
7447 /* Lower the OpenACC reductions of CLAUSES for compute axis LEVEL
7448 (which might be a placeholder). INNER is true if this is an inner
7449 axis of a multi-axis loop. FORK and JOIN are (optional) fork and
7450 join markers. Generate the before-loop forking sequence in
7451 FORK_SEQ and the after-loop joining sequence to JOIN_SEQ. The
7452 general form of these sequences is
7454 GOACC_REDUCTION_SETUP
7455 GOACC_FORK
7456 GOACC_REDUCTION_INIT
7458 GOACC_REDUCTION_FINI
7459 GOACC_JOIN
7460 GOACC_REDUCTION_TEARDOWN. */
7462 static void
7463 lower_oacc_reductions (location_t loc, tree clauses, tree level, bool inner,
7464 gcall *fork, gcall *private_marker, gcall *join,
7465 gimple_seq *fork_seq, gimple_seq *join_seq,
7466 omp_context *ctx)
7468 gimple_seq before_fork = NULL;
7469 gimple_seq after_fork = NULL;
7470 gimple_seq before_join = NULL;
7471 gimple_seq after_join = NULL;
7472 tree init_code = NULL_TREE, fini_code = NULL_TREE,
7473 setup_code = NULL_TREE, teardown_code = NULL_TREE;
7474 unsigned offset = 0;
7476 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
7477 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
7479 /* No 'reduction' clauses on OpenACC 'kernels'. */
7480 gcc_checking_assert (!is_oacc_kernels (ctx));
7481 /* Likewise, on OpenACC 'kernels' decomposed parts. */
7482 gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx));
7484 tree orig = OMP_CLAUSE_DECL (c);
7485 tree var = maybe_lookup_decl (orig, ctx);
7486 tree ref_to_res = NULL_TREE;
7487 tree incoming, outgoing, v1, v2, v3;
7488 bool is_private = false;
7490 enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
7491 if (rcode == MINUS_EXPR)
7492 rcode = PLUS_EXPR;
7493 else if (rcode == TRUTH_ANDIF_EXPR)
7494 rcode = BIT_AND_EXPR;
7495 else if (rcode == TRUTH_ORIF_EXPR)
7496 rcode = BIT_IOR_EXPR;
7497 tree op = build_int_cst (unsigned_type_node, rcode);
7499 if (!var)
7500 var = orig;
7502 incoming = outgoing = var;
7504 if (!inner)
7506 /* See if an outer construct also reduces this variable. */
7507 omp_context *outer = ctx;
7509 while (omp_context *probe = outer->outer)
7511 enum gimple_code type = gimple_code (probe->stmt);
7512 tree cls;
7514 switch (type)
7516 case GIMPLE_OMP_FOR:
7517 cls = gimple_omp_for_clauses (probe->stmt);
7518 break;
7520 case GIMPLE_OMP_TARGET:
7521 /* No 'reduction' clauses inside OpenACC 'kernels'
7522 regions. */
7523 gcc_checking_assert (!is_oacc_kernels (probe));
7525 if (!is_gimple_omp_offloaded (probe->stmt))
7526 goto do_lookup;
7528 cls = gimple_omp_target_clauses (probe->stmt);
7529 break;
7531 default:
7532 goto do_lookup;
7535 outer = probe;
7536 for (; cls; cls = OMP_CLAUSE_CHAIN (cls))
7537 if (OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_REDUCTION
7538 && orig == OMP_CLAUSE_DECL (cls))
7540 incoming = outgoing = lookup_decl (orig, probe);
7541 goto has_outer_reduction;
7543 else if ((OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_FIRSTPRIVATE
7544 || OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_PRIVATE)
7545 && orig == OMP_CLAUSE_DECL (cls))
7547 is_private = true;
7548 goto do_lookup;
7552 do_lookup:
7553 /* This is the outermost construct with this reduction,
7554 see if there's a mapping for it. */
7555 if (gimple_code (outer->stmt) == GIMPLE_OMP_TARGET
7556 && maybe_lookup_field (orig, outer) && !is_private)
7558 ref_to_res = build_receiver_ref (orig, false, outer);
7559 if (omp_privatize_by_reference (orig))
7560 ref_to_res = build_simple_mem_ref (ref_to_res);
7562 tree type = TREE_TYPE (var);
7563 if (POINTER_TYPE_P (type))
7564 type = TREE_TYPE (type);
7566 outgoing = var;
7567 incoming = omp_reduction_init_op (loc, rcode, type);
7569 else
7571 /* Try to look at enclosing contexts for reduction var,
7572 use original if no mapping found. */
7573 tree t = NULL_TREE;
7574 omp_context *c = ctx->outer;
7575 while (c && !t)
7577 t = maybe_lookup_decl (orig, c);
7578 c = c->outer;
7580 incoming = outgoing = (t ? t : orig);
7583 has_outer_reduction:;
7586 if (!ref_to_res)
7587 ref_to_res = integer_zero_node;
7589 if (omp_privatize_by_reference (orig))
7591 tree type = TREE_TYPE (var);
7592 const char *id = IDENTIFIER_POINTER (DECL_NAME (var));
7594 if (!inner)
7596 tree x = create_tmp_var (TREE_TYPE (type), id);
7597 gimplify_assign (var, build_fold_addr_expr (x), fork_seq);
7600 v1 = create_tmp_var (type, id);
7601 v2 = create_tmp_var (type, id);
7602 v3 = create_tmp_var (type, id);
7604 gimplify_assign (v1, var, fork_seq);
7605 gimplify_assign (v2, var, fork_seq);
7606 gimplify_assign (v3, var, fork_seq);
7608 var = build_simple_mem_ref (var);
7609 v1 = build_simple_mem_ref (v1);
7610 v2 = build_simple_mem_ref (v2);
7611 v3 = build_simple_mem_ref (v3);
7612 outgoing = build_simple_mem_ref (outgoing);
7614 if (!TREE_CONSTANT (incoming))
7615 incoming = build_simple_mem_ref (incoming);
7617 else
7618 v1 = v2 = v3 = var;
7620 /* Determine position in reduction buffer, which may be used
7621 by target. The parser has ensured that this is not a
7622 variable-sized type. */
7623 fixed_size_mode mode
7624 = as_a <fixed_size_mode> (TYPE_MODE (TREE_TYPE (var)));
7625 unsigned align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
7626 offset = (offset + align - 1) & ~(align - 1);
7627 tree off = build_int_cst (sizetype, offset);
7628 offset += GET_MODE_SIZE (mode);
7630 if (!init_code)
7632 init_code = build_int_cst (integer_type_node,
7633 IFN_GOACC_REDUCTION_INIT);
7634 fini_code = build_int_cst (integer_type_node,
7635 IFN_GOACC_REDUCTION_FINI);
7636 setup_code = build_int_cst (integer_type_node,
7637 IFN_GOACC_REDUCTION_SETUP);
7638 teardown_code = build_int_cst (integer_type_node,
7639 IFN_GOACC_REDUCTION_TEARDOWN);
7642 tree setup_call
7643 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
7644 TREE_TYPE (var), 6, setup_code,
7645 unshare_expr (ref_to_res),
7646 incoming, level, op, off);
7647 tree init_call
7648 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
7649 TREE_TYPE (var), 6, init_code,
7650 unshare_expr (ref_to_res),
7651 v1, level, op, off);
7652 tree fini_call
7653 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
7654 TREE_TYPE (var), 6, fini_code,
7655 unshare_expr (ref_to_res),
7656 v2, level, op, off);
7657 tree teardown_call
7658 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
7659 TREE_TYPE (var), 6, teardown_code,
7660 ref_to_res, v3, level, op, off);
7662 gimplify_assign (v1, setup_call, &before_fork);
7663 gimplify_assign (v2, init_call, &after_fork);
7664 gimplify_assign (v3, fini_call, &before_join);
7665 gimplify_assign (outgoing, teardown_call, &after_join);
7668 /* Now stitch things together. */
7669 gimple_seq_add_seq (fork_seq, before_fork);
7670 if (private_marker)
7671 gimple_seq_add_stmt (fork_seq, private_marker);
7672 if (fork)
7673 gimple_seq_add_stmt (fork_seq, fork);
7674 gimple_seq_add_seq (fork_seq, after_fork);
7676 gimple_seq_add_seq (join_seq, before_join);
7677 if (join)
7678 gimple_seq_add_stmt (join_seq, join);
7679 gimple_seq_add_seq (join_seq, after_join);
7682 /* Generate code to implement the REDUCTION clauses, append it
7683 to STMT_SEQP. CLIST if non-NULL is a pointer to a sequence
7684 that should be emitted also inside of the critical section,
7685 in that case clear *CLIST afterwards, otherwise leave it as is
7686 and let the caller emit it itself. */
7688 static void
7689 lower_reduction_clauses (tree clauses, gimple_seq *stmt_seqp,
7690 gimple_seq *clist, omp_context *ctx)
7692 gimple_seq sub_seq = NULL;
7693 gimple *stmt;
7694 tree x, c;
7695 int count = 0;
7697 /* OpenACC loop reductions are handled elsewhere. */
7698 if (is_gimple_omp_oacc (ctx->stmt))
7699 return;
7701 /* SIMD reductions are handled in lower_rec_input_clauses. */
7702 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
7703 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
7704 return;
7706 /* inscan reductions are handled elsewhere. */
7707 if (ctx->scan_inclusive || ctx->scan_exclusive)
7708 return;
7710 /* First see if there is exactly one reduction clause. Use OMP_ATOMIC
7711 update in that case, otherwise use a lock. */
7712 for (c = clauses; c && count < 2; c = OMP_CLAUSE_CHAIN (c))
7713 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
7714 && !OMP_CLAUSE_REDUCTION_TASK (c))
7716 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
7717 || TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
7719 /* Never use OMP_ATOMIC for array reductions or UDRs. */
7720 count = -1;
7721 break;
7723 count++;
7726 if (count == 0)
7727 return;
7729 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7731 tree var, ref, new_var, orig_var;
7732 enum tree_code code;
7733 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
7735 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
7736 || OMP_CLAUSE_REDUCTION_TASK (c))
7737 continue;
7739 enum omp_clause_code ccode = OMP_CLAUSE_REDUCTION;
7740 orig_var = var = OMP_CLAUSE_DECL (c);
7741 if (TREE_CODE (var) == MEM_REF)
7743 var = TREE_OPERAND (var, 0);
7744 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
7745 var = TREE_OPERAND (var, 0);
7746 if (TREE_CODE (var) == ADDR_EXPR)
7747 var = TREE_OPERAND (var, 0);
7748 else
7750 /* If this is a pointer or referenced based array
7751 section, the var could be private in the outer
7752 context e.g. on orphaned loop construct. Pretend this
7753 is private variable's outer reference. */
7754 ccode = OMP_CLAUSE_PRIVATE;
7755 if (TREE_CODE (var) == INDIRECT_REF)
7756 var = TREE_OPERAND (var, 0);
7758 orig_var = var;
7759 if (is_variable_sized (var))
7761 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
7762 var = DECL_VALUE_EXPR (var);
7763 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
7764 var = TREE_OPERAND (var, 0);
7765 gcc_assert (DECL_P (var));
7768 new_var = lookup_decl (var, ctx);
7769 if (var == OMP_CLAUSE_DECL (c)
7770 && omp_privatize_by_reference (var))
7771 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
7772 ref = build_outer_var_ref (var, ctx, ccode);
7773 code = OMP_CLAUSE_REDUCTION_CODE (c);
7775 /* reduction(-:var) sums up the partial results, so it acts
7776 identically to reduction(+:var). */
7777 if (code == MINUS_EXPR)
7778 code = PLUS_EXPR;
7780 bool is_truth_op = (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR);
7781 if (count == 1)
7783 tree addr = build_fold_addr_expr_loc (clause_loc, ref);
7785 addr = save_expr (addr);
7786 ref = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (addr)), addr);
7787 tree new_var2 = new_var;
7788 tree ref2 = ref;
7789 if (is_truth_op)
7791 tree zero = build_zero_cst (TREE_TYPE (new_var));
7792 new_var2 = fold_build2_loc (clause_loc, NE_EXPR,
7793 boolean_type_node, new_var, zero);
7794 ref2 = fold_build2_loc (clause_loc, NE_EXPR, boolean_type_node,
7795 ref, zero);
7797 x = fold_build2_loc (clause_loc, code, TREE_TYPE (new_var2), ref2,
7798 new_var2);
7799 if (is_truth_op)
7800 x = fold_convert (TREE_TYPE (new_var), x);
7801 x = build2 (OMP_ATOMIC, void_type_node, addr, x);
7802 OMP_ATOMIC_MEMORY_ORDER (x) = OMP_MEMORY_ORDER_RELAXED;
7803 gimplify_and_add (x, stmt_seqp);
7804 return;
7806 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
7808 tree d = OMP_CLAUSE_DECL (c);
7809 tree type = TREE_TYPE (d);
7810 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
7811 tree i = create_tmp_var (TREE_TYPE (v));
7812 tree ptype = build_pointer_type (TREE_TYPE (type));
7813 tree bias = TREE_OPERAND (d, 1);
7814 d = TREE_OPERAND (d, 0);
7815 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
7817 tree b = TREE_OPERAND (d, 1);
7818 b = maybe_lookup_decl (b, ctx);
7819 if (b == NULL)
7821 b = TREE_OPERAND (d, 1);
7822 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
7824 if (integer_zerop (bias))
7825 bias = b;
7826 else
7828 bias = fold_convert_loc (clause_loc, TREE_TYPE (b), bias);
7829 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
7830 TREE_TYPE (b), b, bias);
7832 d = TREE_OPERAND (d, 0);
7834 /* For ref build_outer_var_ref already performs this, so
7835 only new_var needs a dereference. */
7836 if (TREE_CODE (d) == INDIRECT_REF)
7838 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
7839 gcc_assert (omp_privatize_by_reference (var)
7840 && var == orig_var);
7842 else if (TREE_CODE (d) == ADDR_EXPR)
7844 if (orig_var == var)
7846 new_var = build_fold_addr_expr (new_var);
7847 ref = build_fold_addr_expr (ref);
7850 else
7852 gcc_assert (orig_var == var);
7853 if (omp_privatize_by_reference (var))
7854 ref = build_fold_addr_expr (ref);
7856 if (DECL_P (v))
7858 tree t = maybe_lookup_decl (v, ctx);
7859 if (t)
7860 v = t;
7861 else
7862 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
7863 gimplify_expr (&v, stmt_seqp, NULL, is_gimple_val, fb_rvalue);
7865 if (!integer_zerop (bias))
7867 bias = fold_convert_loc (clause_loc, sizetype, bias);
7868 new_var = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
7869 TREE_TYPE (new_var), new_var,
7870 unshare_expr (bias));
7871 ref = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
7872 TREE_TYPE (ref), ref, bias);
7874 new_var = fold_convert_loc (clause_loc, ptype, new_var);
7875 ref = fold_convert_loc (clause_loc, ptype, ref);
7876 tree m = create_tmp_var (ptype);
7877 gimplify_assign (m, new_var, stmt_seqp);
7878 new_var = m;
7879 m = create_tmp_var (ptype);
7880 gimplify_assign (m, ref, stmt_seqp);
7881 ref = m;
7882 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), stmt_seqp);
7883 tree body = create_artificial_label (UNKNOWN_LOCATION);
7884 tree end = create_artificial_label (UNKNOWN_LOCATION);
7885 gimple_seq_add_stmt (&sub_seq, gimple_build_label (body));
7886 tree priv = build_simple_mem_ref_loc (clause_loc, new_var);
7887 tree out = build_simple_mem_ref_loc (clause_loc, ref);
7888 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
7890 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
7891 tree decl_placeholder
7892 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
7893 SET_DECL_VALUE_EXPR (placeholder, out);
7894 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
7895 SET_DECL_VALUE_EXPR (decl_placeholder, priv);
7896 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
7897 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
7898 gimple_seq_add_seq (&sub_seq,
7899 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
7900 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
7901 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
7902 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
7904 else
7906 tree out2 = out;
7907 tree priv2 = priv;
7908 if (is_truth_op)
7910 tree zero = build_zero_cst (TREE_TYPE (out));
7911 out2 = fold_build2_loc (clause_loc, NE_EXPR,
7912 boolean_type_node, out, zero);
7913 priv2 = fold_build2_loc (clause_loc, NE_EXPR,
7914 boolean_type_node, priv, zero);
7916 x = build2 (code, TREE_TYPE (out2), out2, priv2);
7917 if (is_truth_op)
7918 x = fold_convert (TREE_TYPE (out), x);
7919 out = unshare_expr (out);
7920 gimplify_assign (out, x, &sub_seq);
7922 gimple *g = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
7923 TYPE_SIZE_UNIT (TREE_TYPE (type)));
7924 gimple_seq_add_stmt (&sub_seq, g);
7925 g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
7926 TYPE_SIZE_UNIT (TREE_TYPE (type)));
7927 gimple_seq_add_stmt (&sub_seq, g);
7928 g = gimple_build_assign (i, PLUS_EXPR, i,
7929 build_int_cst (TREE_TYPE (i), 1));
7930 gimple_seq_add_stmt (&sub_seq, g);
7931 g = gimple_build_cond (LE_EXPR, i, v, body, end);
7932 gimple_seq_add_stmt (&sub_seq, g);
7933 gimple_seq_add_stmt (&sub_seq, gimple_build_label (end));
7935 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
7937 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
7939 if (omp_privatize_by_reference (var)
7940 && !useless_type_conversion_p (TREE_TYPE (placeholder),
7941 TREE_TYPE (ref)))
7942 ref = build_fold_addr_expr_loc (clause_loc, ref);
7943 SET_DECL_VALUE_EXPR (placeholder, ref);
7944 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
7945 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
7946 gimple_seq_add_seq (&sub_seq, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
7947 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
7948 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
7950 else
7952 tree new_var2 = new_var;
7953 tree ref2 = ref;
7954 if (is_truth_op)
7956 tree zero = build_zero_cst (TREE_TYPE (new_var));
7957 new_var2 = fold_build2_loc (clause_loc, NE_EXPR,
7958 boolean_type_node, new_var, zero);
7959 ref2 = fold_build2_loc (clause_loc, NE_EXPR, boolean_type_node,
7960 ref, zero);
7962 x = build2 (code, TREE_TYPE (ref), ref2, new_var2);
7963 if (is_truth_op)
7964 x = fold_convert (TREE_TYPE (new_var), x);
7965 ref = build_outer_var_ref (var, ctx);
7966 gimplify_assign (ref, x, &sub_seq);
7970 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START),
7972 gimple_seq_add_stmt (stmt_seqp, stmt);
7974 gimple_seq_add_seq (stmt_seqp, sub_seq);
7976 if (clist)
7978 gimple_seq_add_seq (stmt_seqp, *clist);
7979 *clist = NULL;
7982 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END),
7984 gimple_seq_add_stmt (stmt_seqp, stmt);
7988 /* Generate code to implement the COPYPRIVATE clauses. */
7990 static void
7991 lower_copyprivate_clauses (tree clauses, gimple_seq *slist, gimple_seq *rlist,
7992 omp_context *ctx)
7994 tree c;
7996 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7998 tree var, new_var, ref, x;
7999 bool by_ref;
8000 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
8002 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYPRIVATE)
8003 continue;
8005 var = OMP_CLAUSE_DECL (c);
8006 by_ref = use_pointer_for_field (var, NULL);
8008 ref = build_sender_ref (var, ctx);
8009 x = new_var = lookup_decl_in_outer_ctx (var, ctx);
8010 if (by_ref)
8012 x = build_fold_addr_expr_loc (clause_loc, new_var);
8013 x = fold_convert_loc (clause_loc, TREE_TYPE (ref), x);
8015 gimplify_assign (ref, x, slist);
8017 ref = build_receiver_ref (var, false, ctx);
8018 if (by_ref)
8020 ref = fold_convert_loc (clause_loc,
8021 build_pointer_type (TREE_TYPE (new_var)),
8022 ref);
8023 ref = build_fold_indirect_ref_loc (clause_loc, ref);
8025 if (omp_privatize_by_reference (var))
8027 ref = fold_convert_loc (clause_loc, TREE_TYPE (new_var), ref);
8028 ref = build_simple_mem_ref_loc (clause_loc, ref);
8029 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
8031 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, ref);
8032 gimplify_and_add (x, rlist);
8037 /* Generate code to implement the clauses, FIRSTPRIVATE, COPYIN, LASTPRIVATE,
8038 and REDUCTION from the sender (aka parent) side. */
8040 static void
8041 lower_send_clauses (tree clauses, gimple_seq *ilist, gimple_seq *olist,
8042 omp_context *ctx)
8044 tree c, t;
8045 int ignored_looptemp = 0;
8046 bool is_taskloop = false;
8048 /* For taskloop, ignore first two _looptemp_ clauses, those are initialized
8049 by GOMP_taskloop. */
8050 if (is_task_ctx (ctx) && gimple_omp_task_taskloop_p (ctx->stmt))
8052 ignored_looptemp = 2;
8053 is_taskloop = true;
8056 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
8058 tree val, ref, x, var;
8059 bool by_ref, do_in = false, do_out = false;
8060 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
8062 switch (OMP_CLAUSE_CODE (c))
8064 case OMP_CLAUSE_PRIVATE:
8065 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
8066 break;
8067 continue;
8068 case OMP_CLAUSE_FIRSTPRIVATE:
8069 case OMP_CLAUSE_COPYIN:
8070 case OMP_CLAUSE_LASTPRIVATE:
8071 case OMP_CLAUSE_IN_REDUCTION:
8072 case OMP_CLAUSE__REDUCTEMP_:
8073 break;
8074 case OMP_CLAUSE_REDUCTION:
8075 if (is_task_ctx (ctx) || OMP_CLAUSE_REDUCTION_TASK (c))
8076 continue;
8077 break;
8078 case OMP_CLAUSE_SHARED:
8079 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
8080 break;
8081 continue;
8082 case OMP_CLAUSE__LOOPTEMP_:
8083 if (ignored_looptemp)
8085 ignored_looptemp--;
8086 continue;
8088 break;
8089 default:
8090 continue;
8093 val = OMP_CLAUSE_DECL (c);
8094 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
8095 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION)
8096 && TREE_CODE (val) == MEM_REF)
8098 val = TREE_OPERAND (val, 0);
8099 if (TREE_CODE (val) == POINTER_PLUS_EXPR)
8100 val = TREE_OPERAND (val, 0);
8101 if (TREE_CODE (val) == INDIRECT_REF
8102 || TREE_CODE (val) == ADDR_EXPR)
8103 val = TREE_OPERAND (val, 0);
8104 if (is_variable_sized (val))
8105 continue;
8108 /* For OMP_CLAUSE_SHARED_FIRSTPRIVATE, look beyond the
8109 outer taskloop region. */
8110 omp_context *ctx_for_o = ctx;
8111 if (is_taskloop
8112 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
8113 && OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
8114 ctx_for_o = ctx->outer;
8116 var = lookup_decl_in_outer_ctx (val, ctx_for_o);
8118 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYIN
8119 && is_global_var (var)
8120 && (val == OMP_CLAUSE_DECL (c)
8121 || !is_task_ctx (ctx)
8122 || (TREE_CODE (TREE_TYPE (val)) != POINTER_TYPE
8123 && (TREE_CODE (TREE_TYPE (val)) != REFERENCE_TYPE
8124 || (TREE_CODE (TREE_TYPE (TREE_TYPE (val)))
8125 != POINTER_TYPE)))))
8126 continue;
8128 t = omp_member_access_dummy_var (var);
8129 if (t)
8131 var = DECL_VALUE_EXPR (var);
8132 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx_for_o);
8133 if (o != t)
8134 var = unshare_and_remap (var, t, o);
8135 else
8136 var = unshare_expr (var);
8139 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
8141 /* Handle taskloop firstprivate/lastprivate, where the
8142 lastprivate on GIMPLE_OMP_TASK is represented as
8143 OMP_CLAUSE_SHARED_FIRSTPRIVATE. */
8144 tree f = lookup_sfield ((splay_tree_key) &DECL_UID (val), ctx);
8145 x = omp_build_component_ref (ctx->sender_decl, f);
8146 if (use_pointer_for_field (val, ctx))
8147 var = build_fold_addr_expr (var);
8148 gimplify_assign (x, var, ilist);
8149 DECL_ABSTRACT_ORIGIN (f) = NULL;
8150 continue;
8153 if (((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
8154 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IN_REDUCTION)
8155 || val == OMP_CLAUSE_DECL (c))
8156 && is_variable_sized (val))
8157 continue;
8158 by_ref = use_pointer_for_field (val, NULL);
8160 switch (OMP_CLAUSE_CODE (c))
8162 case OMP_CLAUSE_FIRSTPRIVATE:
8163 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c)
8164 && !by_ref
8165 && is_task_ctx (ctx))
8166 suppress_warning (var);
8167 do_in = true;
8168 break;
8170 case OMP_CLAUSE_PRIVATE:
8171 case OMP_CLAUSE_COPYIN:
8172 case OMP_CLAUSE__LOOPTEMP_:
8173 case OMP_CLAUSE__REDUCTEMP_:
8174 do_in = true;
8175 break;
8177 case OMP_CLAUSE_LASTPRIVATE:
8178 if (by_ref || omp_privatize_by_reference (val))
8180 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
8181 continue;
8182 do_in = true;
8184 else
8186 do_out = true;
8187 if (lang_hooks.decls.omp_private_outer_ref (val))
8188 do_in = true;
8190 break;
8192 case OMP_CLAUSE_REDUCTION:
8193 case OMP_CLAUSE_IN_REDUCTION:
8194 do_in = true;
8195 if (val == OMP_CLAUSE_DECL (c))
8197 if (is_task_ctx (ctx))
8198 by_ref = use_pointer_for_field (val, ctx);
8199 else
8200 do_out = !(by_ref || omp_privatize_by_reference (val));
8202 else
8203 by_ref = TREE_CODE (TREE_TYPE (val)) == ARRAY_TYPE;
8204 break;
8206 default:
8207 gcc_unreachable ();
8210 if (do_in)
8212 ref = build_sender_ref (val, ctx);
8213 x = by_ref ? build_fold_addr_expr_loc (clause_loc, var) : var;
8214 gimplify_assign (ref, x, ilist);
8215 if (is_task_ctx (ctx))
8216 DECL_ABSTRACT_ORIGIN (TREE_OPERAND (ref, 1)) = NULL;
8219 if (do_out)
8221 ref = build_sender_ref (val, ctx);
8222 gimplify_assign (var, ref, olist);
8227 /* Generate code to implement SHARED from the sender (aka parent)
8228 side. This is trickier, since GIMPLE_OMP_PARALLEL_CLAUSES doesn't
8229 list things that got automatically shared. */
8231 static void
8232 lower_send_shared_vars (gimple_seq *ilist, gimple_seq *olist, omp_context *ctx)
8234 tree var, ovar, nvar, t, f, x, record_type;
8236 if (ctx->record_type == NULL)
8237 return;
8239 record_type = ctx->srecord_type ? ctx->srecord_type : ctx->record_type;
8240 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
8242 ovar = DECL_ABSTRACT_ORIGIN (f);
8243 if (!ovar || TREE_CODE (ovar) == FIELD_DECL)
8244 continue;
8246 nvar = maybe_lookup_decl (ovar, ctx);
8247 if (!nvar
8248 || !DECL_HAS_VALUE_EXPR_P (nvar)
8249 || (ctx->allocate_map
8250 && ctx->allocate_map->get (ovar)))
8251 continue;
8253 /* If CTX is a nested parallel directive. Find the immediately
8254 enclosing parallel or workshare construct that contains a
8255 mapping for OVAR. */
8256 var = lookup_decl_in_outer_ctx (ovar, ctx);
8258 t = omp_member_access_dummy_var (var);
8259 if (t)
8261 var = DECL_VALUE_EXPR (var);
8262 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
8263 if (o != t)
8264 var = unshare_and_remap (var, t, o);
8265 else
8266 var = unshare_expr (var);
8269 if (use_pointer_for_field (ovar, ctx))
8271 x = build_sender_ref (ovar, ctx);
8272 if (TREE_CODE (TREE_TYPE (f)) == ARRAY_TYPE
8273 && TREE_TYPE (f) == TREE_TYPE (ovar))
8275 gcc_assert (is_parallel_ctx (ctx)
8276 && DECL_ARTIFICIAL (ovar));
8277 /* _condtemp_ clause. */
8278 var = build_constructor (TREE_TYPE (x), NULL);
8280 else
8281 var = build_fold_addr_expr (var);
8282 gimplify_assign (x, var, ilist);
8284 else
8286 x = build_sender_ref (ovar, ctx);
8287 gimplify_assign (x, var, ilist);
8289 if (!TREE_READONLY (var)
8290 /* We don't need to receive a new reference to a result
8291 or parm decl. In fact we may not store to it as we will
8292 invalidate any pending RSO and generate wrong gimple
8293 during inlining. */
8294 && !((TREE_CODE (var) == RESULT_DECL
8295 || TREE_CODE (var) == PARM_DECL)
8296 && DECL_BY_REFERENCE (var)))
8298 x = build_sender_ref (ovar, ctx);
8299 gimplify_assign (var, x, olist);
8305 /* Emit an OpenACC head marker call, encapulating the partitioning and
8306 other information that must be processed by the target compiler.
8307 Return the maximum number of dimensions the associated loop might
8308 be partitioned over. */
8310 static unsigned
8311 lower_oacc_head_mark (location_t loc, tree ddvar, tree clauses,
8312 gimple_seq *seq, omp_context *ctx)
8314 unsigned levels = 0;
8315 unsigned tag = 0;
8316 tree gang_static = NULL_TREE;
8317 auto_vec<tree, 5> args;
8319 args.quick_push (build_int_cst
8320 (integer_type_node, IFN_UNIQUE_OACC_HEAD_MARK));
8321 args.quick_push (ddvar);
8322 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
8324 switch (OMP_CLAUSE_CODE (c))
8326 case OMP_CLAUSE_GANG:
8327 tag |= OLF_DIM_GANG;
8328 gang_static = OMP_CLAUSE_GANG_STATIC_EXPR (c);
8329 /* static:* is represented by -1, and we can ignore it, as
8330 scheduling is always static. */
8331 if (gang_static && integer_minus_onep (gang_static))
8332 gang_static = NULL_TREE;
8333 levels++;
8334 break;
8336 case OMP_CLAUSE_WORKER:
8337 tag |= OLF_DIM_WORKER;
8338 levels++;
8339 break;
8341 case OMP_CLAUSE_VECTOR:
8342 tag |= OLF_DIM_VECTOR;
8343 levels++;
8344 break;
8346 case OMP_CLAUSE_SEQ:
8347 tag |= OLF_SEQ;
8348 break;
8350 case OMP_CLAUSE_AUTO:
8351 tag |= OLF_AUTO;
8352 break;
8354 case OMP_CLAUSE_INDEPENDENT:
8355 tag |= OLF_INDEPENDENT;
8356 break;
8358 case OMP_CLAUSE_TILE:
8359 tag |= OLF_TILE;
8360 break;
8362 case OMP_CLAUSE_REDUCTION:
8363 tag |= OLF_REDUCTION;
8364 break;
8366 default:
8367 continue;
8371 if (gang_static)
8373 if (DECL_P (gang_static))
8374 gang_static = build_outer_var_ref (gang_static, ctx);
8375 tag |= OLF_GANG_STATIC;
8378 omp_context *tgt = enclosing_target_ctx (ctx);
8379 if (!tgt || is_oacc_parallel_or_serial (tgt))
8381 else if (is_oacc_kernels (tgt))
8382 /* Not using this loops handling inside OpenACC 'kernels' regions. */
8383 gcc_unreachable ();
8384 else if (is_oacc_kernels_decomposed_part (tgt))
8386 else
8387 gcc_unreachable ();
8389 /* In a parallel region, loops are implicitly INDEPENDENT. */
8390 if (!tgt || is_oacc_parallel_or_serial (tgt))
8391 tag |= OLF_INDEPENDENT;
8393 /* Loops inside OpenACC 'kernels' decomposed parts' regions are expected to
8394 have an explicit 'seq' or 'independent' clause, and no 'auto' clause. */
8395 if (tgt && is_oacc_kernels_decomposed_part (tgt))
8397 gcc_assert (tag & (OLF_SEQ | OLF_INDEPENDENT));
8398 gcc_assert (!(tag & OLF_AUTO));
8401 if (tag & OLF_TILE)
8402 /* Tiling could use all 3 levels. */
8403 levels = 3;
8404 else
8406 /* A loop lacking SEQ, GANG, WORKER and/or VECTOR could be AUTO.
8407 Ensure at least one level, or 2 for possible auto
8408 partitioning */
8409 bool maybe_auto = !(tag & (((GOMP_DIM_MASK (GOMP_DIM_MAX) - 1)
8410 << OLF_DIM_BASE) | OLF_SEQ));
8412 if (levels < 1u + maybe_auto)
8413 levels = 1u + maybe_auto;
8416 args.quick_push (build_int_cst (integer_type_node, levels));
8417 args.quick_push (build_int_cst (integer_type_node, tag));
8418 if (gang_static)
8419 args.quick_push (gang_static);
8421 gcall *call = gimple_build_call_internal_vec (IFN_UNIQUE, args);
8422 gimple_set_location (call, loc);
8423 gimple_set_lhs (call, ddvar);
8424 gimple_seq_add_stmt (seq, call);
8426 return levels;
8429 /* Emit an OpenACC lopp head or tail marker to SEQ. LEVEL is the
8430 partitioning level of the enclosed region. */
8432 static void
8433 lower_oacc_loop_marker (location_t loc, tree ddvar, bool head,
8434 tree tofollow, gimple_seq *seq)
8436 int marker_kind = (head ? IFN_UNIQUE_OACC_HEAD_MARK
8437 : IFN_UNIQUE_OACC_TAIL_MARK);
8438 tree marker = build_int_cst (integer_type_node, marker_kind);
8439 int nargs = 2 + (tofollow != NULL_TREE);
8440 gcall *call = gimple_build_call_internal (IFN_UNIQUE, nargs,
8441 marker, ddvar, tofollow);
8442 gimple_set_location (call, loc);
8443 gimple_set_lhs (call, ddvar);
8444 gimple_seq_add_stmt (seq, call);
8447 /* Generate the before and after OpenACC loop sequences. CLAUSES are
8448 the loop clauses, from which we extract reductions. Initialize
8449 HEAD and TAIL. */
8451 static void
8452 lower_oacc_head_tail (location_t loc, tree clauses, gcall *private_marker,
8453 gimple_seq *head, gimple_seq *tail, omp_context *ctx)
8455 bool inner = false;
8456 tree ddvar = create_tmp_var (integer_type_node, ".data_dep");
8457 gimple_seq_add_stmt (head, gimple_build_assign (ddvar, integer_zero_node));
8459 unsigned count = lower_oacc_head_mark (loc, ddvar, clauses, head, ctx);
8461 if (private_marker)
8463 gimple_set_location (private_marker, loc);
8464 gimple_call_set_lhs (private_marker, ddvar);
8465 gimple_call_set_arg (private_marker, 1, ddvar);
8468 tree fork_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_FORK);
8469 tree join_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_JOIN);
8471 gcc_assert (count);
8472 for (unsigned done = 1; count; count--, done++)
8474 gimple_seq fork_seq = NULL;
8475 gimple_seq join_seq = NULL;
8477 tree place = build_int_cst (integer_type_node, -1);
8478 gcall *fork = gimple_build_call_internal (IFN_UNIQUE, 3,
8479 fork_kind, ddvar, place);
8480 gimple_set_location (fork, loc);
8481 gimple_set_lhs (fork, ddvar);
8483 gcall *join = gimple_build_call_internal (IFN_UNIQUE, 3,
8484 join_kind, ddvar, place);
8485 gimple_set_location (join, loc);
8486 gimple_set_lhs (join, ddvar);
8488 /* Mark the beginning of this level sequence. */
8489 if (inner)
8490 lower_oacc_loop_marker (loc, ddvar, true,
8491 build_int_cst (integer_type_node, count),
8492 &fork_seq);
8493 lower_oacc_loop_marker (loc, ddvar, false,
8494 build_int_cst (integer_type_node, done),
8495 &join_seq);
8497 lower_oacc_reductions (loc, clauses, place, inner,
8498 fork, (count == 1) ? private_marker : NULL,
8499 join, &fork_seq, &join_seq, ctx);
8501 /* Append this level to head. */
8502 gimple_seq_add_seq (head, fork_seq);
8503 /* Prepend it to tail. */
8504 gimple_seq_add_seq (&join_seq, *tail);
8505 *tail = join_seq;
8507 inner = true;
8510 /* Mark the end of the sequence. */
8511 lower_oacc_loop_marker (loc, ddvar, true, NULL_TREE, head);
8512 lower_oacc_loop_marker (loc, ddvar, false, NULL_TREE, tail);
8515 /* If exceptions are enabled, wrap the statements in BODY in a MUST_NOT_THROW
8516 catch handler and return it. This prevents programs from violating the
8517 structured block semantics with throws. */
8519 static gimple_seq
8520 maybe_catch_exception (gimple_seq body)
8522 gimple *g;
8523 tree decl;
8525 if (!flag_exceptions)
8526 return body;
8528 if (lang_hooks.eh_protect_cleanup_actions != NULL)
8529 decl = lang_hooks.eh_protect_cleanup_actions ();
8530 else
8531 decl = builtin_decl_explicit (BUILT_IN_TRAP);
8533 g = gimple_build_eh_must_not_throw (decl);
8534 g = gimple_build_try (body, gimple_seq_alloc_with_stmt (g),
8535 GIMPLE_TRY_CATCH);
8537 return gimple_seq_alloc_with_stmt (g);
8541 /* Routines to lower OMP directives into OMP-GIMPLE. */
8543 /* If ctx is a worksharing context inside of a cancellable parallel
8544 region and it isn't nowait, add lhs to its GIMPLE_OMP_RETURN
8545 and conditional branch to parallel's cancel_label to handle
8546 cancellation in the implicit barrier. */
8548 static void
8549 maybe_add_implicit_barrier_cancel (omp_context *ctx, gimple *omp_return,
8550 gimple_seq *body)
8552 gcc_assert (gimple_code (omp_return) == GIMPLE_OMP_RETURN);
8553 if (gimple_omp_return_nowait_p (omp_return))
8554 return;
8555 for (omp_context *outer = ctx->outer; outer; outer = outer->outer)
8556 if (gimple_code (outer->stmt) == GIMPLE_OMP_PARALLEL
8557 && outer->cancellable)
8559 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_CANCEL);
8560 tree c_bool_type = TREE_TYPE (TREE_TYPE (fndecl));
8561 tree lhs = create_tmp_var (c_bool_type);
8562 gimple_omp_return_set_lhs (omp_return, lhs);
8563 tree fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
8564 gimple *g = gimple_build_cond (NE_EXPR, lhs,
8565 fold_convert (c_bool_type,
8566 boolean_false_node),
8567 outer->cancel_label, fallthru_label);
8568 gimple_seq_add_stmt (body, g);
8569 gimple_seq_add_stmt (body, gimple_build_label (fallthru_label));
8571 else if (gimple_code (outer->stmt) != GIMPLE_OMP_TASKGROUP
8572 && gimple_code (outer->stmt) != GIMPLE_OMP_SCOPE)
8573 return;
8576 /* Find the first task_reduction or reduction clause or return NULL
8577 if there are none. */
8579 static inline tree
8580 omp_task_reductions_find_first (tree clauses, enum tree_code code,
8581 enum omp_clause_code ccode)
8583 while (1)
8585 clauses = omp_find_clause (clauses, ccode);
8586 if (clauses == NULL_TREE)
8587 return NULL_TREE;
8588 if (ccode != OMP_CLAUSE_REDUCTION
8589 || code == OMP_TASKLOOP
8590 || OMP_CLAUSE_REDUCTION_TASK (clauses))
8591 return clauses;
8592 clauses = OMP_CLAUSE_CHAIN (clauses);
8596 static void lower_omp_task_reductions (omp_context *, enum tree_code, tree,
8597 gimple_seq *, gimple_seq *);
8599 /* Lower the OpenMP sections directive in the current statement in GSI_P.
8600 CTX is the enclosing OMP context for the current statement. */
8602 static void
8603 lower_omp_sections (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8605 tree block, control;
8606 gimple_stmt_iterator tgsi;
8607 gomp_sections *stmt;
8608 gimple *t;
8609 gbind *new_stmt, *bind;
8610 gimple_seq ilist, dlist, olist, tred_dlist = NULL, clist = NULL, new_body;
8612 stmt = as_a <gomp_sections *> (gsi_stmt (*gsi_p));
8614 push_gimplify_context ();
8616 dlist = NULL;
8617 ilist = NULL;
8619 tree rclauses
8620 = omp_task_reductions_find_first (gimple_omp_sections_clauses (stmt),
8621 OMP_SECTIONS, OMP_CLAUSE_REDUCTION);
8622 tree rtmp = NULL_TREE;
8623 if (rclauses)
8625 tree type = build_pointer_type (pointer_sized_int_node);
8626 tree temp = create_tmp_var (type);
8627 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
8628 OMP_CLAUSE_DECL (c) = temp;
8629 OMP_CLAUSE_CHAIN (c) = gimple_omp_sections_clauses (stmt);
8630 gimple_omp_sections_set_clauses (stmt, c);
8631 lower_omp_task_reductions (ctx, OMP_SECTIONS,
8632 gimple_omp_sections_clauses (stmt),
8633 &ilist, &tred_dlist);
8634 rclauses = c;
8635 rtmp = make_ssa_name (type);
8636 gimple_seq_add_stmt (&ilist, gimple_build_assign (rtmp, temp));
8639 tree *clauses_ptr = gimple_omp_sections_clauses_ptr (stmt);
8640 lower_lastprivate_conditional_clauses (clauses_ptr, ctx);
8642 lower_rec_input_clauses (gimple_omp_sections_clauses (stmt),
8643 &ilist, &dlist, ctx, NULL);
8645 control = create_tmp_var (unsigned_type_node, ".section");
8646 gimple_omp_sections_set_control (stmt, control);
8648 new_body = gimple_omp_body (stmt);
8649 gimple_omp_set_body (stmt, NULL);
8650 tgsi = gsi_start (new_body);
8651 for (; !gsi_end_p (tgsi); gsi_next (&tgsi))
8653 omp_context *sctx;
8654 gimple *sec_start;
8656 sec_start = gsi_stmt (tgsi);
8657 sctx = maybe_lookup_ctx (sec_start);
8658 gcc_assert (sctx);
8660 lower_omp (gimple_omp_body_ptr (sec_start), sctx);
8661 gsi_insert_seq_after (&tgsi, gimple_omp_body (sec_start),
8662 GSI_CONTINUE_LINKING);
8663 gimple_omp_set_body (sec_start, NULL);
8665 if (gsi_one_before_end_p (tgsi))
8667 gimple_seq l = NULL;
8668 lower_lastprivate_clauses (gimple_omp_sections_clauses (stmt), NULL,
8669 &ilist, &l, &clist, ctx);
8670 gsi_insert_seq_after (&tgsi, l, GSI_CONTINUE_LINKING);
8671 gimple_omp_section_set_last (sec_start);
8674 gsi_insert_after (&tgsi, gimple_build_omp_return (false),
8675 GSI_CONTINUE_LINKING);
8678 block = make_node (BLOCK);
8679 bind = gimple_build_bind (NULL, new_body, block);
8681 olist = NULL;
8682 lower_reduction_clauses (gimple_omp_sections_clauses (stmt), &olist,
8683 &clist, ctx);
8684 if (clist)
8686 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START);
8687 gcall *g = gimple_build_call (fndecl, 0);
8688 gimple_seq_add_stmt (&olist, g);
8689 gimple_seq_add_seq (&olist, clist);
8690 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END);
8691 g = gimple_build_call (fndecl, 0);
8692 gimple_seq_add_stmt (&olist, g);
8695 block = make_node (BLOCK);
8696 new_stmt = gimple_build_bind (NULL, NULL, block);
8697 gsi_replace (gsi_p, new_stmt, true);
8699 pop_gimplify_context (new_stmt);
8700 gimple_bind_append_vars (new_stmt, ctx->block_vars);
8701 BLOCK_VARS (block) = gimple_bind_vars (bind);
8702 if (BLOCK_VARS (block))
8703 TREE_USED (block) = 1;
8705 new_body = NULL;
8706 gimple_seq_add_seq (&new_body, ilist);
8707 gimple_seq_add_stmt (&new_body, stmt);
8708 gimple_seq_add_stmt (&new_body, gimple_build_omp_sections_switch ());
8709 gimple_seq_add_stmt (&new_body, bind);
8711 t = gimple_build_omp_continue (control, control);
8712 gimple_seq_add_stmt (&new_body, t);
8714 gimple_seq_add_seq (&new_body, olist);
8715 if (ctx->cancellable)
8716 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
8717 gimple_seq_add_seq (&new_body, dlist);
8719 new_body = maybe_catch_exception (new_body);
8721 bool nowait = omp_find_clause (gimple_omp_sections_clauses (stmt),
8722 OMP_CLAUSE_NOWAIT) != NULL_TREE;
8723 t = gimple_build_omp_return (nowait);
8724 gimple_seq_add_stmt (&new_body, t);
8725 gimple_seq_add_seq (&new_body, tred_dlist);
8726 maybe_add_implicit_barrier_cancel (ctx, t, &new_body);
8728 if (rclauses)
8729 OMP_CLAUSE_DECL (rclauses) = rtmp;
8731 gimple_bind_set_body (new_stmt, new_body);
8735 /* A subroutine of lower_omp_single. Expand the simple form of
8736 a GIMPLE_OMP_SINGLE, without a copyprivate clause:
8738 if (GOMP_single_start ())
8739 BODY;
8740 [ GOMP_barrier (); ] -> unless 'nowait' is present.
8742 FIXME. It may be better to delay expanding the logic of this until
8743 pass_expand_omp. The expanded logic may make the job more difficult
8744 to a synchronization analysis pass. */
8746 static void
8747 lower_omp_single_simple (gomp_single *single_stmt, gimple_seq *pre_p)
8749 location_t loc = gimple_location (single_stmt);
8750 tree tlabel = create_artificial_label (loc);
8751 tree flabel = create_artificial_label (loc);
8752 gimple *call, *cond;
8753 tree lhs, decl;
8755 decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_START);
8756 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (decl)));
8757 call = gimple_build_call (decl, 0);
8758 gimple_call_set_lhs (call, lhs);
8759 gimple_seq_add_stmt (pre_p, call);
8761 cond = gimple_build_cond (EQ_EXPR, lhs,
8762 fold_convert_loc (loc, TREE_TYPE (lhs),
8763 boolean_true_node),
8764 tlabel, flabel);
8765 gimple_seq_add_stmt (pre_p, cond);
8766 gimple_seq_add_stmt (pre_p, gimple_build_label (tlabel));
8767 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
8768 gimple_seq_add_stmt (pre_p, gimple_build_label (flabel));
8772 /* A subroutine of lower_omp_single. Expand the simple form of
8773 a GIMPLE_OMP_SINGLE, with a copyprivate clause:
8775 #pragma omp single copyprivate (a, b, c)
8777 Create a new structure to hold copies of 'a', 'b' and 'c' and emit:
8780 if ((copyout_p = GOMP_single_copy_start ()) == NULL)
8782 BODY;
8783 copyout.a = a;
8784 copyout.b = b;
8785 copyout.c = c;
8786 GOMP_single_copy_end (&copyout);
8788 else
8790 a = copyout_p->a;
8791 b = copyout_p->b;
8792 c = copyout_p->c;
8794 GOMP_barrier ();
8797 FIXME. It may be better to delay expanding the logic of this until
8798 pass_expand_omp. The expanded logic may make the job more difficult
8799 to a synchronization analysis pass. */
8801 static void
8802 lower_omp_single_copy (gomp_single *single_stmt, gimple_seq *pre_p,
8803 omp_context *ctx)
8805 tree ptr_type, t, l0, l1, l2, bfn_decl;
8806 gimple_seq copyin_seq;
8807 location_t loc = gimple_location (single_stmt);
8809 ctx->sender_decl = create_tmp_var (ctx->record_type, ".omp_copy_o");
8811 ptr_type = build_pointer_type (ctx->record_type);
8812 ctx->receiver_decl = create_tmp_var (ptr_type, ".omp_copy_i");
8814 l0 = create_artificial_label (loc);
8815 l1 = create_artificial_label (loc);
8816 l2 = create_artificial_label (loc);
8818 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_START);
8819 t = build_call_expr_loc (loc, bfn_decl, 0);
8820 t = fold_convert_loc (loc, ptr_type, t);
8821 gimplify_assign (ctx->receiver_decl, t, pre_p);
8823 t = build2 (EQ_EXPR, boolean_type_node, ctx->receiver_decl,
8824 build_int_cst (ptr_type, 0));
8825 t = build3 (COND_EXPR, void_type_node, t,
8826 build_and_jump (&l0), build_and_jump (&l1));
8827 gimplify_and_add (t, pre_p);
8829 gimple_seq_add_stmt (pre_p, gimple_build_label (l0));
8831 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
8833 copyin_seq = NULL;
8834 lower_copyprivate_clauses (gimple_omp_single_clauses (single_stmt), pre_p,
8835 &copyin_seq, ctx);
8837 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
8838 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_END);
8839 t = build_call_expr_loc (loc, bfn_decl, 1, t);
8840 gimplify_and_add (t, pre_p);
8842 t = build_and_jump (&l2);
8843 gimplify_and_add (t, pre_p);
8845 gimple_seq_add_stmt (pre_p, gimple_build_label (l1));
8847 gimple_seq_add_seq (pre_p, copyin_seq);
8849 gimple_seq_add_stmt (pre_p, gimple_build_label (l2));
8853 /* Expand code for an OpenMP single directive. */
8855 static void
8856 lower_omp_single (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8858 tree block;
8859 gomp_single *single_stmt = as_a <gomp_single *> (gsi_stmt (*gsi_p));
8860 gbind *bind;
8861 gimple_seq bind_body, bind_body_tail = NULL, dlist;
8863 push_gimplify_context ();
8865 block = make_node (BLOCK);
8866 bind = gimple_build_bind (NULL, NULL, block);
8867 gsi_replace (gsi_p, bind, true);
8868 bind_body = NULL;
8869 dlist = NULL;
8870 lower_rec_input_clauses (gimple_omp_single_clauses (single_stmt),
8871 &bind_body, &dlist, ctx, NULL);
8872 lower_omp (gimple_omp_body_ptr (single_stmt), ctx);
8874 gimple_seq_add_stmt (&bind_body, single_stmt);
8876 if (ctx->record_type)
8877 lower_omp_single_copy (single_stmt, &bind_body, ctx);
8878 else
8879 lower_omp_single_simple (single_stmt, &bind_body);
8881 gimple_omp_set_body (single_stmt, NULL);
8883 gimple_seq_add_seq (&bind_body, dlist);
8885 bind_body = maybe_catch_exception (bind_body);
8887 bool nowait = omp_find_clause (gimple_omp_single_clauses (single_stmt),
8888 OMP_CLAUSE_NOWAIT) != NULL_TREE;
8889 gimple *g = gimple_build_omp_return (nowait);
8890 gimple_seq_add_stmt (&bind_body_tail, g);
8891 maybe_add_implicit_barrier_cancel (ctx, g, &bind_body_tail);
8892 if (ctx->record_type)
8894 gimple_stmt_iterator gsi = gsi_start (bind_body_tail);
8895 tree clobber = build_clobber (ctx->record_type);
8896 gsi_insert_after (&gsi, gimple_build_assign (ctx->sender_decl,
8897 clobber), GSI_SAME_STMT);
8899 gimple_seq_add_seq (&bind_body, bind_body_tail);
8900 gimple_bind_set_body (bind, bind_body);
8902 pop_gimplify_context (bind);
8904 gimple_bind_append_vars (bind, ctx->block_vars);
8905 BLOCK_VARS (block) = ctx->block_vars;
8906 if (BLOCK_VARS (block))
8907 TREE_USED (block) = 1;
8911 /* Lower code for an OMP scope directive. */
8913 static void
8914 lower_omp_scope (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8916 tree block;
8917 gimple *scope_stmt = gsi_stmt (*gsi_p);
8918 gbind *bind;
8919 gimple_seq bind_body, bind_body_tail = NULL, dlist;
8920 gimple_seq tred_dlist = NULL;
8922 push_gimplify_context ();
8924 block = make_node (BLOCK);
8925 bind = gimple_build_bind (NULL, NULL, block);
8926 gsi_replace (gsi_p, bind, true);
8927 bind_body = NULL;
8928 dlist = NULL;
8930 tree rclauses
8931 = omp_task_reductions_find_first (gimple_omp_scope_clauses (scope_stmt),
8932 OMP_SCOPE, OMP_CLAUSE_REDUCTION);
8933 if (rclauses)
8935 tree type = build_pointer_type (pointer_sized_int_node);
8936 tree temp = create_tmp_var (type);
8937 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
8938 OMP_CLAUSE_DECL (c) = temp;
8939 OMP_CLAUSE_CHAIN (c) = gimple_omp_scope_clauses (scope_stmt);
8940 gimple_omp_scope_set_clauses (scope_stmt, c);
8941 lower_omp_task_reductions (ctx, OMP_SCOPE,
8942 gimple_omp_scope_clauses (scope_stmt),
8943 &bind_body, &tred_dlist);
8944 rclauses = c;
8945 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_SCOPE_START);
8946 gimple *stmt = gimple_build_call (fndecl, 1, temp);
8947 gimple_seq_add_stmt (&bind_body, stmt);
8950 lower_rec_input_clauses (gimple_omp_scope_clauses (scope_stmt),
8951 &bind_body, &dlist, ctx, NULL);
8952 lower_omp (gimple_omp_body_ptr (scope_stmt), ctx);
8954 gimple_seq_add_stmt (&bind_body, scope_stmt);
8956 gimple_seq_add_seq (&bind_body, gimple_omp_body (scope_stmt));
8958 gimple_omp_set_body (scope_stmt, NULL);
8960 gimple_seq clist = NULL;
8961 lower_reduction_clauses (gimple_omp_scope_clauses (scope_stmt),
8962 &bind_body, &clist, ctx);
8963 if (clist)
8965 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START);
8966 gcall *g = gimple_build_call (fndecl, 0);
8967 gimple_seq_add_stmt (&bind_body, g);
8968 gimple_seq_add_seq (&bind_body, clist);
8969 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END);
8970 g = gimple_build_call (fndecl, 0);
8971 gimple_seq_add_stmt (&bind_body, g);
8974 gimple_seq_add_seq (&bind_body, dlist);
8976 bind_body = maybe_catch_exception (bind_body);
8978 bool nowait = omp_find_clause (gimple_omp_scope_clauses (scope_stmt),
8979 OMP_CLAUSE_NOWAIT) != NULL_TREE;
8980 gimple *g = gimple_build_omp_return (nowait);
8981 gimple_seq_add_stmt (&bind_body_tail, g);
8982 gimple_seq_add_seq (&bind_body_tail, tred_dlist);
8983 maybe_add_implicit_barrier_cancel (ctx, g, &bind_body_tail);
8984 if (ctx->record_type)
8986 gimple_stmt_iterator gsi = gsi_start (bind_body_tail);
8987 tree clobber = build_clobber (ctx->record_type);
8988 gsi_insert_after (&gsi, gimple_build_assign (ctx->sender_decl,
8989 clobber), GSI_SAME_STMT);
8991 gimple_seq_add_seq (&bind_body, bind_body_tail);
8993 gimple_bind_set_body (bind, bind_body);
8995 pop_gimplify_context (bind);
8997 gimple_bind_append_vars (bind, ctx->block_vars);
8998 BLOCK_VARS (block) = ctx->block_vars;
8999 if (BLOCK_VARS (block))
9000 TREE_USED (block) = 1;
9002 /* Expand code for an OpenMP master or masked directive. */
9004 static void
9005 lower_omp_master (gimple_stmt_iterator *gsi_p, omp_context *ctx)
9007 tree block, lab = NULL, x, bfn_decl;
9008 gimple *stmt = gsi_stmt (*gsi_p);
9009 gbind *bind;
9010 location_t loc = gimple_location (stmt);
9011 gimple_seq tseq;
9012 tree filter = integer_zero_node;
9014 push_gimplify_context ();
9016 if (gimple_code (stmt) == GIMPLE_OMP_MASKED)
9018 filter = omp_find_clause (gimple_omp_masked_clauses (stmt),
9019 OMP_CLAUSE_FILTER);
9020 if (filter)
9021 filter = fold_convert (integer_type_node,
9022 OMP_CLAUSE_FILTER_EXPR (filter));
9023 else
9024 filter = integer_zero_node;
9026 block = make_node (BLOCK);
9027 bind = gimple_build_bind (NULL, NULL, block);
9028 gsi_replace (gsi_p, bind, true);
9029 gimple_bind_add_stmt (bind, stmt);
9031 bfn_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
9032 x = build_call_expr_loc (loc, bfn_decl, 0);
9033 x = build2 (EQ_EXPR, boolean_type_node, x, filter);
9034 x = build3 (COND_EXPR, void_type_node, x, NULL, build_and_jump (&lab));
9035 tseq = NULL;
9036 gimplify_and_add (x, &tseq);
9037 gimple_bind_add_seq (bind, tseq);
9039 lower_omp (gimple_omp_body_ptr (stmt), ctx);
9040 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
9041 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
9042 gimple_omp_set_body (stmt, NULL);
9044 gimple_bind_add_stmt (bind, gimple_build_label (lab));
9046 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
9048 pop_gimplify_context (bind);
9050 gimple_bind_append_vars (bind, ctx->block_vars);
9051 BLOCK_VARS (block) = ctx->block_vars;
9054 /* Helper function for lower_omp_task_reductions. For a specific PASS
9055 find out the current clause it should be processed, or return false
9056 if all have been processed already. */
9058 static inline bool
9059 omp_task_reduction_iterate (int pass, enum tree_code code,
9060 enum omp_clause_code ccode, tree *c, tree *decl,
9061 tree *type, tree *next)
9063 for (; *c; *c = omp_find_clause (OMP_CLAUSE_CHAIN (*c), ccode))
9065 if (ccode == OMP_CLAUSE_REDUCTION
9066 && code != OMP_TASKLOOP
9067 && !OMP_CLAUSE_REDUCTION_TASK (*c))
9068 continue;
9069 *decl = OMP_CLAUSE_DECL (*c);
9070 *type = TREE_TYPE (*decl);
9071 if (TREE_CODE (*decl) == MEM_REF)
9073 if (pass != 1)
9074 continue;
9076 else
9078 if (omp_privatize_by_reference (*decl))
9079 *type = TREE_TYPE (*type);
9080 if (pass != (!TREE_CONSTANT (TYPE_SIZE_UNIT (*type))))
9081 continue;
9083 *next = omp_find_clause (OMP_CLAUSE_CHAIN (*c), ccode);
9084 return true;
9086 *decl = NULL_TREE;
9087 *type = NULL_TREE;
9088 *next = NULL_TREE;
9089 return false;
9092 /* Lower task_reduction and reduction clauses (the latter unless CODE is
9093 OMP_TASKGROUP only with task modifier). Register mapping of those in
9094 START sequence and reducing them and unregister them in the END sequence. */
9096 static void
9097 lower_omp_task_reductions (omp_context *ctx, enum tree_code code, tree clauses,
9098 gimple_seq *start, gimple_seq *end)
9100 enum omp_clause_code ccode
9101 = (code == OMP_TASKGROUP
9102 ? OMP_CLAUSE_TASK_REDUCTION : OMP_CLAUSE_REDUCTION);
9103 tree cancellable = NULL_TREE;
9104 clauses = omp_task_reductions_find_first (clauses, code, ccode);
9105 if (clauses == NULL_TREE)
9106 return;
9107 if (code == OMP_FOR || code == OMP_SECTIONS || code == OMP_SCOPE)
9109 for (omp_context *outer = ctx->outer; outer; outer = outer->outer)
9110 if (gimple_code (outer->stmt) == GIMPLE_OMP_PARALLEL
9111 && outer->cancellable)
9113 cancellable = error_mark_node;
9114 break;
9116 else if (gimple_code (outer->stmt) != GIMPLE_OMP_TASKGROUP
9117 && gimple_code (outer->stmt) != GIMPLE_OMP_SCOPE)
9118 break;
9120 tree record_type = lang_hooks.types.make_type (RECORD_TYPE);
9121 tree *last = &TYPE_FIELDS (record_type);
9122 unsigned cnt = 0;
9123 if (cancellable)
9125 tree field = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
9126 ptr_type_node);
9127 tree ifield = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
9128 integer_type_node);
9129 *last = field;
9130 DECL_CHAIN (field) = ifield;
9131 last = &DECL_CHAIN (ifield);
9132 DECL_CONTEXT (field) = record_type;
9133 if (TYPE_ALIGN (record_type) < DECL_ALIGN (field))
9134 SET_TYPE_ALIGN (record_type, DECL_ALIGN (field));
9135 DECL_CONTEXT (ifield) = record_type;
9136 if (TYPE_ALIGN (record_type) < DECL_ALIGN (ifield))
9137 SET_TYPE_ALIGN (record_type, DECL_ALIGN (ifield));
9139 for (int pass = 0; pass < 2; pass++)
9141 tree decl, type, next;
9142 for (tree c = clauses;
9143 omp_task_reduction_iterate (pass, code, ccode,
9144 &c, &decl, &type, &next); c = next)
9146 ++cnt;
9147 tree new_type = type;
9148 if (ctx->outer)
9149 new_type = remap_type (type, &ctx->outer->cb);
9150 tree field
9151 = build_decl (OMP_CLAUSE_LOCATION (c), FIELD_DECL,
9152 DECL_P (decl) ? DECL_NAME (decl) : NULL_TREE,
9153 new_type);
9154 if (DECL_P (decl) && type == TREE_TYPE (decl))
9156 SET_DECL_ALIGN (field, DECL_ALIGN (decl));
9157 DECL_USER_ALIGN (field) = DECL_USER_ALIGN (decl);
9158 TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (decl);
9160 else
9161 SET_DECL_ALIGN (field, TYPE_ALIGN (type));
9162 DECL_CONTEXT (field) = record_type;
9163 if (TYPE_ALIGN (record_type) < DECL_ALIGN (field))
9164 SET_TYPE_ALIGN (record_type, DECL_ALIGN (field));
9165 *last = field;
9166 last = &DECL_CHAIN (field);
9167 tree bfield
9168 = build_decl (OMP_CLAUSE_LOCATION (c), FIELD_DECL, NULL_TREE,
9169 boolean_type_node);
9170 DECL_CONTEXT (bfield) = record_type;
9171 if (TYPE_ALIGN (record_type) < DECL_ALIGN (bfield))
9172 SET_TYPE_ALIGN (record_type, DECL_ALIGN (bfield));
9173 *last = bfield;
9174 last = &DECL_CHAIN (bfield);
9177 *last = NULL_TREE;
9178 layout_type (record_type);
9180 /* Build up an array which registers with the runtime all the reductions
9181 and deregisters them at the end. Format documented in libgomp/task.c. */
9182 tree atype = build_array_type_nelts (pointer_sized_int_node, 7 + cnt * 3);
9183 tree avar = create_tmp_var_raw (atype);
9184 gimple_add_tmp_var (avar);
9185 TREE_ADDRESSABLE (avar) = 1;
9186 tree r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_zero_node,
9187 NULL_TREE, NULL_TREE);
9188 tree t = build_int_cst (pointer_sized_int_node, cnt);
9189 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
9190 gimple_seq seq = NULL;
9191 tree sz = fold_convert (pointer_sized_int_node,
9192 TYPE_SIZE_UNIT (record_type));
9193 int cachesz = 64;
9194 sz = fold_build2 (PLUS_EXPR, pointer_sized_int_node, sz,
9195 build_int_cst (pointer_sized_int_node, cachesz - 1));
9196 sz = fold_build2 (BIT_AND_EXPR, pointer_sized_int_node, sz,
9197 build_int_cst (pointer_sized_int_node, ~(cachesz - 1)));
9198 ctx->task_reductions.create (1 + cnt);
9199 ctx->task_reduction_map = new hash_map<tree, unsigned>;
9200 ctx->task_reductions.quick_push (TREE_CODE (sz) == INTEGER_CST
9201 ? sz : NULL_TREE);
9202 sz = force_gimple_operand (sz, &seq, true, NULL_TREE);
9203 gimple_seq_add_seq (start, seq);
9204 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_one_node,
9205 NULL_TREE, NULL_TREE);
9206 gimple_seq_add_stmt (start, gimple_build_assign (r, sz));
9207 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (2),
9208 NULL_TREE, NULL_TREE);
9209 t = build_int_cst (pointer_sized_int_node,
9210 MAX (TYPE_ALIGN_UNIT (record_type), (unsigned) cachesz));
9211 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
9212 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (3),
9213 NULL_TREE, NULL_TREE);
9214 t = build_int_cst (pointer_sized_int_node, -1);
9215 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
9216 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (4),
9217 NULL_TREE, NULL_TREE);
9218 t = build_int_cst (pointer_sized_int_node, 0);
9219 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
9221 /* In end, build a loop that iterates from 0 to < omp_get_num_threads ()
9222 and for each task reduction checks a bool right after the private variable
9223 within that thread's chunk; if the bool is clear, it hasn't been
9224 initialized and thus isn't going to be reduced nor destructed, otherwise
9225 reduce and destruct it. */
9226 tree idx = create_tmp_var (size_type_node);
9227 gimple_seq_add_stmt (end, gimple_build_assign (idx, size_zero_node));
9228 tree num_thr_sz = create_tmp_var (size_type_node);
9229 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
9230 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
9231 tree lab3 = NULL_TREE, lab7 = NULL_TREE;
9232 gimple *g;
9233 if (code == OMP_FOR || code == OMP_SECTIONS || code == OMP_SCOPE)
9235 /* For worksharing constructs or scope, only perform it in the master
9236 thread, with the exception of cancelled implicit barriers - then only
9237 handle the current thread. */
9238 tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
9239 t = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
9240 tree thr_num = create_tmp_var (integer_type_node);
9241 g = gimple_build_call (t, 0);
9242 gimple_call_set_lhs (g, thr_num);
9243 gimple_seq_add_stmt (end, g);
9244 if (cancellable)
9246 tree c;
9247 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
9248 tree lab6 = create_artificial_label (UNKNOWN_LOCATION);
9249 lab3 = create_artificial_label (UNKNOWN_LOCATION);
9250 if (code == OMP_FOR)
9251 c = gimple_omp_for_clauses (ctx->stmt);
9252 else if (code == OMP_SECTIONS)
9253 c = gimple_omp_sections_clauses (ctx->stmt);
9254 else /* if (code == OMP_SCOPE) */
9255 c = gimple_omp_scope_clauses (ctx->stmt);
9256 c = OMP_CLAUSE_DECL (omp_find_clause (c, OMP_CLAUSE__REDUCTEMP_));
9257 cancellable = c;
9258 g = gimple_build_cond (NE_EXPR, c, build_zero_cst (TREE_TYPE (c)),
9259 lab5, lab6);
9260 gimple_seq_add_stmt (end, g);
9261 gimple_seq_add_stmt (end, gimple_build_label (lab5));
9262 g = gimple_build_assign (idx, NOP_EXPR, thr_num);
9263 gimple_seq_add_stmt (end, g);
9264 g = gimple_build_assign (num_thr_sz, PLUS_EXPR, idx,
9265 build_one_cst (TREE_TYPE (idx)));
9266 gimple_seq_add_stmt (end, g);
9267 gimple_seq_add_stmt (end, gimple_build_goto (lab3));
9268 gimple_seq_add_stmt (end, gimple_build_label (lab6));
9270 g = gimple_build_cond (NE_EXPR, thr_num, integer_zero_node, lab2, lab4);
9271 gimple_seq_add_stmt (end, g);
9272 gimple_seq_add_stmt (end, gimple_build_label (lab4));
9274 if (code != OMP_PARALLEL)
9276 t = builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS);
9277 tree num_thr = create_tmp_var (integer_type_node);
9278 g = gimple_build_call (t, 0);
9279 gimple_call_set_lhs (g, num_thr);
9280 gimple_seq_add_stmt (end, g);
9281 g = gimple_build_assign (num_thr_sz, NOP_EXPR, num_thr);
9282 gimple_seq_add_stmt (end, g);
9283 if (cancellable)
9284 gimple_seq_add_stmt (end, gimple_build_label (lab3));
9286 else
9288 tree c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
9289 OMP_CLAUSE__REDUCTEMP_);
9290 t = fold_convert (pointer_sized_int_node, OMP_CLAUSE_DECL (c));
9291 t = fold_convert (size_type_node, t);
9292 gimplify_assign (num_thr_sz, t, end);
9294 t = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (2),
9295 NULL_TREE, NULL_TREE);
9296 tree data = create_tmp_var (pointer_sized_int_node);
9297 gimple_seq_add_stmt (end, gimple_build_assign (data, t));
9298 if (code == OMP_TASKLOOP)
9300 lab7 = create_artificial_label (UNKNOWN_LOCATION);
9301 g = gimple_build_cond (NE_EXPR, data,
9302 build_zero_cst (pointer_sized_int_node),
9303 lab1, lab7);
9304 gimple_seq_add_stmt (end, g);
9306 gimple_seq_add_stmt (end, gimple_build_label (lab1));
9307 tree ptr;
9308 if (TREE_CODE (TYPE_SIZE_UNIT (record_type)) == INTEGER_CST)
9309 ptr = create_tmp_var (build_pointer_type (record_type));
9310 else
9311 ptr = create_tmp_var (ptr_type_node);
9312 gimple_seq_add_stmt (end, gimple_build_assign (ptr, NOP_EXPR, data));
9314 tree field = TYPE_FIELDS (record_type);
9315 cnt = 0;
9316 if (cancellable)
9317 field = DECL_CHAIN (DECL_CHAIN (field));
9318 for (int pass = 0; pass < 2; pass++)
9320 tree decl, type, next;
9321 for (tree c = clauses;
9322 omp_task_reduction_iterate (pass, code, ccode,
9323 &c, &decl, &type, &next); c = next)
9325 tree var = decl, ref;
9326 if (TREE_CODE (decl) == MEM_REF)
9328 var = TREE_OPERAND (var, 0);
9329 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
9330 var = TREE_OPERAND (var, 0);
9331 tree v = var;
9332 if (TREE_CODE (var) == ADDR_EXPR)
9333 var = TREE_OPERAND (var, 0);
9334 else if (TREE_CODE (var) == INDIRECT_REF)
9335 var = TREE_OPERAND (var, 0);
9336 tree orig_var = var;
9337 if (is_variable_sized (var))
9339 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
9340 var = DECL_VALUE_EXPR (var);
9341 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
9342 var = TREE_OPERAND (var, 0);
9343 gcc_assert (DECL_P (var));
9345 t = ref = maybe_lookup_decl_in_outer_ctx (var, ctx);
9346 if (orig_var != var)
9347 gcc_assert (TREE_CODE (v) == ADDR_EXPR);
9348 else if (TREE_CODE (v) == ADDR_EXPR)
9349 t = build_fold_addr_expr (t);
9350 else if (TREE_CODE (v) == INDIRECT_REF)
9351 t = build_fold_indirect_ref (t);
9352 if (TREE_CODE (TREE_OPERAND (decl, 0)) == POINTER_PLUS_EXPR)
9354 tree b = TREE_OPERAND (TREE_OPERAND (decl, 0), 1);
9355 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
9356 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, b);
9358 if (!integer_zerop (TREE_OPERAND (decl, 1)))
9359 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t,
9360 fold_convert (size_type_node,
9361 TREE_OPERAND (decl, 1)));
9363 else
9365 t = ref = maybe_lookup_decl_in_outer_ctx (var, ctx);
9366 if (!omp_privatize_by_reference (decl))
9367 t = build_fold_addr_expr (t);
9369 t = fold_convert (pointer_sized_int_node, t);
9370 seq = NULL;
9371 t = force_gimple_operand (t, &seq, true, NULL_TREE);
9372 gimple_seq_add_seq (start, seq);
9373 r = build4 (ARRAY_REF, pointer_sized_int_node, avar,
9374 size_int (7 + cnt * 3), NULL_TREE, NULL_TREE);
9375 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
9376 t = unshare_expr (byte_position (field));
9377 t = fold_convert (pointer_sized_int_node, t);
9378 ctx->task_reduction_map->put (c, cnt);
9379 ctx->task_reductions.quick_push (TREE_CODE (t) == INTEGER_CST
9380 ? t : NULL_TREE);
9381 seq = NULL;
9382 t = force_gimple_operand (t, &seq, true, NULL_TREE);
9383 gimple_seq_add_seq (start, seq);
9384 r = build4 (ARRAY_REF, pointer_sized_int_node, avar,
9385 size_int (7 + cnt * 3 + 1), NULL_TREE, NULL_TREE);
9386 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
9388 tree bfield = DECL_CHAIN (field);
9389 tree cond;
9390 if (code == OMP_PARALLEL
9391 || code == OMP_FOR
9392 || code == OMP_SECTIONS
9393 || code == OMP_SCOPE)
9394 /* In parallel, worksharing or scope all threads unconditionally
9395 initialize all their task reduction private variables. */
9396 cond = boolean_true_node;
9397 else if (TREE_TYPE (ptr) == ptr_type_node)
9399 cond = build2 (POINTER_PLUS_EXPR, ptr_type_node, ptr,
9400 unshare_expr (byte_position (bfield)));
9401 seq = NULL;
9402 cond = force_gimple_operand (cond, &seq, true, NULL_TREE);
9403 gimple_seq_add_seq (end, seq);
9404 tree pbool = build_pointer_type (TREE_TYPE (bfield));
9405 cond = build2 (MEM_REF, TREE_TYPE (bfield), cond,
9406 build_int_cst (pbool, 0));
9408 else
9409 cond = build3 (COMPONENT_REF, TREE_TYPE (bfield),
9410 build_simple_mem_ref (ptr), bfield, NULL_TREE);
9411 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
9412 tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
9413 tree condv = create_tmp_var (boolean_type_node);
9414 gimple_seq_add_stmt (end, gimple_build_assign (condv, cond));
9415 g = gimple_build_cond (NE_EXPR, condv, boolean_false_node,
9416 lab3, lab4);
9417 gimple_seq_add_stmt (end, g);
9418 gimple_seq_add_stmt (end, gimple_build_label (lab3));
9419 if (cancellable && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) == NULL_TREE)
9421 /* If this reduction doesn't need destruction and parallel
9422 has been cancelled, there is nothing to do for this
9423 reduction, so jump around the merge operation. */
9424 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
9425 g = gimple_build_cond (NE_EXPR, cancellable,
9426 build_zero_cst (TREE_TYPE (cancellable)),
9427 lab4, lab5);
9428 gimple_seq_add_stmt (end, g);
9429 gimple_seq_add_stmt (end, gimple_build_label (lab5));
9432 tree new_var;
9433 if (TREE_TYPE (ptr) == ptr_type_node)
9435 new_var = build2 (POINTER_PLUS_EXPR, ptr_type_node, ptr,
9436 unshare_expr (byte_position (field)));
9437 seq = NULL;
9438 new_var = force_gimple_operand (new_var, &seq, true, NULL_TREE);
9439 gimple_seq_add_seq (end, seq);
9440 tree pbool = build_pointer_type (TREE_TYPE (field));
9441 new_var = build2 (MEM_REF, TREE_TYPE (field), new_var,
9442 build_int_cst (pbool, 0));
9444 else
9445 new_var = build3 (COMPONENT_REF, TREE_TYPE (field),
9446 build_simple_mem_ref (ptr), field, NULL_TREE);
9448 enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
9449 if (TREE_CODE (decl) != MEM_REF
9450 && omp_privatize_by_reference (decl))
9451 ref = build_simple_mem_ref (ref);
9452 /* reduction(-:var) sums up the partial results, so it acts
9453 identically to reduction(+:var). */
9454 if (rcode == MINUS_EXPR)
9455 rcode = PLUS_EXPR;
9456 if (TREE_CODE (decl) == MEM_REF)
9458 tree type = TREE_TYPE (new_var);
9459 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
9460 tree i = create_tmp_var (TREE_TYPE (v));
9461 tree ptype = build_pointer_type (TREE_TYPE (type));
9462 if (DECL_P (v))
9464 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
9465 tree vv = create_tmp_var (TREE_TYPE (v));
9466 gimplify_assign (vv, v, start);
9467 v = vv;
9469 ref = build4 (ARRAY_REF, pointer_sized_int_node, avar,
9470 size_int (7 + cnt * 3), NULL_TREE, NULL_TREE);
9471 new_var = build_fold_addr_expr (new_var);
9472 new_var = fold_convert (ptype, new_var);
9473 ref = fold_convert (ptype, ref);
9474 tree m = create_tmp_var (ptype);
9475 gimplify_assign (m, new_var, end);
9476 new_var = m;
9477 m = create_tmp_var (ptype);
9478 gimplify_assign (m, ref, end);
9479 ref = m;
9480 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), end);
9481 tree body = create_artificial_label (UNKNOWN_LOCATION);
9482 tree endl = create_artificial_label (UNKNOWN_LOCATION);
9483 gimple_seq_add_stmt (end, gimple_build_label (body));
9484 tree priv = build_simple_mem_ref (new_var);
9485 tree out = build_simple_mem_ref (ref);
9486 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
9488 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
9489 tree decl_placeholder
9490 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
9491 tree lab6 = NULL_TREE;
9492 if (cancellable)
9494 /* If this reduction needs destruction and parallel
9495 has been cancelled, jump around the merge operation
9496 to the destruction. */
9497 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
9498 lab6 = create_artificial_label (UNKNOWN_LOCATION);
9499 tree zero = build_zero_cst (TREE_TYPE (cancellable));
9500 g = gimple_build_cond (NE_EXPR, cancellable, zero,
9501 lab6, lab5);
9502 gimple_seq_add_stmt (end, g);
9503 gimple_seq_add_stmt (end, gimple_build_label (lab5));
9505 SET_DECL_VALUE_EXPR (placeholder, out);
9506 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
9507 SET_DECL_VALUE_EXPR (decl_placeholder, priv);
9508 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
9509 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
9510 gimple_seq_add_seq (end,
9511 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
9512 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
9513 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
9515 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
9516 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
9518 if (cancellable)
9519 gimple_seq_add_stmt (end, gimple_build_label (lab6));
9520 tree x = lang_hooks.decls.omp_clause_dtor (c, priv);
9521 if (x)
9523 gimple_seq tseq = NULL;
9524 gimplify_stmt (&x, &tseq);
9525 gimple_seq_add_seq (end, tseq);
9528 else
9530 tree x = build2 (rcode, TREE_TYPE (out), out, priv);
9531 out = unshare_expr (out);
9532 gimplify_assign (out, x, end);
9534 gimple *g
9535 = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
9536 TYPE_SIZE_UNIT (TREE_TYPE (type)));
9537 gimple_seq_add_stmt (end, g);
9538 g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
9539 TYPE_SIZE_UNIT (TREE_TYPE (type)));
9540 gimple_seq_add_stmt (end, g);
9541 g = gimple_build_assign (i, PLUS_EXPR, i,
9542 build_int_cst (TREE_TYPE (i), 1));
9543 gimple_seq_add_stmt (end, g);
9544 g = gimple_build_cond (LE_EXPR, i, v, body, endl);
9545 gimple_seq_add_stmt (end, g);
9546 gimple_seq_add_stmt (end, gimple_build_label (endl));
9548 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
9550 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
9551 tree oldv = NULL_TREE;
9552 tree lab6 = NULL_TREE;
9553 if (cancellable)
9555 /* If this reduction needs destruction and parallel
9556 has been cancelled, jump around the merge operation
9557 to the destruction. */
9558 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
9559 lab6 = create_artificial_label (UNKNOWN_LOCATION);
9560 tree zero = build_zero_cst (TREE_TYPE (cancellable));
9561 g = gimple_build_cond (NE_EXPR, cancellable, zero,
9562 lab6, lab5);
9563 gimple_seq_add_stmt (end, g);
9564 gimple_seq_add_stmt (end, gimple_build_label (lab5));
9566 if (omp_privatize_by_reference (decl)
9567 && !useless_type_conversion_p (TREE_TYPE (placeholder),
9568 TREE_TYPE (ref)))
9569 ref = build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c), ref);
9570 ref = build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c), ref);
9571 tree refv = create_tmp_var (TREE_TYPE (ref));
9572 gimplify_assign (refv, ref, end);
9573 ref = build_simple_mem_ref_loc (OMP_CLAUSE_LOCATION (c), refv);
9574 SET_DECL_VALUE_EXPR (placeholder, ref);
9575 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
9576 tree d = maybe_lookup_decl (decl, ctx);
9577 gcc_assert (d);
9578 if (DECL_HAS_VALUE_EXPR_P (d))
9579 oldv = DECL_VALUE_EXPR (d);
9580 if (omp_privatize_by_reference (var))
9582 tree v = fold_convert (TREE_TYPE (d),
9583 build_fold_addr_expr (new_var));
9584 SET_DECL_VALUE_EXPR (d, v);
9586 else
9587 SET_DECL_VALUE_EXPR (d, new_var);
9588 DECL_HAS_VALUE_EXPR_P (d) = 1;
9589 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
9590 if (oldv)
9591 SET_DECL_VALUE_EXPR (d, oldv);
9592 else
9594 SET_DECL_VALUE_EXPR (d, NULL_TREE);
9595 DECL_HAS_VALUE_EXPR_P (d) = 0;
9597 gimple_seq_add_seq (end, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
9598 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
9599 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
9600 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
9601 if (cancellable)
9602 gimple_seq_add_stmt (end, gimple_build_label (lab6));
9603 tree x = lang_hooks.decls.omp_clause_dtor (c, new_var);
9604 if (x)
9606 gimple_seq tseq = NULL;
9607 gimplify_stmt (&x, &tseq);
9608 gimple_seq_add_seq (end, tseq);
9611 else
9613 tree x = build2 (rcode, TREE_TYPE (ref), ref, new_var);
9614 ref = unshare_expr (ref);
9615 gimplify_assign (ref, x, end);
9617 gimple_seq_add_stmt (end, gimple_build_label (lab4));
9618 ++cnt;
9619 field = DECL_CHAIN (bfield);
9623 if (code == OMP_TASKGROUP)
9625 t = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_REGISTER);
9626 g = gimple_build_call (t, 1, build_fold_addr_expr (avar));
9627 gimple_seq_add_stmt (start, g);
9629 else
9631 tree c;
9632 if (code == OMP_FOR)
9633 c = gimple_omp_for_clauses (ctx->stmt);
9634 else if (code == OMP_SECTIONS)
9635 c = gimple_omp_sections_clauses (ctx->stmt);
9636 else if (code == OMP_SCOPE)
9637 c = gimple_omp_scope_clauses (ctx->stmt);
9638 else
9639 c = gimple_omp_taskreg_clauses (ctx->stmt);
9640 c = omp_find_clause (c, OMP_CLAUSE__REDUCTEMP_);
9641 t = fold_convert (TREE_TYPE (OMP_CLAUSE_DECL (c)),
9642 build_fold_addr_expr (avar));
9643 gimplify_assign (OMP_CLAUSE_DECL (c), t, start);
9646 gimple_seq_add_stmt (end, gimple_build_assign (data, PLUS_EXPR, data, sz));
9647 gimple_seq_add_stmt (end, gimple_build_assign (idx, PLUS_EXPR, idx,
9648 size_one_node));
9649 g = gimple_build_cond (NE_EXPR, idx, num_thr_sz, lab1, lab2);
9650 gimple_seq_add_stmt (end, g);
9651 gimple_seq_add_stmt (end, gimple_build_label (lab2));
9652 if (code == OMP_FOR || code == OMP_SECTIONS || code == OMP_SCOPE)
9654 enum built_in_function bfn
9655 = BUILT_IN_GOMP_WORKSHARE_TASK_REDUCTION_UNREGISTER;
9656 t = builtin_decl_explicit (bfn);
9657 tree c_bool_type = TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (t)));
9658 tree arg;
9659 if (cancellable)
9661 arg = create_tmp_var (c_bool_type);
9662 gimple_seq_add_stmt (end, gimple_build_assign (arg, NOP_EXPR,
9663 cancellable));
9665 else
9666 arg = build_int_cst (c_bool_type, 0);
9667 g = gimple_build_call (t, 1, arg);
9669 else
9671 t = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_UNREGISTER);
9672 g = gimple_build_call (t, 1, build_fold_addr_expr (avar));
9674 gimple_seq_add_stmt (end, g);
9675 if (lab7)
9676 gimple_seq_add_stmt (end, gimple_build_label (lab7));
9677 t = build_constructor (atype, NULL);
9678 TREE_THIS_VOLATILE (t) = 1;
9679 gimple_seq_add_stmt (end, gimple_build_assign (avar, t));
9682 /* Expand code for an OpenMP taskgroup directive. */
9684 static void
9685 lower_omp_taskgroup (gimple_stmt_iterator *gsi_p, omp_context *ctx)
9687 gimple *stmt = gsi_stmt (*gsi_p);
9688 gcall *x;
9689 gbind *bind;
9690 gimple_seq dseq = NULL;
9691 tree block = make_node (BLOCK);
9693 bind = gimple_build_bind (NULL, NULL, block);
9694 gsi_replace (gsi_p, bind, true);
9695 gimple_bind_add_stmt (bind, stmt);
9697 push_gimplify_context ();
9699 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_START),
9701 gimple_bind_add_stmt (bind, x);
9703 lower_omp_task_reductions (ctx, OMP_TASKGROUP,
9704 gimple_omp_taskgroup_clauses (stmt),
9705 gimple_bind_body_ptr (bind), &dseq);
9707 lower_omp (gimple_omp_body_ptr (stmt), ctx);
9708 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
9709 gimple_omp_set_body (stmt, NULL);
9711 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
9712 gimple_bind_add_seq (bind, dseq);
9714 pop_gimplify_context (bind);
9716 gimple_bind_append_vars (bind, ctx->block_vars);
9717 BLOCK_VARS (block) = ctx->block_vars;
9721 /* Fold the OMP_ORDERED_CLAUSES for the OMP_ORDERED in STMT if possible. */
9723 static void
9724 lower_omp_ordered_clauses (gimple_stmt_iterator *gsi_p, gomp_ordered *ord_stmt,
9725 omp_context *ctx)
9727 struct omp_for_data fd;
9728 if (!ctx->outer || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR)
9729 return;
9731 unsigned int len = gimple_omp_for_collapse (ctx->outer->stmt);
9732 struct omp_for_data_loop *loops = XALLOCAVEC (struct omp_for_data_loop, len);
9733 omp_extract_for_data (as_a <gomp_for *> (ctx->outer->stmt), &fd, loops);
9734 if (!fd.ordered)
9735 return;
9737 tree *list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
9738 tree c = gimple_omp_ordered_clauses (ord_stmt);
9739 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
9740 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
9742 /* Merge depend clauses from multiple adjacent
9743 #pragma omp ordered depend(sink:...) constructs
9744 into one #pragma omp ordered depend(sink:...), so that
9745 we can optimize them together. */
9746 gimple_stmt_iterator gsi = *gsi_p;
9747 gsi_next (&gsi);
9748 while (!gsi_end_p (gsi))
9750 gimple *stmt = gsi_stmt (gsi);
9751 if (is_gimple_debug (stmt)
9752 || gimple_code (stmt) == GIMPLE_NOP)
9754 gsi_next (&gsi);
9755 continue;
9757 if (gimple_code (stmt) != GIMPLE_OMP_ORDERED)
9758 break;
9759 gomp_ordered *ord_stmt2 = as_a <gomp_ordered *> (stmt);
9760 c = gimple_omp_ordered_clauses (ord_stmt2);
9761 if (c == NULL_TREE
9762 || OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND
9763 || OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
9764 break;
9765 while (*list_p)
9766 list_p = &OMP_CLAUSE_CHAIN (*list_p);
9767 *list_p = c;
9768 gsi_remove (&gsi, true);
9772 /* Canonicalize sink dependence clauses into one folded clause if
9773 possible.
9775 The basic algorithm is to create a sink vector whose first
9776 element is the GCD of all the first elements, and whose remaining
9777 elements are the minimum of the subsequent columns.
9779 We ignore dependence vectors whose first element is zero because
9780 such dependencies are known to be executed by the same thread.
9782 We take into account the direction of the loop, so a minimum
9783 becomes a maximum if the loop is iterating forwards. We also
9784 ignore sink clauses where the loop direction is unknown, or where
9785 the offsets are clearly invalid because they are not a multiple
9786 of the loop increment.
9788 For example:
9790 #pragma omp for ordered(2)
9791 for (i=0; i < N; ++i)
9792 for (j=0; j < M; ++j)
9794 #pragma omp ordered \
9795 depend(sink:i-8,j-2) \
9796 depend(sink:i,j-1) \ // Completely ignored because i+0.
9797 depend(sink:i-4,j-3) \
9798 depend(sink:i-6,j-4)
9799 #pragma omp ordered depend(source)
9802 Folded clause is:
9804 depend(sink:-gcd(8,4,6),-min(2,3,4))
9805 -or-
9806 depend(sink:-2,-2)
9809 /* FIXME: Computing GCD's where the first element is zero is
9810 non-trivial in the presence of collapsed loops. Do this later. */
9811 if (fd.collapse > 1)
9812 return;
9814 wide_int *folded_deps = XALLOCAVEC (wide_int, 2 * len - 1);
9816 /* wide_int is not a POD so it must be default-constructed. */
9817 for (unsigned i = 0; i != 2 * len - 1; ++i)
9818 new (static_cast<void*>(folded_deps + i)) wide_int ();
9820 tree folded_dep = NULL_TREE;
9821 /* TRUE if the first dimension's offset is negative. */
9822 bool neg_offset_p = false;
9824 list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
9825 unsigned int i;
9826 while ((c = *list_p) != NULL)
9828 bool remove = false;
9830 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND);
9831 if (OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
9832 goto next_ordered_clause;
9834 tree vec;
9835 for (vec = OMP_CLAUSE_DECL (c), i = 0;
9836 vec && TREE_CODE (vec) == TREE_LIST;
9837 vec = TREE_CHAIN (vec), ++i)
9839 gcc_assert (i < len);
9841 /* omp_extract_for_data has canonicalized the condition. */
9842 gcc_assert (fd.loops[i].cond_code == LT_EXPR
9843 || fd.loops[i].cond_code == GT_EXPR);
9844 bool forward = fd.loops[i].cond_code == LT_EXPR;
9845 bool maybe_lexically_later = true;
9847 /* While the committee makes up its mind, bail if we have any
9848 non-constant steps. */
9849 if (TREE_CODE (fd.loops[i].step) != INTEGER_CST)
9850 goto lower_omp_ordered_ret;
9852 tree itype = TREE_TYPE (TREE_VALUE (vec));
9853 if (POINTER_TYPE_P (itype))
9854 itype = sizetype;
9855 wide_int offset = wide_int::from (wi::to_wide (TREE_PURPOSE (vec)),
9856 TYPE_PRECISION (itype),
9857 TYPE_SIGN (itype));
9859 /* Ignore invalid offsets that are not multiples of the step. */
9860 if (!wi::multiple_of_p (wi::abs (offset),
9861 wi::abs (wi::to_wide (fd.loops[i].step)),
9862 UNSIGNED))
9864 warning_at (OMP_CLAUSE_LOCATION (c), 0,
9865 "ignoring sink clause with offset that is not "
9866 "a multiple of the loop step");
9867 remove = true;
9868 goto next_ordered_clause;
9871 /* Calculate the first dimension. The first dimension of
9872 the folded dependency vector is the GCD of the first
9873 elements, while ignoring any first elements whose offset
9874 is 0. */
9875 if (i == 0)
9877 /* Ignore dependence vectors whose first dimension is 0. */
9878 if (offset == 0)
9880 remove = true;
9881 goto next_ordered_clause;
9883 else
9885 if (!TYPE_UNSIGNED (itype) && (forward ^ wi::neg_p (offset)))
9887 error_at (OMP_CLAUSE_LOCATION (c),
9888 "first offset must be in opposite direction "
9889 "of loop iterations");
9890 goto lower_omp_ordered_ret;
9892 if (forward)
9893 offset = -offset;
9894 neg_offset_p = forward;
9895 /* Initialize the first time around. */
9896 if (folded_dep == NULL_TREE)
9898 folded_dep = c;
9899 folded_deps[0] = offset;
9901 else
9902 folded_deps[0] = wi::gcd (folded_deps[0],
9903 offset, UNSIGNED);
9906 /* Calculate minimum for the remaining dimensions. */
9907 else
9909 folded_deps[len + i - 1] = offset;
9910 if (folded_dep == c)
9911 folded_deps[i] = offset;
9912 else if (maybe_lexically_later
9913 && !wi::eq_p (folded_deps[i], offset))
9915 if (forward ^ wi::gts_p (folded_deps[i], offset))
9917 unsigned int j;
9918 folded_dep = c;
9919 for (j = 1; j <= i; j++)
9920 folded_deps[j] = folded_deps[len + j - 1];
9922 else
9923 maybe_lexically_later = false;
9927 gcc_assert (i == len);
9929 remove = true;
9931 next_ordered_clause:
9932 if (remove)
9933 *list_p = OMP_CLAUSE_CHAIN (c);
9934 else
9935 list_p = &OMP_CLAUSE_CHAIN (c);
9938 if (folded_dep)
9940 if (neg_offset_p)
9941 folded_deps[0] = -folded_deps[0];
9943 tree itype = TREE_TYPE (TREE_VALUE (OMP_CLAUSE_DECL (folded_dep)));
9944 if (POINTER_TYPE_P (itype))
9945 itype = sizetype;
9947 TREE_PURPOSE (OMP_CLAUSE_DECL (folded_dep))
9948 = wide_int_to_tree (itype, folded_deps[0]);
9949 OMP_CLAUSE_CHAIN (folded_dep) = gimple_omp_ordered_clauses (ord_stmt);
9950 *gimple_omp_ordered_clauses_ptr (ord_stmt) = folded_dep;
9953 lower_omp_ordered_ret:
9955 /* Ordered without clauses is #pragma omp threads, while we want
9956 a nop instead if we remove all clauses. */
9957 if (gimple_omp_ordered_clauses (ord_stmt) == NULL_TREE)
9958 gsi_replace (gsi_p, gimple_build_nop (), true);
9962 /* Expand code for an OpenMP ordered directive. */
9964 static void
9965 lower_omp_ordered (gimple_stmt_iterator *gsi_p, omp_context *ctx)
9967 tree block;
9968 gimple *stmt = gsi_stmt (*gsi_p), *g;
9969 gomp_ordered *ord_stmt = as_a <gomp_ordered *> (stmt);
9970 gcall *x;
9971 gbind *bind;
9972 bool simd = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
9973 OMP_CLAUSE_SIMD);
9974 /* FIXME: this should check presence of OMP_CLAUSE__SIMT_ on the enclosing
9975 loop. */
9976 bool maybe_simt
9977 = simd && omp_maybe_offloaded_ctx (ctx) && omp_max_simt_vf () > 1;
9978 bool threads = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
9979 OMP_CLAUSE_THREADS);
9981 if (omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
9982 OMP_CLAUSE_DEPEND))
9984 /* FIXME: This is needs to be moved to the expansion to verify various
9985 conditions only testable on cfg with dominators computed, and also
9986 all the depend clauses to be merged still might need to be available
9987 for the runtime checks. */
9988 if (0)
9989 lower_omp_ordered_clauses (gsi_p, ord_stmt, ctx);
9990 return;
9993 push_gimplify_context ();
9995 block = make_node (BLOCK);
9996 bind = gimple_build_bind (NULL, NULL, block);
9997 gsi_replace (gsi_p, bind, true);
9998 gimple_bind_add_stmt (bind, stmt);
10000 if (simd)
10002 x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_START, 1,
10003 build_int_cst (NULL_TREE, threads));
10004 cfun->has_simduid_loops = true;
10006 else
10007 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_START),
10009 gimple_bind_add_stmt (bind, x);
10011 tree counter = NULL_TREE, test = NULL_TREE, body = NULL_TREE;
10012 if (maybe_simt)
10014 counter = create_tmp_var (integer_type_node);
10015 g = gimple_build_call_internal (IFN_GOMP_SIMT_LANE, 0);
10016 gimple_call_set_lhs (g, counter);
10017 gimple_bind_add_stmt (bind, g);
10019 body = create_artificial_label (UNKNOWN_LOCATION);
10020 test = create_artificial_label (UNKNOWN_LOCATION);
10021 gimple_bind_add_stmt (bind, gimple_build_label (body));
10023 tree simt_pred = create_tmp_var (integer_type_node);
10024 g = gimple_build_call_internal (IFN_GOMP_SIMT_ORDERED_PRED, 1, counter);
10025 gimple_call_set_lhs (g, simt_pred);
10026 gimple_bind_add_stmt (bind, g);
10028 tree t = create_artificial_label (UNKNOWN_LOCATION);
10029 g = gimple_build_cond (EQ_EXPR, simt_pred, integer_zero_node, t, test);
10030 gimple_bind_add_stmt (bind, g);
10032 gimple_bind_add_stmt (bind, gimple_build_label (t));
10034 lower_omp (gimple_omp_body_ptr (stmt), ctx);
10035 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
10036 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
10037 gimple_omp_set_body (stmt, NULL);
10039 if (maybe_simt)
10041 gimple_bind_add_stmt (bind, gimple_build_label (test));
10042 g = gimple_build_assign (counter, MINUS_EXPR, counter, integer_one_node);
10043 gimple_bind_add_stmt (bind, g);
10045 tree c = build2 (GE_EXPR, boolean_type_node, counter, integer_zero_node);
10046 tree nonneg = create_tmp_var (integer_type_node);
10047 gimple_seq tseq = NULL;
10048 gimplify_assign (nonneg, fold_convert (integer_type_node, c), &tseq);
10049 gimple_bind_add_seq (bind, tseq);
10051 g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY, 1, nonneg);
10052 gimple_call_set_lhs (g, nonneg);
10053 gimple_bind_add_stmt (bind, g);
10055 tree end = create_artificial_label (UNKNOWN_LOCATION);
10056 g = gimple_build_cond (NE_EXPR, nonneg, integer_zero_node, body, end);
10057 gimple_bind_add_stmt (bind, g);
10059 gimple_bind_add_stmt (bind, gimple_build_label (end));
10061 if (simd)
10062 x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_END, 1,
10063 build_int_cst (NULL_TREE, threads));
10064 else
10065 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_END),
10067 gimple_bind_add_stmt (bind, x);
10069 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
10071 pop_gimplify_context (bind);
10073 gimple_bind_append_vars (bind, ctx->block_vars);
10074 BLOCK_VARS (block) = gimple_bind_vars (bind);
10078 /* Expand code for an OpenMP scan directive and the structured block
10079 before the scan directive. */
10081 static void
10082 lower_omp_scan (gimple_stmt_iterator *gsi_p, omp_context *ctx)
10084 gimple *stmt = gsi_stmt (*gsi_p);
10085 bool has_clauses
10086 = gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt)) != NULL;
10087 tree lane = NULL_TREE;
10088 gimple_seq before = NULL;
10089 omp_context *octx = ctx->outer;
10090 gcc_assert (octx);
10091 if (octx->scan_exclusive && !has_clauses)
10093 gimple_stmt_iterator gsi2 = *gsi_p;
10094 gsi_next (&gsi2);
10095 gimple *stmt2 = gsi_stmt (gsi2);
10096 /* For exclusive scan, swap GIMPLE_OMP_SCAN without clauses
10097 with following GIMPLE_OMP_SCAN with clauses, so that input_phase,
10098 the one with exclusive clause(s), comes first. */
10099 if (stmt2
10100 && gimple_code (stmt2) == GIMPLE_OMP_SCAN
10101 && gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt2)) != NULL)
10103 gsi_remove (gsi_p, false);
10104 gsi_insert_after (gsi_p, stmt, GSI_SAME_STMT);
10105 ctx = maybe_lookup_ctx (stmt2);
10106 gcc_assert (ctx);
10107 lower_omp_scan (gsi_p, ctx);
10108 return;
10112 bool input_phase = has_clauses ^ octx->scan_inclusive;
10113 bool is_simd = (gimple_code (octx->stmt) == GIMPLE_OMP_FOR
10114 && gimple_omp_for_kind (octx->stmt) == GF_OMP_FOR_KIND_SIMD);
10115 bool is_for = (gimple_code (octx->stmt) == GIMPLE_OMP_FOR
10116 && gimple_omp_for_kind (octx->stmt) == GF_OMP_FOR_KIND_FOR
10117 && !gimple_omp_for_combined_p (octx->stmt));
10118 bool is_for_simd = is_simd && gimple_omp_for_combined_into_p (octx->stmt);
10119 if (is_for_simd && octx->for_simd_scan_phase)
10120 is_simd = false;
10121 if (is_simd)
10122 if (tree c = omp_find_clause (gimple_omp_for_clauses (octx->stmt),
10123 OMP_CLAUSE__SIMDUID_))
10125 tree uid = OMP_CLAUSE__SIMDUID__DECL (c);
10126 lane = create_tmp_var (unsigned_type_node);
10127 tree t = build_int_cst (integer_type_node,
10128 input_phase ? 1
10129 : octx->scan_inclusive ? 2 : 3);
10130 gimple *g
10131 = gimple_build_call_internal (IFN_GOMP_SIMD_LANE, 2, uid, t);
10132 gimple_call_set_lhs (g, lane);
10133 gimple_seq_add_stmt (&before, g);
10136 if (is_simd || is_for)
10138 for (tree c = gimple_omp_for_clauses (octx->stmt);
10139 c; c = OMP_CLAUSE_CHAIN (c))
10140 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
10141 && OMP_CLAUSE_REDUCTION_INSCAN (c))
10143 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
10144 tree var = OMP_CLAUSE_DECL (c);
10145 tree new_var = lookup_decl (var, octx);
10146 tree val = new_var;
10147 tree var2 = NULL_TREE;
10148 tree var3 = NULL_TREE;
10149 tree var4 = NULL_TREE;
10150 tree lane0 = NULL_TREE;
10151 tree new_vard = new_var;
10152 if (omp_privatize_by_reference (var))
10154 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
10155 val = new_var;
10157 if (DECL_HAS_VALUE_EXPR_P (new_vard))
10159 val = DECL_VALUE_EXPR (new_vard);
10160 if (new_vard != new_var)
10162 gcc_assert (TREE_CODE (val) == ADDR_EXPR);
10163 val = TREE_OPERAND (val, 0);
10165 if (TREE_CODE (val) == ARRAY_REF
10166 && VAR_P (TREE_OPERAND (val, 0)))
10168 tree v = TREE_OPERAND (val, 0);
10169 if (lookup_attribute ("omp simd array",
10170 DECL_ATTRIBUTES (v)))
10172 val = unshare_expr (val);
10173 lane0 = TREE_OPERAND (val, 1);
10174 TREE_OPERAND (val, 1) = lane;
10175 var2 = lookup_decl (v, octx);
10176 if (octx->scan_exclusive)
10177 var4 = lookup_decl (var2, octx);
10178 if (input_phase
10179 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
10180 var3 = maybe_lookup_decl (var4 ? var4 : var2, octx);
10181 if (!input_phase)
10183 var2 = build4 (ARRAY_REF, TREE_TYPE (val),
10184 var2, lane, NULL_TREE, NULL_TREE);
10185 TREE_THIS_NOTRAP (var2) = 1;
10186 if (octx->scan_exclusive)
10188 var4 = build4 (ARRAY_REF, TREE_TYPE (val),
10189 var4, lane, NULL_TREE,
10190 NULL_TREE);
10191 TREE_THIS_NOTRAP (var4) = 1;
10194 else
10195 var2 = val;
10198 gcc_assert (var2);
10200 else
10202 var2 = build_outer_var_ref (var, octx);
10203 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
10205 var3 = maybe_lookup_decl (new_vard, octx);
10206 if (var3 == new_vard || var3 == NULL_TREE)
10207 var3 = NULL_TREE;
10208 else if (is_simd && octx->scan_exclusive && !input_phase)
10210 var4 = maybe_lookup_decl (var3, octx);
10211 if (var4 == var3 || var4 == NULL_TREE)
10213 if (TREE_ADDRESSABLE (TREE_TYPE (new_var)))
10215 var4 = var3;
10216 var3 = NULL_TREE;
10218 else
10219 var4 = NULL_TREE;
10223 if (is_simd
10224 && octx->scan_exclusive
10225 && !input_phase
10226 && var4 == NULL_TREE)
10227 var4 = create_tmp_var (TREE_TYPE (val));
10229 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
10231 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
10232 if (input_phase)
10234 if (var3)
10236 /* If we've added a separate identity element
10237 variable, copy it over into val. */
10238 tree x = lang_hooks.decls.omp_clause_assign_op (c, val,
10239 var3);
10240 gimplify_and_add (x, &before);
10242 else if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
10244 /* Otherwise, assign to it the identity element. */
10245 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
10246 if (is_for)
10247 tseq = copy_gimple_seq_and_replace_locals (tseq);
10248 tree ref = build_outer_var_ref (var, octx);
10249 tree x = (DECL_HAS_VALUE_EXPR_P (new_vard)
10250 ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
10251 if (x)
10253 if (new_vard != new_var)
10254 val = build_fold_addr_expr_loc (clause_loc, val);
10255 SET_DECL_VALUE_EXPR (new_vard, val);
10257 SET_DECL_VALUE_EXPR (placeholder, ref);
10258 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
10259 lower_omp (&tseq, octx);
10260 if (x)
10261 SET_DECL_VALUE_EXPR (new_vard, x);
10262 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
10263 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
10264 gimple_seq_add_seq (&before, tseq);
10265 if (is_simd)
10266 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
10269 else if (is_simd)
10271 tree x;
10272 if (octx->scan_exclusive)
10274 tree v4 = unshare_expr (var4);
10275 tree v2 = unshare_expr (var2);
10276 x = lang_hooks.decls.omp_clause_assign_op (c, v4, v2);
10277 gimplify_and_add (x, &before);
10279 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
10280 x = (DECL_HAS_VALUE_EXPR_P (new_vard)
10281 ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
10282 tree vexpr = val;
10283 if (x && new_vard != new_var)
10284 vexpr = build_fold_addr_expr_loc (clause_loc, val);
10285 if (x)
10286 SET_DECL_VALUE_EXPR (new_vard, vexpr);
10287 SET_DECL_VALUE_EXPR (placeholder, var2);
10288 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
10289 lower_omp (&tseq, octx);
10290 gimple_seq_add_seq (&before, tseq);
10291 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
10292 if (x)
10293 SET_DECL_VALUE_EXPR (new_vard, x);
10294 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
10295 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
10296 if (octx->scan_inclusive)
10298 x = lang_hooks.decls.omp_clause_assign_op (c, val,
10299 var2);
10300 gimplify_and_add (x, &before);
10302 else if (lane0 == NULL_TREE)
10304 x = lang_hooks.decls.omp_clause_assign_op (c, val,
10305 var4);
10306 gimplify_and_add (x, &before);
10310 else
10312 if (input_phase)
10314 /* input phase. Set val to initializer before
10315 the body. */
10316 tree x = omp_reduction_init (c, TREE_TYPE (new_var));
10317 gimplify_assign (val, x, &before);
10319 else if (is_simd)
10321 /* scan phase. */
10322 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
10323 if (code == MINUS_EXPR)
10324 code = PLUS_EXPR;
10326 tree x = build2 (code, TREE_TYPE (var2),
10327 unshare_expr (var2), unshare_expr (val));
10328 if (octx->scan_inclusive)
10330 gimplify_assign (unshare_expr (var2), x, &before);
10331 gimplify_assign (val, var2, &before);
10333 else
10335 gimplify_assign (unshare_expr (var4),
10336 unshare_expr (var2), &before);
10337 gimplify_assign (var2, x, &before);
10338 if (lane0 == NULL_TREE)
10339 gimplify_assign (val, var4, &before);
10343 if (octx->scan_exclusive && !input_phase && lane0)
10345 tree vexpr = unshare_expr (var4);
10346 TREE_OPERAND (vexpr, 1) = lane0;
10347 if (new_vard != new_var)
10348 vexpr = build_fold_addr_expr_loc (clause_loc, vexpr);
10349 SET_DECL_VALUE_EXPR (new_vard, vexpr);
10353 if (is_simd && !is_for_simd)
10355 gsi_insert_seq_after (gsi_p, gimple_omp_body (stmt), GSI_SAME_STMT);
10356 gsi_insert_seq_after (gsi_p, before, GSI_SAME_STMT);
10357 gsi_replace (gsi_p, gimple_build_nop (), true);
10358 return;
10360 lower_omp (gimple_omp_body_ptr (stmt), octx);
10361 if (before)
10363 gimple_stmt_iterator gsi = gsi_start (*gimple_omp_body_ptr (stmt));
10364 gsi_insert_seq_before (&gsi, before, GSI_SAME_STMT);
10369 /* Gimplify a GIMPLE_OMP_CRITICAL statement. This is a relatively simple
10370 substitution of a couple of function calls. But in the NAMED case,
10371 requires that languages coordinate a symbol name. It is therefore
10372 best put here in common code. */
10374 static GTY(()) hash_map<tree, tree> *critical_name_mutexes;
10376 static void
10377 lower_omp_critical (gimple_stmt_iterator *gsi_p, omp_context *ctx)
10379 tree block;
10380 tree name, lock, unlock;
10381 gomp_critical *stmt = as_a <gomp_critical *> (gsi_stmt (*gsi_p));
10382 gbind *bind;
10383 location_t loc = gimple_location (stmt);
10384 gimple_seq tbody;
10386 name = gimple_omp_critical_name (stmt);
10387 if (name)
10389 tree decl;
10391 if (!critical_name_mutexes)
10392 critical_name_mutexes = hash_map<tree, tree>::create_ggc (10);
10394 tree *n = critical_name_mutexes->get (name);
10395 if (n == NULL)
10397 char *new_str;
10399 decl = create_tmp_var_raw (ptr_type_node);
10401 new_str = ACONCAT ((".gomp_critical_user_",
10402 IDENTIFIER_POINTER (name), NULL));
10403 DECL_NAME (decl) = get_identifier (new_str);
10404 TREE_PUBLIC (decl) = 1;
10405 TREE_STATIC (decl) = 1;
10406 DECL_COMMON (decl) = 1;
10407 DECL_ARTIFICIAL (decl) = 1;
10408 DECL_IGNORED_P (decl) = 1;
10410 varpool_node::finalize_decl (decl);
10412 critical_name_mutexes->put (name, decl);
10414 else
10415 decl = *n;
10417 /* If '#pragma omp critical' is inside offloaded region or
10418 inside function marked as offloadable, the symbol must be
10419 marked as offloadable too. */
10420 omp_context *octx;
10421 if (cgraph_node::get (current_function_decl)->offloadable)
10422 varpool_node::get_create (decl)->offloadable = 1;
10423 else
10424 for (octx = ctx->outer; octx; octx = octx->outer)
10425 if (is_gimple_omp_offloaded (octx->stmt))
10427 varpool_node::get_create (decl)->offloadable = 1;
10428 break;
10431 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_START);
10432 lock = build_call_expr_loc (loc, lock, 1,
10433 build_fold_addr_expr_loc (loc, decl));
10435 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_END);
10436 unlock = build_call_expr_loc (loc, unlock, 1,
10437 build_fold_addr_expr_loc (loc, decl));
10439 else
10441 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_START);
10442 lock = build_call_expr_loc (loc, lock, 0);
10444 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_END);
10445 unlock = build_call_expr_loc (loc, unlock, 0);
10448 push_gimplify_context ();
10450 block = make_node (BLOCK);
10451 bind = gimple_build_bind (NULL, NULL, block);
10452 gsi_replace (gsi_p, bind, true);
10453 gimple_bind_add_stmt (bind, stmt);
10455 tbody = gimple_bind_body (bind);
10456 gimplify_and_add (lock, &tbody);
10457 gimple_bind_set_body (bind, tbody);
10459 lower_omp (gimple_omp_body_ptr (stmt), ctx);
10460 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
10461 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
10462 gimple_omp_set_body (stmt, NULL);
10464 tbody = gimple_bind_body (bind);
10465 gimplify_and_add (unlock, &tbody);
10466 gimple_bind_set_body (bind, tbody);
10468 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
10470 pop_gimplify_context (bind);
10471 gimple_bind_append_vars (bind, ctx->block_vars);
10472 BLOCK_VARS (block) = gimple_bind_vars (bind);
10475 /* A subroutine of lower_omp_for. Generate code to emit the predicate
10476 for a lastprivate clause. Given a loop control predicate of (V
10477 cond N2), we gate the clause on (!(V cond N2)). The lowered form
10478 is appended to *DLIST, iterator initialization is appended to
10479 *BODY_P. *CLIST is for lastprivate(conditional:) code that needs
10480 to be emitted in a critical section. */
10482 static void
10483 lower_omp_for_lastprivate (struct omp_for_data *fd, gimple_seq *body_p,
10484 gimple_seq *dlist, gimple_seq *clist,
10485 struct omp_context *ctx)
10487 tree clauses, cond, vinit;
10488 enum tree_code cond_code;
10489 gimple_seq stmts;
10491 cond_code = fd->loop.cond_code;
10492 cond_code = cond_code == LT_EXPR ? GE_EXPR : LE_EXPR;
10494 /* When possible, use a strict equality expression. This can let VRP
10495 type optimizations deduce the value and remove a copy. */
10496 if (tree_fits_shwi_p (fd->loop.step))
10498 HOST_WIDE_INT step = tree_to_shwi (fd->loop.step);
10499 if (step == 1 || step == -1)
10500 cond_code = EQ_EXPR;
10503 tree n2 = fd->loop.n2;
10504 if (fd->collapse > 1
10505 && TREE_CODE (n2) != INTEGER_CST
10506 && gimple_omp_for_combined_into_p (fd->for_stmt))
10508 struct omp_context *taskreg_ctx = NULL;
10509 if (gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
10511 gomp_for *gfor = as_a <gomp_for *> (ctx->outer->stmt);
10512 if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_FOR
10513 || gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_DISTRIBUTE)
10515 if (gimple_omp_for_combined_into_p (gfor))
10517 gcc_assert (ctx->outer->outer
10518 && is_parallel_ctx (ctx->outer->outer));
10519 taskreg_ctx = ctx->outer->outer;
10521 else
10523 struct omp_for_data outer_fd;
10524 omp_extract_for_data (gfor, &outer_fd, NULL);
10525 n2 = fold_convert (TREE_TYPE (n2), outer_fd.loop.n2);
10528 else if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_TASKLOOP)
10529 taskreg_ctx = ctx->outer->outer;
10531 else if (is_taskreg_ctx (ctx->outer))
10532 taskreg_ctx = ctx->outer;
10533 if (taskreg_ctx)
10535 int i;
10536 tree taskreg_clauses
10537 = gimple_omp_taskreg_clauses (taskreg_ctx->stmt);
10538 tree innerc = omp_find_clause (taskreg_clauses,
10539 OMP_CLAUSE__LOOPTEMP_);
10540 gcc_assert (innerc);
10541 int count = fd->collapse;
10542 if (fd->non_rect
10543 && fd->last_nonrect == fd->first_nonrect + 1)
10544 if (tree v = gimple_omp_for_index (fd->for_stmt, fd->last_nonrect))
10545 if (!TYPE_UNSIGNED (TREE_TYPE (v)))
10546 count += 4;
10547 for (i = 0; i < count; i++)
10549 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
10550 OMP_CLAUSE__LOOPTEMP_);
10551 gcc_assert (innerc);
10553 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
10554 OMP_CLAUSE__LOOPTEMP_);
10555 if (innerc)
10556 n2 = fold_convert (TREE_TYPE (n2),
10557 lookup_decl (OMP_CLAUSE_DECL (innerc),
10558 taskreg_ctx));
10561 cond = build2 (cond_code, boolean_type_node, fd->loop.v, n2);
10563 clauses = gimple_omp_for_clauses (fd->for_stmt);
10564 stmts = NULL;
10565 lower_lastprivate_clauses (clauses, cond, body_p, &stmts, clist, ctx);
10566 if (!gimple_seq_empty_p (stmts))
10568 gimple_seq_add_seq (&stmts, *dlist);
10569 *dlist = stmts;
10571 /* Optimize: v = 0; is usually cheaper than v = some_other_constant. */
10572 vinit = fd->loop.n1;
10573 if (cond_code == EQ_EXPR
10574 && tree_fits_shwi_p (fd->loop.n2)
10575 && ! integer_zerop (fd->loop.n2))
10576 vinit = build_int_cst (TREE_TYPE (fd->loop.v), 0);
10577 else
10578 vinit = unshare_expr (vinit);
10580 /* Initialize the iterator variable, so that threads that don't execute
10581 any iterations don't execute the lastprivate clauses by accident. */
10582 gimplify_assign (fd->loop.v, vinit, body_p);
10586 /* OpenACC privatization.
10588 Or, in other words, *sharing* at the respective OpenACC level of
10589 parallelism.
10591 From a correctness perspective, a non-addressable variable can't be accessed
10592 outside the current thread, so it can go in a (faster than shared memory)
10593 register -- though that register may need to be broadcast in some
10594 circumstances. A variable can only meaningfully be "shared" across workers
10595 or vector lanes if its address is taken, e.g. by a call to an atomic
10596 builtin.
10598 From an optimisation perspective, the answer might be fuzzier: maybe
10599 sometimes, using shared memory directly would be faster than
10600 broadcasting. */
10602 static void
10603 oacc_privatization_begin_diagnose_var (const dump_flags_t l_dump_flags,
10604 const location_t loc, const tree c,
10605 const tree decl)
10607 const dump_user_location_t d_u_loc
10608 = dump_user_location_t::from_location_t (loc);
10609 /* PR100695 "Format decoder, quoting in 'dump_printf' etc." */
10610 #if __GNUC__ >= 10
10611 # pragma GCC diagnostic push
10612 # pragma GCC diagnostic ignored "-Wformat"
10613 #endif
10614 dump_printf_loc (l_dump_flags, d_u_loc,
10615 "variable %<%T%> ", decl);
10616 #if __GNUC__ >= 10
10617 # pragma GCC diagnostic pop
10618 #endif
10619 if (c)
10620 dump_printf (l_dump_flags,
10621 "in %qs clause ",
10622 omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
10623 else
10624 dump_printf (l_dump_flags,
10625 "declared in block ");
10628 static bool
10629 oacc_privatization_candidate_p (const location_t loc, const tree c,
10630 const tree decl)
10632 dump_flags_t l_dump_flags = get_openacc_privatization_dump_flags ();
10634 /* There is some differentiation depending on block vs. clause. */
10635 bool block = !c;
10637 bool res = true;
10639 if (res && !VAR_P (decl))
10641 /* A PARM_DECL (appearing in a 'private' clause) is expected to have been
10642 privatized into a new VAR_DECL. */
10643 gcc_checking_assert (TREE_CODE (decl) != PARM_DECL);
10645 res = false;
10647 if (dump_enabled_p ())
10649 oacc_privatization_begin_diagnose_var (l_dump_flags, loc, c, decl);
10650 dump_printf (l_dump_flags,
10651 "potentially has improper OpenACC privatization level: %qs\n",
10652 get_tree_code_name (TREE_CODE (decl)));
10656 if (res && block && TREE_STATIC (decl))
10658 res = false;
10660 if (dump_enabled_p ())
10662 oacc_privatization_begin_diagnose_var (l_dump_flags, loc, c, decl);
10663 dump_printf (l_dump_flags,
10664 "isn%'t candidate for adjusting OpenACC privatization level: %s\n",
10665 "static");
10669 if (res && block && DECL_EXTERNAL (decl))
10671 res = false;
10673 if (dump_enabled_p ())
10675 oacc_privatization_begin_diagnose_var (l_dump_flags, loc, c, decl);
10676 dump_printf (l_dump_flags,
10677 "isn%'t candidate for adjusting OpenACC privatization level: %s\n",
10678 "external");
10682 if (res && !TREE_ADDRESSABLE (decl))
10684 res = false;
10686 if (dump_enabled_p ())
10688 oacc_privatization_begin_diagnose_var (l_dump_flags, loc, c, decl);
10689 dump_printf (l_dump_flags,
10690 "isn%'t candidate for adjusting OpenACC privatization level: %s\n",
10691 "not addressable");
10695 if (res)
10697 if (dump_enabled_p ())
10699 oacc_privatization_begin_diagnose_var (l_dump_flags, loc, c, decl);
10700 dump_printf (l_dump_flags,
10701 "is candidate for adjusting OpenACC privatization level\n");
10705 if (dump_file && (dump_flags & TDF_DETAILS))
10707 print_generic_decl (dump_file, decl, dump_flags);
10708 fprintf (dump_file, "\n");
10711 return res;
10714 /* Scan CLAUSES for candidates for adjusting OpenACC privatization level in
10715 CTX. */
10717 static void
10718 oacc_privatization_scan_clause_chain (omp_context *ctx, tree clauses)
10720 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
10721 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE)
10723 tree decl = OMP_CLAUSE_DECL (c);
10725 tree new_decl = lookup_decl (decl, ctx);
10727 if (!oacc_privatization_candidate_p (OMP_CLAUSE_LOCATION (c), c,
10728 new_decl))
10729 continue;
10731 gcc_checking_assert
10732 (!ctx->oacc_privatization_candidates.contains (new_decl));
10733 ctx->oacc_privatization_candidates.safe_push (new_decl);
10737 /* Scan DECLS for candidates for adjusting OpenACC privatization level in
10738 CTX. */
10740 static void
10741 oacc_privatization_scan_decl_chain (omp_context *ctx, tree decls)
10743 for (tree decl = decls; decl; decl = DECL_CHAIN (decl))
10745 tree new_decl = lookup_decl (decl, ctx);
10746 gcc_checking_assert (new_decl == decl);
10748 if (!oacc_privatization_candidate_p (gimple_location (ctx->stmt), NULL,
10749 new_decl))
10750 continue;
10752 gcc_checking_assert
10753 (!ctx->oacc_privatization_candidates.contains (new_decl));
10754 ctx->oacc_privatization_candidates.safe_push (new_decl);
10758 /* Callback for walk_gimple_seq. Find #pragma omp scan statement. */
10760 static tree
10761 omp_find_scan (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
10762 struct walk_stmt_info *wi)
10764 gimple *stmt = gsi_stmt (*gsi_p);
10766 *handled_ops_p = true;
10767 switch (gimple_code (stmt))
10769 WALK_SUBSTMTS;
10771 case GIMPLE_OMP_FOR:
10772 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_SIMD
10773 && gimple_omp_for_combined_into_p (stmt))
10774 *handled_ops_p = false;
10775 break;
10777 case GIMPLE_OMP_SCAN:
10778 *(gimple_stmt_iterator *) (wi->info) = *gsi_p;
10779 return integer_zero_node;
10780 default:
10781 break;
10783 return NULL;
10786 /* Helper function for lower_omp_for, add transformations for a worksharing
10787 loop with scan directives inside of it.
10788 For worksharing loop not combined with simd, transform:
10789 #pragma omp for reduction(inscan,+:r) private(i)
10790 for (i = 0; i < n; i = i + 1)
10793 update (r);
10795 #pragma omp scan inclusive(r)
10797 use (r);
10801 into two worksharing loops + code to merge results:
10803 num_threads = omp_get_num_threads ();
10804 thread_num = omp_get_thread_num ();
10805 if (thread_num == 0) goto <D.2099>; else goto <D.2100>;
10806 <D.2099>:
10807 var2 = r;
10808 goto <D.2101>;
10809 <D.2100>:
10810 // For UDRs this is UDR init, or if ctors are needed, copy from
10811 // var3 that has been constructed to contain the neutral element.
10812 var2 = 0;
10813 <D.2101>:
10814 ivar = 0;
10815 // The _scantemp_ clauses will arrange for rpriva to be initialized to
10816 // a shared array with num_threads elements and rprivb to a local array
10817 // number of elements equal to the number of (contiguous) iterations the
10818 // current thread will perform. controlb and controlp variables are
10819 // temporaries to handle deallocation of rprivb at the end of second
10820 // GOMP_FOR.
10821 #pragma omp for _scantemp_(rpriva) _scantemp_(rprivb) _scantemp_(controlb) \
10822 _scantemp_(controlp) reduction(inscan,+:r) private(i) nowait
10823 for (i = 0; i < n; i = i + 1)
10826 // For UDRs this is UDR init or copy from var3.
10827 r = 0;
10828 // This is the input phase from user code.
10829 update (r);
10832 // For UDRs this is UDR merge.
10833 var2 = var2 + r;
10834 // Rather than handing it over to the user, save to local thread's
10835 // array.
10836 rprivb[ivar] = var2;
10837 // For exclusive scan, the above two statements are swapped.
10838 ivar = ivar + 1;
10841 // And remember the final value from this thread's into the shared
10842 // rpriva array.
10843 rpriva[(sizetype) thread_num] = var2;
10844 // If more than one thread, compute using Work-Efficient prefix sum
10845 // the inclusive parallel scan of the rpriva array.
10846 if (num_threads > 1) goto <D.2102>; else goto <D.2103>;
10847 <D.2102>:
10848 GOMP_barrier ();
10849 down = 0;
10850 k = 1;
10851 num_threadsu = (unsigned int) num_threads;
10852 thread_numup1 = (unsigned int) thread_num + 1;
10853 <D.2108>:
10854 twok = k << 1;
10855 if (twok > num_threadsu) goto <D.2110>; else goto <D.2111>;
10856 <D.2110>:
10857 down = 4294967295;
10858 k = k >> 1;
10859 if (k == num_threadsu) goto <D.2112>; else goto <D.2111>;
10860 <D.2112>:
10861 k = k >> 1;
10862 <D.2111>:
10863 twok = k << 1;
10864 cplx = .MUL_OVERFLOW (thread_nump1, twok);
10865 mul = REALPART_EXPR <cplx>;
10866 ovf = IMAGPART_EXPR <cplx>;
10867 if (ovf == 0) goto <D.2116>; else goto <D.2117>;
10868 <D.2116>:
10869 andv = k & down;
10870 andvm1 = andv + 4294967295;
10871 l = mul + andvm1;
10872 if (l < num_threadsu) goto <D.2120>; else goto <D.2117>;
10873 <D.2120>:
10874 // For UDRs this is UDR merge, performed using var2 variable as temporary,
10875 // i.e. var2 = rpriva[l - k]; UDR merge (var2, rpriva[l]); rpriva[l] = var2;
10876 rpriva[l] = rpriva[l - k] + rpriva[l];
10877 <D.2117>:
10878 if (down == 0) goto <D.2121>; else goto <D.2122>;
10879 <D.2121>:
10880 k = k << 1;
10881 goto <D.2123>;
10882 <D.2122>:
10883 k = k >> 1;
10884 <D.2123>:
10885 GOMP_barrier ();
10886 if (k != 0) goto <D.2108>; else goto <D.2103>;
10887 <D.2103>:
10888 if (thread_num == 0) goto <D.2124>; else goto <D.2125>;
10889 <D.2124>:
10890 // For UDRs this is UDR init or copy from var3.
10891 var2 = 0;
10892 goto <D.2126>;
10893 <D.2125>:
10894 var2 = rpriva[thread_num - 1];
10895 <D.2126>:
10896 ivar = 0;
10897 #pragma omp for _scantemp_(controlb) _scantemp_(controlp) \
10898 reduction(inscan,+:r) private(i)
10899 for (i = 0; i < n; i = i + 1)
10902 // For UDRs, this is r = var2; UDR merge (r, rprivb[ivar]);
10903 r = var2 + rprivb[ivar];
10906 // This is the scan phase from user code.
10907 use (r);
10908 // Plus a bump of the iterator.
10909 ivar = ivar + 1;
10911 } */
10913 static void
10914 lower_omp_for_scan (gimple_seq *body_p, gimple_seq *dlist, gomp_for *stmt,
10915 struct omp_for_data *fd, omp_context *ctx)
10917 bool is_for_simd = gimple_omp_for_combined_p (stmt);
10918 gcc_assert (ctx->scan_inclusive || ctx->scan_exclusive);
10920 gimple_seq body = gimple_omp_body (stmt);
10921 gimple_stmt_iterator input1_gsi = gsi_none ();
10922 struct walk_stmt_info wi;
10923 memset (&wi, 0, sizeof (wi));
10924 wi.val_only = true;
10925 wi.info = (void *) &input1_gsi;
10926 walk_gimple_seq_mod (&body, omp_find_scan, NULL, &wi);
10927 gcc_assert (!gsi_end_p (input1_gsi));
10929 gimple *input_stmt1 = gsi_stmt (input1_gsi);
10930 gimple_stmt_iterator gsi = input1_gsi;
10931 gsi_next (&gsi);
10932 gimple_stmt_iterator scan1_gsi = gsi;
10933 gimple *scan_stmt1 = gsi_stmt (gsi);
10934 gcc_assert (scan_stmt1 && gimple_code (scan_stmt1) == GIMPLE_OMP_SCAN);
10936 gimple_seq input_body = gimple_omp_body (input_stmt1);
10937 gimple_seq scan_body = gimple_omp_body (scan_stmt1);
10938 gimple_omp_set_body (input_stmt1, NULL);
10939 gimple_omp_set_body (scan_stmt1, NULL);
10940 gimple_omp_set_body (stmt, NULL);
10942 gomp_for *new_stmt = as_a <gomp_for *> (gimple_copy (stmt));
10943 gimple_seq new_body = copy_gimple_seq_and_replace_locals (body);
10944 gimple_omp_set_body (stmt, body);
10945 gimple_omp_set_body (input_stmt1, input_body);
10947 gimple_stmt_iterator input2_gsi = gsi_none ();
10948 memset (&wi, 0, sizeof (wi));
10949 wi.val_only = true;
10950 wi.info = (void *) &input2_gsi;
10951 walk_gimple_seq_mod (&new_body, omp_find_scan, NULL, &wi);
10952 gcc_assert (!gsi_end_p (input2_gsi));
10954 gimple *input_stmt2 = gsi_stmt (input2_gsi);
10955 gsi = input2_gsi;
10956 gsi_next (&gsi);
10957 gimple_stmt_iterator scan2_gsi = gsi;
10958 gimple *scan_stmt2 = gsi_stmt (gsi);
10959 gcc_assert (scan_stmt2 && gimple_code (scan_stmt2) == GIMPLE_OMP_SCAN);
10960 gimple_omp_set_body (scan_stmt2, scan_body);
10962 gimple_stmt_iterator input3_gsi = gsi_none ();
10963 gimple_stmt_iterator scan3_gsi = gsi_none ();
10964 gimple_stmt_iterator input4_gsi = gsi_none ();
10965 gimple_stmt_iterator scan4_gsi = gsi_none ();
10966 gimple *input_stmt3 = NULL, *scan_stmt3 = NULL;
10967 gimple *input_stmt4 = NULL, *scan_stmt4 = NULL;
10968 omp_context *input_simd_ctx = NULL, *scan_simd_ctx = NULL;
10969 if (is_for_simd)
10971 memset (&wi, 0, sizeof (wi));
10972 wi.val_only = true;
10973 wi.info = (void *) &input3_gsi;
10974 walk_gimple_seq_mod (&input_body, omp_find_scan, NULL, &wi);
10975 gcc_assert (!gsi_end_p (input3_gsi));
10977 input_stmt3 = gsi_stmt (input3_gsi);
10978 gsi = input3_gsi;
10979 gsi_next (&gsi);
10980 scan3_gsi = gsi;
10981 scan_stmt3 = gsi_stmt (gsi);
10982 gcc_assert (scan_stmt3 && gimple_code (scan_stmt3) == GIMPLE_OMP_SCAN);
10984 memset (&wi, 0, sizeof (wi));
10985 wi.val_only = true;
10986 wi.info = (void *) &input4_gsi;
10987 walk_gimple_seq_mod (&scan_body, omp_find_scan, NULL, &wi);
10988 gcc_assert (!gsi_end_p (input4_gsi));
10990 input_stmt4 = gsi_stmt (input4_gsi);
10991 gsi = input4_gsi;
10992 gsi_next (&gsi);
10993 scan4_gsi = gsi;
10994 scan_stmt4 = gsi_stmt (gsi);
10995 gcc_assert (scan_stmt4 && gimple_code (scan_stmt4) == GIMPLE_OMP_SCAN);
10997 input_simd_ctx = maybe_lookup_ctx (input_stmt3)->outer;
10998 scan_simd_ctx = maybe_lookup_ctx (input_stmt4)->outer;
11001 tree num_threads = create_tmp_var (integer_type_node);
11002 tree thread_num = create_tmp_var (integer_type_node);
11003 tree nthreads_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS);
11004 tree threadnum_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
11005 gimple *g = gimple_build_call (nthreads_decl, 0);
11006 gimple_call_set_lhs (g, num_threads);
11007 gimple_seq_add_stmt (body_p, g);
11008 g = gimple_build_call (threadnum_decl, 0);
11009 gimple_call_set_lhs (g, thread_num);
11010 gimple_seq_add_stmt (body_p, g);
11012 tree ivar = create_tmp_var (sizetype);
11013 tree new_clauses1 = NULL_TREE, new_clauses2 = NULL_TREE;
11014 tree *cp1 = &new_clauses1, *cp2 = &new_clauses2;
11015 tree k = create_tmp_var (unsigned_type_node);
11016 tree l = create_tmp_var (unsigned_type_node);
11018 gimple_seq clist = NULL, mdlist = NULL;
11019 gimple_seq thr01_list = NULL, thrn1_list = NULL;
11020 gimple_seq thr02_list = NULL, thrn2_list = NULL;
11021 gimple_seq scan1_list = NULL, input2_list = NULL;
11022 gimple_seq last_list = NULL, reduc_list = NULL;
11023 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
11024 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
11025 && OMP_CLAUSE_REDUCTION_INSCAN (c))
11027 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
11028 tree var = OMP_CLAUSE_DECL (c);
11029 tree new_var = lookup_decl (var, ctx);
11030 tree var3 = NULL_TREE;
11031 tree new_vard = new_var;
11032 if (omp_privatize_by_reference (var))
11033 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
11034 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
11036 var3 = maybe_lookup_decl (new_vard, ctx);
11037 if (var3 == new_vard)
11038 var3 = NULL_TREE;
11041 tree ptype = build_pointer_type (TREE_TYPE (new_var));
11042 tree rpriva = create_tmp_var (ptype);
11043 tree nc = build_omp_clause (clause_loc, OMP_CLAUSE__SCANTEMP_);
11044 OMP_CLAUSE_DECL (nc) = rpriva;
11045 *cp1 = nc;
11046 cp1 = &OMP_CLAUSE_CHAIN (nc);
11048 tree rprivb = create_tmp_var (ptype);
11049 nc = build_omp_clause (clause_loc, OMP_CLAUSE__SCANTEMP_);
11050 OMP_CLAUSE_DECL (nc) = rprivb;
11051 OMP_CLAUSE__SCANTEMP__ALLOC (nc) = 1;
11052 *cp1 = nc;
11053 cp1 = &OMP_CLAUSE_CHAIN (nc);
11055 tree var2 = create_tmp_var_raw (TREE_TYPE (new_var));
11056 if (new_vard != new_var)
11057 TREE_ADDRESSABLE (var2) = 1;
11058 gimple_add_tmp_var (var2);
11060 tree x = fold_convert_loc (clause_loc, sizetype, thread_num);
11061 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
11062 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
11063 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
11064 tree rpriva_ref = build_simple_mem_ref_loc (clause_loc, x);
11066 x = fold_build2_loc (clause_loc, PLUS_EXPR, integer_type_node,
11067 thread_num, integer_minus_one_node);
11068 x = fold_convert_loc (clause_loc, sizetype, x);
11069 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
11070 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
11071 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
11072 tree rprivam1_ref = build_simple_mem_ref_loc (clause_loc, x);
11074 x = fold_convert_loc (clause_loc, sizetype, l);
11075 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
11076 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
11077 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
11078 tree rprival_ref = build_simple_mem_ref_loc (clause_loc, x);
11080 x = fold_build2_loc (clause_loc, MINUS_EXPR, unsigned_type_node, l, k);
11081 x = fold_convert_loc (clause_loc, sizetype, x);
11082 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
11083 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
11084 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
11085 tree rprivalmk_ref = build_simple_mem_ref_loc (clause_loc, x);
11087 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, ivar,
11088 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
11089 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rprivb), rprivb, x);
11090 tree rprivb_ref = build_simple_mem_ref_loc (clause_loc, x);
11092 tree var4 = is_for_simd ? new_var : var2;
11093 tree var5 = NULL_TREE, var6 = NULL_TREE;
11094 if (is_for_simd)
11096 var5 = lookup_decl (var, input_simd_ctx);
11097 var6 = lookup_decl (var, scan_simd_ctx);
11098 if (new_vard != new_var)
11100 var5 = build_simple_mem_ref_loc (clause_loc, var5);
11101 var6 = build_simple_mem_ref_loc (clause_loc, var6);
11104 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
11106 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
11107 tree val = var2;
11109 x = lang_hooks.decls.omp_clause_default_ctor
11110 (c, var2, build_outer_var_ref (var, ctx));
11111 if (x)
11112 gimplify_and_add (x, &clist);
11114 x = build_outer_var_ref (var, ctx);
11115 x = lang_hooks.decls.omp_clause_assign_op (c, unshare_expr (var4),
11117 gimplify_and_add (x, &thr01_list);
11119 tree y = (DECL_HAS_VALUE_EXPR_P (new_vard)
11120 ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
11121 if (var3)
11123 x = unshare_expr (var4);
11124 x = lang_hooks.decls.omp_clause_assign_op (c, x, var3);
11125 gimplify_and_add (x, &thrn1_list);
11126 x = unshare_expr (var4);
11127 x = lang_hooks.decls.omp_clause_assign_op (c, x, var3);
11128 gimplify_and_add (x, &thr02_list);
11130 else if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
11132 /* Otherwise, assign to it the identity element. */
11133 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
11134 tseq = copy_gimple_seq_and_replace_locals (tseq);
11135 if (!is_for_simd)
11137 if (new_vard != new_var)
11138 val = build_fold_addr_expr_loc (clause_loc, val);
11139 SET_DECL_VALUE_EXPR (new_vard, val);
11140 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
11142 SET_DECL_VALUE_EXPR (placeholder, error_mark_node);
11143 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
11144 lower_omp (&tseq, ctx);
11145 gimple_seq_add_seq (&thrn1_list, tseq);
11146 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
11147 lower_omp (&tseq, ctx);
11148 gimple_seq_add_seq (&thr02_list, tseq);
11149 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
11150 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
11151 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
11152 if (y)
11153 SET_DECL_VALUE_EXPR (new_vard, y);
11154 else
11156 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
11157 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
11161 x = unshare_expr (var4);
11162 x = lang_hooks.decls.omp_clause_assign_op (c, x, rprivam1_ref);
11163 gimplify_and_add (x, &thrn2_list);
11165 if (is_for_simd)
11167 x = unshare_expr (rprivb_ref);
11168 x = lang_hooks.decls.omp_clause_assign_op (c, x, var5);
11169 gimplify_and_add (x, &scan1_list);
11171 else
11173 if (ctx->scan_exclusive)
11175 x = unshare_expr (rprivb_ref);
11176 x = lang_hooks.decls.omp_clause_assign_op (c, x, var2);
11177 gimplify_and_add (x, &scan1_list);
11180 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
11181 tseq = copy_gimple_seq_and_replace_locals (tseq);
11182 SET_DECL_VALUE_EXPR (placeholder, var2);
11183 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
11184 lower_omp (&tseq, ctx);
11185 gimple_seq_add_seq (&scan1_list, tseq);
11187 if (ctx->scan_inclusive)
11189 x = unshare_expr (rprivb_ref);
11190 x = lang_hooks.decls.omp_clause_assign_op (c, x, var2);
11191 gimplify_and_add (x, &scan1_list);
11195 x = unshare_expr (rpriva_ref);
11196 x = lang_hooks.decls.omp_clause_assign_op (c, x,
11197 unshare_expr (var4));
11198 gimplify_and_add (x, &mdlist);
11200 x = unshare_expr (is_for_simd ? var6 : new_var);
11201 x = lang_hooks.decls.omp_clause_assign_op (c, x, var4);
11202 gimplify_and_add (x, &input2_list);
11204 val = rprivb_ref;
11205 if (new_vard != new_var)
11206 val = build_fold_addr_expr_loc (clause_loc, val);
11208 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
11209 tseq = copy_gimple_seq_and_replace_locals (tseq);
11210 SET_DECL_VALUE_EXPR (new_vard, val);
11211 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
11212 if (is_for_simd)
11214 SET_DECL_VALUE_EXPR (placeholder, var6);
11215 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
11217 else
11218 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
11219 lower_omp (&tseq, ctx);
11220 if (y)
11221 SET_DECL_VALUE_EXPR (new_vard, y);
11222 else
11224 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
11225 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
11227 if (!is_for_simd)
11229 SET_DECL_VALUE_EXPR (placeholder, new_var);
11230 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
11231 lower_omp (&tseq, ctx);
11233 gimple_seq_add_seq (&input2_list, tseq);
11235 x = build_outer_var_ref (var, ctx);
11236 x = lang_hooks.decls.omp_clause_assign_op (c, x, rpriva_ref);
11237 gimplify_and_add (x, &last_list);
11239 x = lang_hooks.decls.omp_clause_assign_op (c, var2, rprivalmk_ref);
11240 gimplify_and_add (x, &reduc_list);
11241 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
11242 tseq = copy_gimple_seq_and_replace_locals (tseq);
11243 val = rprival_ref;
11244 if (new_vard != new_var)
11245 val = build_fold_addr_expr_loc (clause_loc, val);
11246 SET_DECL_VALUE_EXPR (new_vard, val);
11247 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
11248 SET_DECL_VALUE_EXPR (placeholder, var2);
11249 lower_omp (&tseq, ctx);
11250 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
11251 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
11252 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
11253 if (y)
11254 SET_DECL_VALUE_EXPR (new_vard, y);
11255 else
11257 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
11258 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
11260 gimple_seq_add_seq (&reduc_list, tseq);
11261 x = lang_hooks.decls.omp_clause_assign_op (c, rprival_ref, var2);
11262 gimplify_and_add (x, &reduc_list);
11264 x = lang_hooks.decls.omp_clause_dtor (c, var2);
11265 if (x)
11266 gimplify_and_add (x, dlist);
11268 else
11270 x = build_outer_var_ref (var, ctx);
11271 gimplify_assign (unshare_expr (var4), x, &thr01_list);
11273 x = omp_reduction_init (c, TREE_TYPE (new_var));
11274 gimplify_assign (unshare_expr (var4), unshare_expr (x),
11275 &thrn1_list);
11276 gimplify_assign (unshare_expr (var4), x, &thr02_list);
11278 gimplify_assign (unshare_expr (var4), rprivam1_ref, &thrn2_list);
11280 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
11281 if (code == MINUS_EXPR)
11282 code = PLUS_EXPR;
11284 if (is_for_simd)
11285 gimplify_assign (unshare_expr (rprivb_ref), var5, &scan1_list);
11286 else
11288 if (ctx->scan_exclusive)
11289 gimplify_assign (unshare_expr (rprivb_ref), var2,
11290 &scan1_list);
11291 x = build2 (code, TREE_TYPE (new_var), var2, new_var);
11292 gimplify_assign (var2, x, &scan1_list);
11293 if (ctx->scan_inclusive)
11294 gimplify_assign (unshare_expr (rprivb_ref), var2,
11295 &scan1_list);
11298 gimplify_assign (unshare_expr (rpriva_ref), unshare_expr (var4),
11299 &mdlist);
11301 x = build2 (code, TREE_TYPE (new_var), var4, rprivb_ref);
11302 gimplify_assign (is_for_simd ? var6 : new_var, x, &input2_list);
11304 gimplify_assign (build_outer_var_ref (var, ctx), rpriva_ref,
11305 &last_list);
11307 x = build2 (code, TREE_TYPE (new_var), rprivalmk_ref,
11308 unshare_expr (rprival_ref));
11309 gimplify_assign (rprival_ref, x, &reduc_list);
11313 g = gimple_build_assign (ivar, PLUS_EXPR, ivar, size_one_node);
11314 gimple_seq_add_stmt (&scan1_list, g);
11315 g = gimple_build_assign (ivar, PLUS_EXPR, ivar, size_one_node);
11316 gimple_seq_add_stmt (gimple_omp_body_ptr (is_for_simd
11317 ? scan_stmt4 : scan_stmt2), g);
11319 tree controlb = create_tmp_var (boolean_type_node);
11320 tree controlp = create_tmp_var (ptr_type_node);
11321 tree nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
11322 OMP_CLAUSE_DECL (nc) = controlb;
11323 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
11324 *cp1 = nc;
11325 cp1 = &OMP_CLAUSE_CHAIN (nc);
11326 nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
11327 OMP_CLAUSE_DECL (nc) = controlp;
11328 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
11329 *cp1 = nc;
11330 cp1 = &OMP_CLAUSE_CHAIN (nc);
11331 nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
11332 OMP_CLAUSE_DECL (nc) = controlb;
11333 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
11334 *cp2 = nc;
11335 cp2 = &OMP_CLAUSE_CHAIN (nc);
11336 nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
11337 OMP_CLAUSE_DECL (nc) = controlp;
11338 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
11339 *cp2 = nc;
11340 cp2 = &OMP_CLAUSE_CHAIN (nc);
11342 *cp1 = gimple_omp_for_clauses (stmt);
11343 gimple_omp_for_set_clauses (stmt, new_clauses1);
11344 *cp2 = gimple_omp_for_clauses (new_stmt);
11345 gimple_omp_for_set_clauses (new_stmt, new_clauses2);
11347 if (is_for_simd)
11349 gimple_seq_add_seq (gimple_omp_body_ptr (scan_stmt3), scan1_list);
11350 gimple_seq_add_seq (gimple_omp_body_ptr (input_stmt4), input2_list);
11352 gsi_insert_seq_after (&input3_gsi, gimple_omp_body (input_stmt3),
11353 GSI_SAME_STMT);
11354 gsi_remove (&input3_gsi, true);
11355 gsi_insert_seq_after (&scan3_gsi, gimple_omp_body (scan_stmt3),
11356 GSI_SAME_STMT);
11357 gsi_remove (&scan3_gsi, true);
11358 gsi_insert_seq_after (&input4_gsi, gimple_omp_body (input_stmt4),
11359 GSI_SAME_STMT);
11360 gsi_remove (&input4_gsi, true);
11361 gsi_insert_seq_after (&scan4_gsi, gimple_omp_body (scan_stmt4),
11362 GSI_SAME_STMT);
11363 gsi_remove (&scan4_gsi, true);
11365 else
11367 gimple_omp_set_body (scan_stmt1, scan1_list);
11368 gimple_omp_set_body (input_stmt2, input2_list);
11371 gsi_insert_seq_after (&input1_gsi, gimple_omp_body (input_stmt1),
11372 GSI_SAME_STMT);
11373 gsi_remove (&input1_gsi, true);
11374 gsi_insert_seq_after (&scan1_gsi, gimple_omp_body (scan_stmt1),
11375 GSI_SAME_STMT);
11376 gsi_remove (&scan1_gsi, true);
11377 gsi_insert_seq_after (&input2_gsi, gimple_omp_body (input_stmt2),
11378 GSI_SAME_STMT);
11379 gsi_remove (&input2_gsi, true);
11380 gsi_insert_seq_after (&scan2_gsi, gimple_omp_body (scan_stmt2),
11381 GSI_SAME_STMT);
11382 gsi_remove (&scan2_gsi, true);
11384 gimple_seq_add_seq (body_p, clist);
11386 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
11387 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
11388 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
11389 g = gimple_build_cond (EQ_EXPR, thread_num, integer_zero_node, lab1, lab2);
11390 gimple_seq_add_stmt (body_p, g);
11391 g = gimple_build_label (lab1);
11392 gimple_seq_add_stmt (body_p, g);
11393 gimple_seq_add_seq (body_p, thr01_list);
11394 g = gimple_build_goto (lab3);
11395 gimple_seq_add_stmt (body_p, g);
11396 g = gimple_build_label (lab2);
11397 gimple_seq_add_stmt (body_p, g);
11398 gimple_seq_add_seq (body_p, thrn1_list);
11399 g = gimple_build_label (lab3);
11400 gimple_seq_add_stmt (body_p, g);
11402 g = gimple_build_assign (ivar, size_zero_node);
11403 gimple_seq_add_stmt (body_p, g);
11405 gimple_seq_add_stmt (body_p, stmt);
11406 gimple_seq_add_seq (body_p, body);
11407 gimple_seq_add_stmt (body_p, gimple_build_omp_continue (fd->loop.v,
11408 fd->loop.v));
11410 g = gimple_build_omp_return (true);
11411 gimple_seq_add_stmt (body_p, g);
11412 gimple_seq_add_seq (body_p, mdlist);
11414 lab1 = create_artificial_label (UNKNOWN_LOCATION);
11415 lab2 = create_artificial_label (UNKNOWN_LOCATION);
11416 g = gimple_build_cond (GT_EXPR, num_threads, integer_one_node, lab1, lab2);
11417 gimple_seq_add_stmt (body_p, g);
11418 g = gimple_build_label (lab1);
11419 gimple_seq_add_stmt (body_p, g);
11421 g = omp_build_barrier (NULL);
11422 gimple_seq_add_stmt (body_p, g);
11424 tree down = create_tmp_var (unsigned_type_node);
11425 g = gimple_build_assign (down, build_zero_cst (unsigned_type_node));
11426 gimple_seq_add_stmt (body_p, g);
11428 g = gimple_build_assign (k, build_one_cst (unsigned_type_node));
11429 gimple_seq_add_stmt (body_p, g);
11431 tree num_threadsu = create_tmp_var (unsigned_type_node);
11432 g = gimple_build_assign (num_threadsu, NOP_EXPR, num_threads);
11433 gimple_seq_add_stmt (body_p, g);
11435 tree thread_numu = create_tmp_var (unsigned_type_node);
11436 g = gimple_build_assign (thread_numu, NOP_EXPR, thread_num);
11437 gimple_seq_add_stmt (body_p, g);
11439 tree thread_nump1 = create_tmp_var (unsigned_type_node);
11440 g = gimple_build_assign (thread_nump1, PLUS_EXPR, thread_numu,
11441 build_int_cst (unsigned_type_node, 1));
11442 gimple_seq_add_stmt (body_p, g);
11444 lab3 = create_artificial_label (UNKNOWN_LOCATION);
11445 g = gimple_build_label (lab3);
11446 gimple_seq_add_stmt (body_p, g);
11448 tree twok = create_tmp_var (unsigned_type_node);
11449 g = gimple_build_assign (twok, LSHIFT_EXPR, k, integer_one_node);
11450 gimple_seq_add_stmt (body_p, g);
11452 tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
11453 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
11454 tree lab6 = create_artificial_label (UNKNOWN_LOCATION);
11455 g = gimple_build_cond (GT_EXPR, twok, num_threadsu, lab4, lab5);
11456 gimple_seq_add_stmt (body_p, g);
11457 g = gimple_build_label (lab4);
11458 gimple_seq_add_stmt (body_p, g);
11459 g = gimple_build_assign (down, build_all_ones_cst (unsigned_type_node));
11460 gimple_seq_add_stmt (body_p, g);
11461 g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
11462 gimple_seq_add_stmt (body_p, g);
11464 g = gimple_build_cond (EQ_EXPR, k, num_threadsu, lab6, lab5);
11465 gimple_seq_add_stmt (body_p, g);
11466 g = gimple_build_label (lab6);
11467 gimple_seq_add_stmt (body_p, g);
11469 g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
11470 gimple_seq_add_stmt (body_p, g);
11472 g = gimple_build_label (lab5);
11473 gimple_seq_add_stmt (body_p, g);
11475 g = gimple_build_assign (twok, LSHIFT_EXPR, k, integer_one_node);
11476 gimple_seq_add_stmt (body_p, g);
11478 tree cplx = create_tmp_var (build_complex_type (unsigned_type_node, false));
11479 g = gimple_build_call_internal (IFN_MUL_OVERFLOW, 2, thread_nump1, twok);
11480 gimple_call_set_lhs (g, cplx);
11481 gimple_seq_add_stmt (body_p, g);
11482 tree mul = create_tmp_var (unsigned_type_node);
11483 g = gimple_build_assign (mul, REALPART_EXPR,
11484 build1 (REALPART_EXPR, unsigned_type_node, cplx));
11485 gimple_seq_add_stmt (body_p, g);
11486 tree ovf = create_tmp_var (unsigned_type_node);
11487 g = gimple_build_assign (ovf, IMAGPART_EXPR,
11488 build1 (IMAGPART_EXPR, unsigned_type_node, cplx));
11489 gimple_seq_add_stmt (body_p, g);
11491 tree lab7 = create_artificial_label (UNKNOWN_LOCATION);
11492 tree lab8 = create_artificial_label (UNKNOWN_LOCATION);
11493 g = gimple_build_cond (EQ_EXPR, ovf, build_zero_cst (unsigned_type_node),
11494 lab7, lab8);
11495 gimple_seq_add_stmt (body_p, g);
11496 g = gimple_build_label (lab7);
11497 gimple_seq_add_stmt (body_p, g);
11499 tree andv = create_tmp_var (unsigned_type_node);
11500 g = gimple_build_assign (andv, BIT_AND_EXPR, k, down);
11501 gimple_seq_add_stmt (body_p, g);
11502 tree andvm1 = create_tmp_var (unsigned_type_node);
11503 g = gimple_build_assign (andvm1, PLUS_EXPR, andv,
11504 build_minus_one_cst (unsigned_type_node));
11505 gimple_seq_add_stmt (body_p, g);
11507 g = gimple_build_assign (l, PLUS_EXPR, mul, andvm1);
11508 gimple_seq_add_stmt (body_p, g);
11510 tree lab9 = create_artificial_label (UNKNOWN_LOCATION);
11511 g = gimple_build_cond (LT_EXPR, l, num_threadsu, lab9, lab8);
11512 gimple_seq_add_stmt (body_p, g);
11513 g = gimple_build_label (lab9);
11514 gimple_seq_add_stmt (body_p, g);
11515 gimple_seq_add_seq (body_p, reduc_list);
11516 g = gimple_build_label (lab8);
11517 gimple_seq_add_stmt (body_p, g);
11519 tree lab10 = create_artificial_label (UNKNOWN_LOCATION);
11520 tree lab11 = create_artificial_label (UNKNOWN_LOCATION);
11521 tree lab12 = create_artificial_label (UNKNOWN_LOCATION);
11522 g = gimple_build_cond (EQ_EXPR, down, build_zero_cst (unsigned_type_node),
11523 lab10, lab11);
11524 gimple_seq_add_stmt (body_p, g);
11525 g = gimple_build_label (lab10);
11526 gimple_seq_add_stmt (body_p, g);
11527 g = gimple_build_assign (k, LSHIFT_EXPR, k, integer_one_node);
11528 gimple_seq_add_stmt (body_p, g);
11529 g = gimple_build_goto (lab12);
11530 gimple_seq_add_stmt (body_p, g);
11531 g = gimple_build_label (lab11);
11532 gimple_seq_add_stmt (body_p, g);
11533 g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
11534 gimple_seq_add_stmt (body_p, g);
11535 g = gimple_build_label (lab12);
11536 gimple_seq_add_stmt (body_p, g);
11538 g = omp_build_barrier (NULL);
11539 gimple_seq_add_stmt (body_p, g);
11541 g = gimple_build_cond (NE_EXPR, k, build_zero_cst (unsigned_type_node),
11542 lab3, lab2);
11543 gimple_seq_add_stmt (body_p, g);
11545 g = gimple_build_label (lab2);
11546 gimple_seq_add_stmt (body_p, g);
11548 lab1 = create_artificial_label (UNKNOWN_LOCATION);
11549 lab2 = create_artificial_label (UNKNOWN_LOCATION);
11550 lab3 = create_artificial_label (UNKNOWN_LOCATION);
11551 g = gimple_build_cond (EQ_EXPR, thread_num, integer_zero_node, lab1, lab2);
11552 gimple_seq_add_stmt (body_p, g);
11553 g = gimple_build_label (lab1);
11554 gimple_seq_add_stmt (body_p, g);
11555 gimple_seq_add_seq (body_p, thr02_list);
11556 g = gimple_build_goto (lab3);
11557 gimple_seq_add_stmt (body_p, g);
11558 g = gimple_build_label (lab2);
11559 gimple_seq_add_stmt (body_p, g);
11560 gimple_seq_add_seq (body_p, thrn2_list);
11561 g = gimple_build_label (lab3);
11562 gimple_seq_add_stmt (body_p, g);
11564 g = gimple_build_assign (ivar, size_zero_node);
11565 gimple_seq_add_stmt (body_p, g);
11566 gimple_seq_add_stmt (body_p, new_stmt);
11567 gimple_seq_add_seq (body_p, new_body);
11569 gimple_seq new_dlist = NULL;
11570 lab1 = create_artificial_label (UNKNOWN_LOCATION);
11571 lab2 = create_artificial_label (UNKNOWN_LOCATION);
11572 tree num_threadsm1 = create_tmp_var (integer_type_node);
11573 g = gimple_build_assign (num_threadsm1, PLUS_EXPR, num_threads,
11574 integer_minus_one_node);
11575 gimple_seq_add_stmt (&new_dlist, g);
11576 g = gimple_build_cond (EQ_EXPR, thread_num, num_threadsm1, lab1, lab2);
11577 gimple_seq_add_stmt (&new_dlist, g);
11578 g = gimple_build_label (lab1);
11579 gimple_seq_add_stmt (&new_dlist, g);
11580 gimple_seq_add_seq (&new_dlist, last_list);
11581 g = gimple_build_label (lab2);
11582 gimple_seq_add_stmt (&new_dlist, g);
11583 gimple_seq_add_seq (&new_dlist, *dlist);
11584 *dlist = new_dlist;
11587 /* Build an internal UNIQUE function with type IFN_UNIQUE_OACC_PRIVATE listing
11588 the addresses of variables to be made private at the surrounding
11589 parallelism level. Such functions appear in the gimple code stream in two
11590 forms, e.g. for a partitioned loop:
11592 .data_dep.6 = .UNIQUE (OACC_HEAD_MARK, .data_dep.6, 1, 68);
11593 .data_dep.6 = .UNIQUE (OACC_PRIVATE, .data_dep.6, -1, &w);
11594 .data_dep.6 = .UNIQUE (OACC_FORK, .data_dep.6, -1);
11595 .data_dep.6 = .UNIQUE (OACC_HEAD_MARK, .data_dep.6);
11597 or alternatively, OACC_PRIVATE can appear at the top level of a parallel,
11598 not as part of a HEAD_MARK sequence:
11600 .UNIQUE (OACC_PRIVATE, 0, 0, &w);
11602 For such stand-alone appearances, the 3rd argument is always 0, denoting
11603 gang partitioning. */
11605 static gcall *
11606 lower_oacc_private_marker (omp_context *ctx)
11608 if (ctx->oacc_privatization_candidates.length () == 0)
11609 return NULL;
11611 auto_vec<tree, 5> args;
11613 args.quick_push (build_int_cst (integer_type_node, IFN_UNIQUE_OACC_PRIVATE));
11614 args.quick_push (integer_zero_node);
11615 args.quick_push (integer_minus_one_node);
11617 int i;
11618 tree decl;
11619 FOR_EACH_VEC_ELT (ctx->oacc_privatization_candidates, i, decl)
11621 gcc_checking_assert (TREE_ADDRESSABLE (decl));
11622 tree addr = build_fold_addr_expr (decl);
11623 args.safe_push (addr);
11626 return gimple_build_call_internal_vec (IFN_UNIQUE, args);
11629 /* Lower code for an OMP loop directive. */
11631 static void
11632 lower_omp_for (gimple_stmt_iterator *gsi_p, omp_context *ctx)
11634 tree *rhs_p, block;
11635 struct omp_for_data fd, *fdp = NULL;
11636 gomp_for *stmt = as_a <gomp_for *> (gsi_stmt (*gsi_p));
11637 gbind *new_stmt;
11638 gimple_seq omp_for_body, body, dlist, tred_ilist = NULL, tred_dlist = NULL;
11639 gimple_seq cnt_list = NULL, clist = NULL;
11640 gimple_seq oacc_head = NULL, oacc_tail = NULL;
11641 size_t i;
11643 push_gimplify_context ();
11645 if (is_gimple_omp_oacc (ctx->stmt))
11646 oacc_privatization_scan_clause_chain (ctx, gimple_omp_for_clauses (stmt));
11648 lower_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
11650 block = make_node (BLOCK);
11651 new_stmt = gimple_build_bind (NULL, NULL, block);
11652 /* Replace at gsi right away, so that 'stmt' is no member
11653 of a sequence anymore as we're going to add to a different
11654 one below. */
11655 gsi_replace (gsi_p, new_stmt, true);
11657 /* Move declaration of temporaries in the loop body before we make
11658 it go away. */
11659 omp_for_body = gimple_omp_body (stmt);
11660 if (!gimple_seq_empty_p (omp_for_body)
11661 && gimple_code (gimple_seq_first_stmt (omp_for_body)) == GIMPLE_BIND)
11663 gbind *inner_bind
11664 = as_a <gbind *> (gimple_seq_first_stmt (omp_for_body));
11665 tree vars = gimple_bind_vars (inner_bind);
11666 if (is_gimple_omp_oacc (ctx->stmt))
11667 oacc_privatization_scan_decl_chain (ctx, vars);
11668 gimple_bind_append_vars (new_stmt, vars);
11669 /* bind_vars/BLOCK_VARS are being moved to new_stmt/block, don't
11670 keep them on the inner_bind and it's block. */
11671 gimple_bind_set_vars (inner_bind, NULL_TREE);
11672 if (gimple_bind_block (inner_bind))
11673 BLOCK_VARS (gimple_bind_block (inner_bind)) = NULL_TREE;
11676 if (gimple_omp_for_combined_into_p (stmt))
11678 omp_extract_for_data (stmt, &fd, NULL);
11679 fdp = &fd;
11681 /* We need two temporaries with fd.loop.v type (istart/iend)
11682 and then (fd.collapse - 1) temporaries with the same
11683 type for count2 ... countN-1 vars if not constant. */
11684 size_t count = 2;
11685 tree type = fd.iter_type;
11686 if (fd.collapse > 1
11687 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
11688 count += fd.collapse - 1;
11689 size_t count2 = 0;
11690 tree type2 = NULL_TREE;
11691 bool taskreg_for
11692 = (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR
11693 || gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP);
11694 tree outerc = NULL, *pc = gimple_omp_for_clauses_ptr (stmt);
11695 tree simtc = NULL;
11696 tree clauses = *pc;
11697 if (fd.collapse > 1
11698 && fd.non_rect
11699 && fd.last_nonrect == fd.first_nonrect + 1
11700 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
11701 if (tree v = gimple_omp_for_index (stmt, fd.last_nonrect))
11702 if (!TYPE_UNSIGNED (TREE_TYPE (v)))
11704 v = gimple_omp_for_index (stmt, fd.first_nonrect);
11705 type2 = TREE_TYPE (v);
11706 count++;
11707 count2 = 3;
11709 if (taskreg_for)
11710 outerc
11711 = omp_find_clause (gimple_omp_taskreg_clauses (ctx->outer->stmt),
11712 OMP_CLAUSE__LOOPTEMP_);
11713 if (ctx->simt_stmt)
11714 simtc = omp_find_clause (gimple_omp_for_clauses (ctx->simt_stmt),
11715 OMP_CLAUSE__LOOPTEMP_);
11716 for (i = 0; i < count + count2; i++)
11718 tree temp;
11719 if (taskreg_for)
11721 gcc_assert (outerc);
11722 temp = lookup_decl (OMP_CLAUSE_DECL (outerc), ctx->outer);
11723 outerc = omp_find_clause (OMP_CLAUSE_CHAIN (outerc),
11724 OMP_CLAUSE__LOOPTEMP_);
11726 else
11728 /* If there are 2 adjacent SIMD stmts, one with _simt_
11729 clause, another without, make sure they have the same
11730 decls in _looptemp_ clauses, because the outer stmt
11731 they are combined into will look up just one inner_stmt. */
11732 if (ctx->simt_stmt)
11733 temp = OMP_CLAUSE_DECL (simtc);
11734 else
11735 temp = create_tmp_var (i >= count ? type2 : type);
11736 insert_decl_map (&ctx->outer->cb, temp, temp);
11738 *pc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
11739 OMP_CLAUSE_DECL (*pc) = temp;
11740 pc = &OMP_CLAUSE_CHAIN (*pc);
11741 if (ctx->simt_stmt)
11742 simtc = omp_find_clause (OMP_CLAUSE_CHAIN (simtc),
11743 OMP_CLAUSE__LOOPTEMP_);
11745 *pc = clauses;
11748 /* The pre-body and input clauses go before the lowered GIMPLE_OMP_FOR. */
11749 dlist = NULL;
11750 body = NULL;
11751 tree rclauses
11752 = omp_task_reductions_find_first (gimple_omp_for_clauses (stmt), OMP_FOR,
11753 OMP_CLAUSE_REDUCTION);
11754 tree rtmp = NULL_TREE;
11755 if (rclauses)
11757 tree type = build_pointer_type (pointer_sized_int_node);
11758 tree temp = create_tmp_var (type);
11759 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
11760 OMP_CLAUSE_DECL (c) = temp;
11761 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (stmt);
11762 gimple_omp_for_set_clauses (stmt, c);
11763 lower_omp_task_reductions (ctx, OMP_FOR,
11764 gimple_omp_for_clauses (stmt),
11765 &tred_ilist, &tred_dlist);
11766 rclauses = c;
11767 rtmp = make_ssa_name (type);
11768 gimple_seq_add_stmt (&body, gimple_build_assign (rtmp, temp));
11771 lower_lastprivate_conditional_clauses (gimple_omp_for_clauses_ptr (stmt),
11772 ctx);
11774 lower_rec_input_clauses (gimple_omp_for_clauses (stmt), &body, &dlist, ctx,
11775 fdp);
11776 gimple_seq_add_seq (rclauses ? &tred_ilist : &body,
11777 gimple_omp_for_pre_body (stmt));
11779 lower_omp (gimple_omp_body_ptr (stmt), ctx);
11781 gcall *private_marker = NULL;
11782 if (is_gimple_omp_oacc (ctx->stmt)
11783 && !gimple_seq_empty_p (omp_for_body))
11784 private_marker = lower_oacc_private_marker (ctx);
11786 /* Lower the header expressions. At this point, we can assume that
11787 the header is of the form:
11789 #pragma omp for (V = VAL1; V {<|>|<=|>=} VAL2; V = V [+-] VAL3)
11791 We just need to make sure that VAL1, VAL2 and VAL3 are lowered
11792 using the .omp_data_s mapping, if needed. */
11793 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
11795 rhs_p = gimple_omp_for_initial_ptr (stmt, i);
11796 if (TREE_CODE (*rhs_p) == TREE_VEC)
11798 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 1)))
11799 TREE_VEC_ELT (*rhs_p, 1)
11800 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 1), &cnt_list);
11801 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 2)))
11802 TREE_VEC_ELT (*rhs_p, 2)
11803 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 2), &cnt_list);
11805 else if (!is_gimple_min_invariant (*rhs_p))
11806 *rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
11807 else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
11808 recompute_tree_invariant_for_addr_expr (*rhs_p);
11810 rhs_p = gimple_omp_for_final_ptr (stmt, i);
11811 if (TREE_CODE (*rhs_p) == TREE_VEC)
11813 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 1)))
11814 TREE_VEC_ELT (*rhs_p, 1)
11815 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 1), &cnt_list);
11816 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 2)))
11817 TREE_VEC_ELT (*rhs_p, 2)
11818 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 2), &cnt_list);
11820 else if (!is_gimple_min_invariant (*rhs_p))
11821 *rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
11822 else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
11823 recompute_tree_invariant_for_addr_expr (*rhs_p);
11825 rhs_p = &TREE_OPERAND (gimple_omp_for_incr (stmt, i), 1);
11826 if (!is_gimple_min_invariant (*rhs_p))
11827 *rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
11829 if (rclauses)
11830 gimple_seq_add_seq (&tred_ilist, cnt_list);
11831 else
11832 gimple_seq_add_seq (&body, cnt_list);
11834 /* Once lowered, extract the bounds and clauses. */
11835 omp_extract_for_data (stmt, &fd, NULL);
11837 if (is_gimple_omp_oacc (ctx->stmt)
11838 && !ctx_in_oacc_kernels_region (ctx))
11839 lower_oacc_head_tail (gimple_location (stmt),
11840 gimple_omp_for_clauses (stmt), private_marker,
11841 &oacc_head, &oacc_tail, ctx);
11843 /* Add OpenACC partitioning and reduction markers just before the loop. */
11844 if (oacc_head)
11845 gimple_seq_add_seq (&body, oacc_head);
11847 lower_omp_for_lastprivate (&fd, &body, &dlist, &clist, ctx);
11849 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
11850 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
11851 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
11852 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
11854 OMP_CLAUSE_DECL (c) = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
11855 if (DECL_P (OMP_CLAUSE_LINEAR_STEP (c)))
11856 OMP_CLAUSE_LINEAR_STEP (c)
11857 = maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_LINEAR_STEP (c),
11858 ctx);
11861 if ((ctx->scan_inclusive || ctx->scan_exclusive)
11862 && gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
11863 lower_omp_for_scan (&body, &dlist, stmt, &fd, ctx);
11864 else
11866 gimple_seq_add_stmt (&body, stmt);
11867 gimple_seq_add_seq (&body, gimple_omp_body (stmt));
11870 gimple_seq_add_stmt (&body, gimple_build_omp_continue (fd.loop.v,
11871 fd.loop.v));
11873 /* After the loop, add exit clauses. */
11874 lower_reduction_clauses (gimple_omp_for_clauses (stmt), &body, &clist, ctx);
11876 if (clist)
11878 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START);
11879 gcall *g = gimple_build_call (fndecl, 0);
11880 gimple_seq_add_stmt (&body, g);
11881 gimple_seq_add_seq (&body, clist);
11882 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END);
11883 g = gimple_build_call (fndecl, 0);
11884 gimple_seq_add_stmt (&body, g);
11887 if (ctx->cancellable)
11888 gimple_seq_add_stmt (&body, gimple_build_label (ctx->cancel_label));
11890 gimple_seq_add_seq (&body, dlist);
11892 if (rclauses)
11894 gimple_seq_add_seq (&tred_ilist, body);
11895 body = tred_ilist;
11898 body = maybe_catch_exception (body);
11900 /* Region exit marker goes at the end of the loop body. */
11901 gimple *g = gimple_build_omp_return (fd.have_nowait);
11902 gimple_seq_add_stmt (&body, g);
11904 gimple_seq_add_seq (&body, tred_dlist);
11906 maybe_add_implicit_barrier_cancel (ctx, g, &body);
11908 if (rclauses)
11909 OMP_CLAUSE_DECL (rclauses) = rtmp;
11911 /* Add OpenACC joining and reduction markers just after the loop. */
11912 if (oacc_tail)
11913 gimple_seq_add_seq (&body, oacc_tail);
11915 pop_gimplify_context (new_stmt);
11917 gimple_bind_append_vars (new_stmt, ctx->block_vars);
11918 maybe_remove_omp_member_access_dummy_vars (new_stmt);
11919 BLOCK_VARS (block) = gimple_bind_vars (new_stmt);
11920 if (BLOCK_VARS (block))
11921 TREE_USED (block) = 1;
11923 gimple_bind_set_body (new_stmt, body);
11924 gimple_omp_set_body (stmt, NULL);
11925 gimple_omp_for_set_pre_body (stmt, NULL);
11928 /* Callback for walk_stmts. Check if the current statement only contains
11929 GIMPLE_OMP_FOR or GIMPLE_OMP_SECTIONS. */
11931 static tree
11932 check_combined_parallel (gimple_stmt_iterator *gsi_p,
11933 bool *handled_ops_p,
11934 struct walk_stmt_info *wi)
11936 int *info = (int *) wi->info;
11937 gimple *stmt = gsi_stmt (*gsi_p);
11939 *handled_ops_p = true;
11940 switch (gimple_code (stmt))
11942 WALK_SUBSTMTS;
11944 case GIMPLE_DEBUG:
11945 break;
11946 case GIMPLE_OMP_FOR:
11947 case GIMPLE_OMP_SECTIONS:
11948 *info = *info == 0 ? 1 : -1;
11949 break;
11950 default:
11951 *info = -1;
11952 break;
11954 return NULL;
11957 struct omp_taskcopy_context
11959 /* This field must be at the beginning, as we do "inheritance": Some
11960 callback functions for tree-inline.cc (e.g., omp_copy_decl)
11961 receive a copy_body_data pointer that is up-casted to an
11962 omp_context pointer. */
11963 copy_body_data cb;
11964 omp_context *ctx;
11967 static tree
11968 task_copyfn_copy_decl (tree var, copy_body_data *cb)
11970 struct omp_taskcopy_context *tcctx = (struct omp_taskcopy_context *) cb;
11972 if (splay_tree_lookup (tcctx->ctx->sfield_map, (splay_tree_key) var))
11973 return create_tmp_var (TREE_TYPE (var));
11975 return var;
11978 static tree
11979 task_copyfn_remap_type (struct omp_taskcopy_context *tcctx, tree orig_type)
11981 tree name, new_fields = NULL, type, f;
11983 type = lang_hooks.types.make_type (RECORD_TYPE);
11984 name = DECL_NAME (TYPE_NAME (orig_type));
11985 name = build_decl (gimple_location (tcctx->ctx->stmt),
11986 TYPE_DECL, name, type);
11987 TYPE_NAME (type) = name;
11989 for (f = TYPE_FIELDS (orig_type); f ; f = TREE_CHAIN (f))
11991 tree new_f = copy_node (f);
11992 DECL_CONTEXT (new_f) = type;
11993 TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &tcctx->cb);
11994 TREE_CHAIN (new_f) = new_fields;
11995 walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &tcctx->cb, NULL);
11996 walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r, &tcctx->cb, NULL);
11997 walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
11998 &tcctx->cb, NULL);
11999 new_fields = new_f;
12000 tcctx->cb.decl_map->put (f, new_f);
12002 TYPE_FIELDS (type) = nreverse (new_fields);
12003 layout_type (type);
12004 return type;
12007 /* Create task copyfn. */
12009 static void
12010 create_task_copyfn (gomp_task *task_stmt, omp_context *ctx)
12012 struct function *child_cfun;
12013 tree child_fn, t, c, src, dst, f, sf, arg, sarg, decl;
12014 tree record_type, srecord_type, bind, list;
12015 bool record_needs_remap = false, srecord_needs_remap = false;
12016 splay_tree_node n;
12017 struct omp_taskcopy_context tcctx;
12018 location_t loc = gimple_location (task_stmt);
12019 size_t looptempno = 0;
12021 child_fn = gimple_omp_task_copy_fn (task_stmt);
12022 task_cpyfns.safe_push (task_stmt);
12023 child_cfun = DECL_STRUCT_FUNCTION (child_fn);
12024 gcc_assert (child_cfun->cfg == NULL);
12025 DECL_SAVED_TREE (child_fn) = alloc_stmt_list ();
12027 /* Reset DECL_CONTEXT on function arguments. */
12028 for (t = DECL_ARGUMENTS (child_fn); t; t = DECL_CHAIN (t))
12029 DECL_CONTEXT (t) = child_fn;
12031 /* Populate the function. */
12032 push_gimplify_context ();
12033 push_cfun (child_cfun);
12035 bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
12036 TREE_SIDE_EFFECTS (bind) = 1;
12037 list = NULL;
12038 DECL_SAVED_TREE (child_fn) = bind;
12039 DECL_SOURCE_LOCATION (child_fn) = gimple_location (task_stmt);
12041 /* Remap src and dst argument types if needed. */
12042 record_type = ctx->record_type;
12043 srecord_type = ctx->srecord_type;
12044 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
12045 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
12047 record_needs_remap = true;
12048 break;
12050 for (f = TYPE_FIELDS (srecord_type); f ; f = DECL_CHAIN (f))
12051 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
12053 srecord_needs_remap = true;
12054 break;
12057 if (record_needs_remap || srecord_needs_remap)
12059 memset (&tcctx, '\0', sizeof (tcctx));
12060 tcctx.cb.src_fn = ctx->cb.src_fn;
12061 tcctx.cb.dst_fn = child_fn;
12062 tcctx.cb.src_node = cgraph_node::get (tcctx.cb.src_fn);
12063 gcc_checking_assert (tcctx.cb.src_node);
12064 tcctx.cb.dst_node = tcctx.cb.src_node;
12065 tcctx.cb.src_cfun = ctx->cb.src_cfun;
12066 tcctx.cb.copy_decl = task_copyfn_copy_decl;
12067 tcctx.cb.eh_lp_nr = 0;
12068 tcctx.cb.transform_call_graph_edges = CB_CGE_MOVE;
12069 tcctx.cb.decl_map = new hash_map<tree, tree>;
12070 tcctx.ctx = ctx;
12072 if (record_needs_remap)
12073 record_type = task_copyfn_remap_type (&tcctx, record_type);
12074 if (srecord_needs_remap)
12075 srecord_type = task_copyfn_remap_type (&tcctx, srecord_type);
12077 else
12078 tcctx.cb.decl_map = NULL;
12080 arg = DECL_ARGUMENTS (child_fn);
12081 TREE_TYPE (arg) = build_pointer_type (record_type);
12082 sarg = DECL_CHAIN (arg);
12083 TREE_TYPE (sarg) = build_pointer_type (srecord_type);
12085 /* First pass: initialize temporaries used in record_type and srecord_type
12086 sizes and field offsets. */
12087 if (tcctx.cb.decl_map)
12088 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
12089 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
12091 tree *p;
12093 decl = OMP_CLAUSE_DECL (c);
12094 p = tcctx.cb.decl_map->get (decl);
12095 if (p == NULL)
12096 continue;
12097 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
12098 sf = (tree) n->value;
12099 sf = *tcctx.cb.decl_map->get (sf);
12100 src = build_simple_mem_ref_loc (loc, sarg);
12101 src = omp_build_component_ref (src, sf);
12102 t = build2 (MODIFY_EXPR, TREE_TYPE (*p), *p, src);
12103 append_to_statement_list (t, &list);
12106 /* Second pass: copy shared var pointers and copy construct non-VLA
12107 firstprivate vars. */
12108 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
12109 switch (OMP_CLAUSE_CODE (c))
12111 splay_tree_key key;
12112 case OMP_CLAUSE_SHARED:
12113 decl = OMP_CLAUSE_DECL (c);
12114 key = (splay_tree_key) decl;
12115 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
12116 key = (splay_tree_key) &DECL_UID (decl);
12117 n = splay_tree_lookup (ctx->field_map, key);
12118 if (n == NULL)
12119 break;
12120 f = (tree) n->value;
12121 if (tcctx.cb.decl_map)
12122 f = *tcctx.cb.decl_map->get (f);
12123 n = splay_tree_lookup (ctx->sfield_map, key);
12124 sf = (tree) n->value;
12125 if (tcctx.cb.decl_map)
12126 sf = *tcctx.cb.decl_map->get (sf);
12127 src = build_simple_mem_ref_loc (loc, sarg);
12128 src = omp_build_component_ref (src, sf);
12129 dst = build_simple_mem_ref_loc (loc, arg);
12130 dst = omp_build_component_ref (dst, f);
12131 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
12132 append_to_statement_list (t, &list);
12133 break;
12134 case OMP_CLAUSE_REDUCTION:
12135 case OMP_CLAUSE_IN_REDUCTION:
12136 decl = OMP_CLAUSE_DECL (c);
12137 if (TREE_CODE (decl) == MEM_REF)
12139 decl = TREE_OPERAND (decl, 0);
12140 if (TREE_CODE (decl) == POINTER_PLUS_EXPR)
12141 decl = TREE_OPERAND (decl, 0);
12142 if (TREE_CODE (decl) == INDIRECT_REF
12143 || TREE_CODE (decl) == ADDR_EXPR)
12144 decl = TREE_OPERAND (decl, 0);
12146 key = (splay_tree_key) decl;
12147 n = splay_tree_lookup (ctx->field_map, key);
12148 if (n == NULL)
12149 break;
12150 f = (tree) n->value;
12151 if (tcctx.cb.decl_map)
12152 f = *tcctx.cb.decl_map->get (f);
12153 n = splay_tree_lookup (ctx->sfield_map, key);
12154 sf = (tree) n->value;
12155 if (tcctx.cb.decl_map)
12156 sf = *tcctx.cb.decl_map->get (sf);
12157 src = build_simple_mem_ref_loc (loc, sarg);
12158 src = omp_build_component_ref (src, sf);
12159 if (decl != OMP_CLAUSE_DECL (c)
12160 && TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
12161 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
12162 src = build_simple_mem_ref_loc (loc, src);
12163 dst = build_simple_mem_ref_loc (loc, arg);
12164 dst = omp_build_component_ref (dst, f);
12165 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
12166 append_to_statement_list (t, &list);
12167 break;
12168 case OMP_CLAUSE__LOOPTEMP_:
12169 /* Fields for first two _looptemp_ clauses are initialized by
12170 GOMP_taskloop*, the rest are handled like firstprivate. */
12171 if (looptempno < 2)
12173 looptempno++;
12174 break;
12176 /* FALLTHRU */
12177 case OMP_CLAUSE__REDUCTEMP_:
12178 case OMP_CLAUSE_FIRSTPRIVATE:
12179 decl = OMP_CLAUSE_DECL (c);
12180 if (is_variable_sized (decl))
12181 break;
12182 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
12183 if (n == NULL)
12184 break;
12185 f = (tree) n->value;
12186 if (tcctx.cb.decl_map)
12187 f = *tcctx.cb.decl_map->get (f);
12188 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
12189 if (n != NULL)
12191 sf = (tree) n->value;
12192 if (tcctx.cb.decl_map)
12193 sf = *tcctx.cb.decl_map->get (sf);
12194 src = build_simple_mem_ref_loc (loc, sarg);
12195 src = omp_build_component_ref (src, sf);
12196 if (use_pointer_for_field (decl, NULL)
12197 || omp_privatize_by_reference (decl))
12198 src = build_simple_mem_ref_loc (loc, src);
12200 else
12201 src = decl;
12202 dst = build_simple_mem_ref_loc (loc, arg);
12203 dst = omp_build_component_ref (dst, f);
12204 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
12205 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
12206 else
12208 if (ctx->allocate_map)
12209 if (tree *allocatorp = ctx->allocate_map->get (decl))
12211 tree allocator = *allocatorp;
12212 HOST_WIDE_INT ialign = 0;
12213 if (TREE_CODE (allocator) == TREE_LIST)
12215 ialign = tree_to_uhwi (TREE_VALUE (allocator));
12216 allocator = TREE_PURPOSE (allocator);
12218 if (TREE_CODE (allocator) != INTEGER_CST)
12220 n = splay_tree_lookup (ctx->sfield_map,
12221 (splay_tree_key) allocator);
12222 allocator = (tree) n->value;
12223 if (tcctx.cb.decl_map)
12224 allocator = *tcctx.cb.decl_map->get (allocator);
12225 tree a = build_simple_mem_ref_loc (loc, sarg);
12226 allocator = omp_build_component_ref (a, allocator);
12228 allocator = fold_convert (pointer_sized_int_node, allocator);
12229 tree a = builtin_decl_explicit (BUILT_IN_GOMP_ALLOC);
12230 tree align = build_int_cst (size_type_node,
12231 MAX (ialign,
12232 DECL_ALIGN_UNIT (decl)));
12233 tree sz = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (dst)));
12234 tree ptr = build_call_expr_loc (loc, a, 3, align, sz,
12235 allocator);
12236 ptr = fold_convert (TREE_TYPE (dst), ptr);
12237 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, ptr);
12238 append_to_statement_list (t, &list);
12239 dst = build_simple_mem_ref_loc (loc, dst);
12241 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
12243 append_to_statement_list (t, &list);
12244 break;
12245 case OMP_CLAUSE_PRIVATE:
12246 if (! OMP_CLAUSE_PRIVATE_OUTER_REF (c))
12247 break;
12248 decl = OMP_CLAUSE_DECL (c);
12249 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
12250 f = (tree) n->value;
12251 if (tcctx.cb.decl_map)
12252 f = *tcctx.cb.decl_map->get (f);
12253 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
12254 if (n != NULL)
12256 sf = (tree) n->value;
12257 if (tcctx.cb.decl_map)
12258 sf = *tcctx.cb.decl_map->get (sf);
12259 src = build_simple_mem_ref_loc (loc, sarg);
12260 src = omp_build_component_ref (src, sf);
12261 if (use_pointer_for_field (decl, NULL))
12262 src = build_simple_mem_ref_loc (loc, src);
12264 else
12265 src = decl;
12266 dst = build_simple_mem_ref_loc (loc, arg);
12267 dst = omp_build_component_ref (dst, f);
12268 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
12269 append_to_statement_list (t, &list);
12270 break;
12271 default:
12272 break;
12275 /* Last pass: handle VLA firstprivates. */
12276 if (tcctx.cb.decl_map)
12277 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
12278 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
12280 tree ind, ptr, df;
12282 decl = OMP_CLAUSE_DECL (c);
12283 if (!is_variable_sized (decl))
12284 continue;
12285 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
12286 if (n == NULL)
12287 continue;
12288 f = (tree) n->value;
12289 f = *tcctx.cb.decl_map->get (f);
12290 gcc_assert (DECL_HAS_VALUE_EXPR_P (decl));
12291 ind = DECL_VALUE_EXPR (decl);
12292 gcc_assert (TREE_CODE (ind) == INDIRECT_REF);
12293 gcc_assert (DECL_P (TREE_OPERAND (ind, 0)));
12294 n = splay_tree_lookup (ctx->sfield_map,
12295 (splay_tree_key) TREE_OPERAND (ind, 0));
12296 sf = (tree) n->value;
12297 sf = *tcctx.cb.decl_map->get (sf);
12298 src = build_simple_mem_ref_loc (loc, sarg);
12299 src = omp_build_component_ref (src, sf);
12300 src = build_simple_mem_ref_loc (loc, src);
12301 dst = build_simple_mem_ref_loc (loc, arg);
12302 dst = omp_build_component_ref (dst, f);
12303 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
12304 append_to_statement_list (t, &list);
12305 n = splay_tree_lookup (ctx->field_map,
12306 (splay_tree_key) TREE_OPERAND (ind, 0));
12307 df = (tree) n->value;
12308 df = *tcctx.cb.decl_map->get (df);
12309 ptr = build_simple_mem_ref_loc (loc, arg);
12310 ptr = omp_build_component_ref (ptr, df);
12311 t = build2 (MODIFY_EXPR, TREE_TYPE (ptr), ptr,
12312 build_fold_addr_expr_loc (loc, dst));
12313 append_to_statement_list (t, &list);
12316 t = build1 (RETURN_EXPR, void_type_node, NULL);
12317 append_to_statement_list (t, &list);
12319 if (tcctx.cb.decl_map)
12320 delete tcctx.cb.decl_map;
12321 pop_gimplify_context (NULL);
12322 BIND_EXPR_BODY (bind) = list;
12323 pop_cfun ();
12326 static void
12327 lower_depend_clauses (tree *pclauses, gimple_seq *iseq, gimple_seq *oseq)
12329 tree c, clauses;
12330 gimple *g;
12331 size_t cnt[5] = { 0, 0, 0, 0, 0 }, idx = 2, i;
12333 clauses = omp_find_clause (*pclauses, OMP_CLAUSE_DEPEND);
12334 gcc_assert (clauses);
12335 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
12336 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
12337 switch (OMP_CLAUSE_DEPEND_KIND (c))
12339 case OMP_CLAUSE_DEPEND_LAST:
12340 /* Lowering already done at gimplification. */
12341 return;
12342 case OMP_CLAUSE_DEPEND_IN:
12343 cnt[2]++;
12344 break;
12345 case OMP_CLAUSE_DEPEND_OUT:
12346 case OMP_CLAUSE_DEPEND_INOUT:
12347 cnt[0]++;
12348 break;
12349 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
12350 cnt[1]++;
12351 break;
12352 case OMP_CLAUSE_DEPEND_DEPOBJ:
12353 cnt[3]++;
12354 break;
12355 case OMP_CLAUSE_DEPEND_INOUTSET:
12356 cnt[4]++;
12357 break;
12358 case OMP_CLAUSE_DEPEND_SOURCE:
12359 case OMP_CLAUSE_DEPEND_SINK:
12360 /* FALLTHRU */
12361 default:
12362 gcc_unreachable ();
12364 if (cnt[1] || cnt[3] || cnt[4])
12365 idx = 5;
12366 size_t total = cnt[0] + cnt[1] + cnt[2] + cnt[3] + cnt[4];
12367 size_t inoutidx = total + idx;
12368 tree type = build_array_type_nelts (ptr_type_node, total + idx + 2 * cnt[4]);
12369 tree array = create_tmp_var (type);
12370 TREE_ADDRESSABLE (array) = 1;
12371 tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
12372 NULL_TREE);
12373 if (idx == 5)
12375 g = gimple_build_assign (r, build_int_cst (ptr_type_node, 0));
12376 gimple_seq_add_stmt (iseq, g);
12377 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
12378 NULL_TREE);
12380 g = gimple_build_assign (r, build_int_cst (ptr_type_node, total));
12381 gimple_seq_add_stmt (iseq, g);
12382 for (i = 0; i < (idx == 5 ? 3 : 1); i++)
12384 r = build4 (ARRAY_REF, ptr_type_node, array,
12385 size_int (i + 1 + (idx == 5)), NULL_TREE, NULL_TREE);
12386 g = gimple_build_assign (r, build_int_cst (ptr_type_node, cnt[i]));
12387 gimple_seq_add_stmt (iseq, g);
12389 for (i = 0; i < 5; i++)
12391 if (cnt[i] == 0)
12392 continue;
12393 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
12394 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
12395 continue;
12396 else
12398 switch (OMP_CLAUSE_DEPEND_KIND (c))
12400 case OMP_CLAUSE_DEPEND_IN:
12401 if (i != 2)
12402 continue;
12403 break;
12404 case OMP_CLAUSE_DEPEND_OUT:
12405 case OMP_CLAUSE_DEPEND_INOUT:
12406 if (i != 0)
12407 continue;
12408 break;
12409 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
12410 if (i != 1)
12411 continue;
12412 break;
12413 case OMP_CLAUSE_DEPEND_DEPOBJ:
12414 if (i != 3)
12415 continue;
12416 break;
12417 case OMP_CLAUSE_DEPEND_INOUTSET:
12418 if (i != 4)
12419 continue;
12420 break;
12421 default:
12422 gcc_unreachable ();
12424 tree t = OMP_CLAUSE_DECL (c);
12425 if (i == 4)
12427 t = build4 (ARRAY_REF, ptr_type_node, array,
12428 size_int (inoutidx), NULL_TREE, NULL_TREE);
12429 t = build_fold_addr_expr (t);
12430 inoutidx += 2;
12432 t = fold_convert (ptr_type_node, t);
12433 gimplify_expr (&t, iseq, NULL, is_gimple_val, fb_rvalue);
12434 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (idx++),
12435 NULL_TREE, NULL_TREE);
12436 g = gimple_build_assign (r, t);
12437 gimple_seq_add_stmt (iseq, g);
12440 if (cnt[4])
12441 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
12442 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
12443 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_INOUTSET)
12445 tree t = OMP_CLAUSE_DECL (c);
12446 t = fold_convert (ptr_type_node, t);
12447 gimplify_expr (&t, iseq, NULL, is_gimple_val, fb_rvalue);
12448 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (idx++),
12449 NULL_TREE, NULL_TREE);
12450 g = gimple_build_assign (r, t);
12451 gimple_seq_add_stmt (iseq, g);
12452 t = build_int_cst (ptr_type_node, GOMP_DEPEND_INOUTSET);
12453 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (idx++),
12454 NULL_TREE, NULL_TREE);
12455 g = gimple_build_assign (r, t);
12456 gimple_seq_add_stmt (iseq, g);
12459 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
12460 OMP_CLAUSE_DEPEND_KIND (c) = OMP_CLAUSE_DEPEND_LAST;
12461 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
12462 OMP_CLAUSE_CHAIN (c) = *pclauses;
12463 *pclauses = c;
12464 tree clobber = build_clobber (type);
12465 g = gimple_build_assign (array, clobber);
12466 gimple_seq_add_stmt (oseq, g);
12469 /* Lower the OpenMP parallel or task directive in the current statement
12470 in GSI_P. CTX holds context information for the directive. */
12472 static void
12473 lower_omp_taskreg (gimple_stmt_iterator *gsi_p, omp_context *ctx)
12475 tree clauses;
12476 tree child_fn, t;
12477 gimple *stmt = gsi_stmt (*gsi_p);
12478 gbind *par_bind, *bind, *dep_bind = NULL;
12479 gimple_seq par_body;
12480 location_t loc = gimple_location (stmt);
12482 clauses = gimple_omp_taskreg_clauses (stmt);
12483 if (gimple_code (stmt) == GIMPLE_OMP_TASK
12484 && gimple_omp_task_taskwait_p (stmt))
12486 par_bind = NULL;
12487 par_body = NULL;
12489 else
12491 par_bind
12492 = as_a <gbind *> (gimple_seq_first_stmt (gimple_omp_body (stmt)));
12493 par_body = gimple_bind_body (par_bind);
12495 child_fn = ctx->cb.dst_fn;
12496 if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL
12497 && !gimple_omp_parallel_combined_p (stmt))
12499 struct walk_stmt_info wi;
12500 int ws_num = 0;
12502 memset (&wi, 0, sizeof (wi));
12503 wi.info = &ws_num;
12504 wi.val_only = true;
12505 walk_gimple_seq (par_body, check_combined_parallel, NULL, &wi);
12506 if (ws_num == 1)
12507 gimple_omp_parallel_set_combined_p (stmt, true);
12509 gimple_seq dep_ilist = NULL;
12510 gimple_seq dep_olist = NULL;
12511 if (gimple_code (stmt) == GIMPLE_OMP_TASK
12512 && omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
12514 push_gimplify_context ();
12515 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
12516 lower_depend_clauses (gimple_omp_task_clauses_ptr (stmt),
12517 &dep_ilist, &dep_olist);
12520 if (gimple_code (stmt) == GIMPLE_OMP_TASK
12521 && gimple_omp_task_taskwait_p (stmt))
12523 if (dep_bind)
12525 gsi_replace (gsi_p, dep_bind, true);
12526 gimple_bind_add_seq (dep_bind, dep_ilist);
12527 gimple_bind_add_stmt (dep_bind, stmt);
12528 gimple_bind_add_seq (dep_bind, dep_olist);
12529 pop_gimplify_context (dep_bind);
12531 return;
12534 if (ctx->srecord_type)
12535 create_task_copyfn (as_a <gomp_task *> (stmt), ctx);
12537 gimple_seq tskred_ilist = NULL;
12538 gimple_seq tskred_olist = NULL;
12539 if ((is_task_ctx (ctx)
12540 && gimple_omp_task_taskloop_p (ctx->stmt)
12541 && omp_find_clause (gimple_omp_task_clauses (ctx->stmt),
12542 OMP_CLAUSE_REDUCTION))
12543 || (is_parallel_ctx (ctx)
12544 && omp_find_clause (gimple_omp_parallel_clauses (stmt),
12545 OMP_CLAUSE__REDUCTEMP_)))
12547 if (dep_bind == NULL)
12549 push_gimplify_context ();
12550 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
12552 lower_omp_task_reductions (ctx, is_task_ctx (ctx) ? OMP_TASKLOOP
12553 : OMP_PARALLEL,
12554 gimple_omp_taskreg_clauses (ctx->stmt),
12555 &tskred_ilist, &tskred_olist);
12558 push_gimplify_context ();
12560 gimple_seq par_olist = NULL;
12561 gimple_seq par_ilist = NULL;
12562 gimple_seq par_rlist = NULL;
12563 lower_rec_input_clauses (clauses, &par_ilist, &par_olist, ctx, NULL);
12564 lower_omp (&par_body, ctx);
12565 if (gimple_code (stmt) != GIMPLE_OMP_TASK)
12566 lower_reduction_clauses (clauses, &par_rlist, NULL, ctx);
12568 /* Declare all the variables created by mapping and the variables
12569 declared in the scope of the parallel body. */
12570 record_vars_into (ctx->block_vars, child_fn);
12571 maybe_remove_omp_member_access_dummy_vars (par_bind);
12572 record_vars_into (gimple_bind_vars (par_bind), child_fn);
12574 if (ctx->record_type)
12576 ctx->sender_decl
12577 = create_tmp_var (ctx->srecord_type ? ctx->srecord_type
12578 : ctx->record_type, ".omp_data_o");
12579 DECL_NAMELESS (ctx->sender_decl) = 1;
12580 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
12581 gimple_omp_taskreg_set_data_arg (stmt, ctx->sender_decl);
12584 gimple_seq olist = NULL;
12585 gimple_seq ilist = NULL;
12586 lower_send_clauses (clauses, &ilist, &olist, ctx);
12587 lower_send_shared_vars (&ilist, &olist, ctx);
12589 if (ctx->record_type)
12591 tree clobber = build_clobber (TREE_TYPE (ctx->sender_decl));
12592 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
12593 clobber));
12596 /* Once all the expansions are done, sequence all the different
12597 fragments inside gimple_omp_body. */
12599 gimple_seq new_body = NULL;
12601 if (ctx->record_type)
12603 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
12604 /* fixup_child_record_type might have changed receiver_decl's type. */
12605 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
12606 gimple_seq_add_stmt (&new_body,
12607 gimple_build_assign (ctx->receiver_decl, t));
12610 gimple_seq_add_seq (&new_body, par_ilist);
12611 gimple_seq_add_seq (&new_body, par_body);
12612 gimple_seq_add_seq (&new_body, par_rlist);
12613 if (ctx->cancellable)
12614 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
12615 gimple_seq_add_seq (&new_body, par_olist);
12616 new_body = maybe_catch_exception (new_body);
12617 if (gimple_code (stmt) == GIMPLE_OMP_TASK)
12618 gimple_seq_add_stmt (&new_body,
12619 gimple_build_omp_continue (integer_zero_node,
12620 integer_zero_node));
12621 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
12622 gimple_omp_set_body (stmt, new_body);
12624 if (dep_bind && gimple_bind_block (par_bind) == NULL_TREE)
12625 bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
12626 else
12627 bind = gimple_build_bind (NULL, NULL, gimple_bind_block (par_bind));
12628 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
12629 gimple_bind_add_seq (bind, ilist);
12630 gimple_bind_add_stmt (bind, stmt);
12631 gimple_bind_add_seq (bind, olist);
12633 pop_gimplify_context (NULL);
12635 if (dep_bind)
12637 gimple_bind_add_seq (dep_bind, dep_ilist);
12638 gimple_bind_add_seq (dep_bind, tskred_ilist);
12639 gimple_bind_add_stmt (dep_bind, bind);
12640 gimple_bind_add_seq (dep_bind, tskred_olist);
12641 gimple_bind_add_seq (dep_bind, dep_olist);
12642 pop_gimplify_context (dep_bind);
12646 /* Lower the GIMPLE_OMP_TARGET in the current statement
12647 in GSI_P. CTX holds context information for the directive. */
12649 static void
12650 lower_omp_target (gimple_stmt_iterator *gsi_p, omp_context *ctx)
12652 tree clauses;
12653 tree child_fn, t, c;
12654 gomp_target *stmt = as_a <gomp_target *> (gsi_stmt (*gsi_p));
12655 gbind *tgt_bind, *bind, *dep_bind = NULL;
12656 gimple_seq tgt_body, olist, ilist, fplist, new_body;
12657 location_t loc = gimple_location (stmt);
12658 bool offloaded, data_region;
12659 unsigned int map_cnt = 0;
12660 tree in_reduction_clauses = NULL_TREE;
12662 offloaded = is_gimple_omp_offloaded (stmt);
12663 switch (gimple_omp_target_kind (stmt))
12665 case GF_OMP_TARGET_KIND_REGION:
12666 tree *p, *q;
12667 q = &in_reduction_clauses;
12668 for (p = gimple_omp_target_clauses_ptr (stmt); *p; )
12669 if (OMP_CLAUSE_CODE (*p) == OMP_CLAUSE_IN_REDUCTION)
12671 *q = *p;
12672 q = &OMP_CLAUSE_CHAIN (*q);
12673 *p = OMP_CLAUSE_CHAIN (*p);
12675 else
12676 p = &OMP_CLAUSE_CHAIN (*p);
12677 *q = NULL_TREE;
12678 *p = in_reduction_clauses;
12679 /* FALLTHRU */
12680 case GF_OMP_TARGET_KIND_UPDATE:
12681 case GF_OMP_TARGET_KIND_ENTER_DATA:
12682 case GF_OMP_TARGET_KIND_EXIT_DATA:
12683 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
12684 case GF_OMP_TARGET_KIND_OACC_KERNELS:
12685 case GF_OMP_TARGET_KIND_OACC_SERIAL:
12686 case GF_OMP_TARGET_KIND_OACC_UPDATE:
12687 case GF_OMP_TARGET_KIND_OACC_ENTER_DATA:
12688 case GF_OMP_TARGET_KIND_OACC_EXIT_DATA:
12689 case GF_OMP_TARGET_KIND_OACC_DECLARE:
12690 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED:
12691 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE:
12692 data_region = false;
12693 break;
12694 case GF_OMP_TARGET_KIND_DATA:
12695 case GF_OMP_TARGET_KIND_OACC_DATA:
12696 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
12697 case GF_OMP_TARGET_KIND_OACC_DATA_KERNELS:
12698 data_region = true;
12699 break;
12700 default:
12701 gcc_unreachable ();
12704 /* Ensure that requires map is written via output_offload_tables, even if only
12705 'target (enter/exit) data' is used in the translation unit. */
12706 if (ENABLE_OFFLOADING && (omp_requires_mask & OMP_REQUIRES_TARGET_USED))
12707 g->have_offload = true;
12709 clauses = gimple_omp_target_clauses (stmt);
12711 gimple_seq dep_ilist = NULL;
12712 gimple_seq dep_olist = NULL;
12713 bool has_depend = omp_find_clause (clauses, OMP_CLAUSE_DEPEND) != NULL_TREE;
12714 if (has_depend || in_reduction_clauses)
12716 push_gimplify_context ();
12717 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
12718 if (has_depend)
12719 lower_depend_clauses (gimple_omp_target_clauses_ptr (stmt),
12720 &dep_ilist, &dep_olist);
12721 if (in_reduction_clauses)
12722 lower_rec_input_clauses (in_reduction_clauses, &dep_ilist, &dep_olist,
12723 ctx, NULL);
12726 tgt_bind = NULL;
12727 tgt_body = NULL;
12728 if (offloaded)
12730 tgt_bind = gimple_seq_first_stmt_as_a_bind (gimple_omp_body (stmt));
12731 tgt_body = gimple_bind_body (tgt_bind);
12733 else if (data_region)
12734 tgt_body = gimple_omp_body (stmt);
12735 child_fn = ctx->cb.dst_fn;
12737 push_gimplify_context ();
12738 fplist = NULL;
12740 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
12741 switch (OMP_CLAUSE_CODE (c))
12743 tree var, x;
12745 default:
12746 break;
12747 case OMP_CLAUSE_MAP:
12748 #if CHECKING_P
12749 /* First check what we're prepared to handle in the following. */
12750 switch (OMP_CLAUSE_MAP_KIND (c))
12752 case GOMP_MAP_ALLOC:
12753 case GOMP_MAP_TO:
12754 case GOMP_MAP_FROM:
12755 case GOMP_MAP_TOFROM:
12756 case GOMP_MAP_POINTER:
12757 case GOMP_MAP_TO_PSET:
12758 case GOMP_MAP_DELETE:
12759 case GOMP_MAP_RELEASE:
12760 case GOMP_MAP_ALWAYS_TO:
12761 case GOMP_MAP_ALWAYS_FROM:
12762 case GOMP_MAP_ALWAYS_TOFROM:
12763 case GOMP_MAP_FIRSTPRIVATE_POINTER:
12764 case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
12765 case GOMP_MAP_STRUCT:
12766 case GOMP_MAP_ALWAYS_POINTER:
12767 case GOMP_MAP_ATTACH:
12768 case GOMP_MAP_DETACH:
12769 case GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION:
12770 case GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION:
12771 break;
12772 case GOMP_MAP_IF_PRESENT:
12773 case GOMP_MAP_FORCE_ALLOC:
12774 case GOMP_MAP_FORCE_TO:
12775 case GOMP_MAP_FORCE_FROM:
12776 case GOMP_MAP_FORCE_TOFROM:
12777 case GOMP_MAP_FORCE_PRESENT:
12778 case GOMP_MAP_FORCE_DEVICEPTR:
12779 case GOMP_MAP_DEVICE_RESIDENT:
12780 case GOMP_MAP_LINK:
12781 case GOMP_MAP_FORCE_DETACH:
12782 gcc_assert (is_gimple_omp_oacc (stmt));
12783 break;
12784 default:
12785 gcc_unreachable ();
12787 #endif
12788 /* FALLTHRU */
12789 case OMP_CLAUSE_TO:
12790 case OMP_CLAUSE_FROM:
12791 oacc_firstprivate:
12792 var = OMP_CLAUSE_DECL (c);
12793 if (!DECL_P (var))
12795 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP
12796 || (!OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
12797 && (OMP_CLAUSE_MAP_KIND (c)
12798 != GOMP_MAP_FIRSTPRIVATE_POINTER)))
12799 map_cnt++;
12800 continue;
12803 if (DECL_SIZE (var)
12804 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
12806 tree var2 = DECL_VALUE_EXPR (var);
12807 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
12808 var2 = TREE_OPERAND (var2, 0);
12809 gcc_assert (DECL_P (var2));
12810 var = var2;
12813 if (offloaded
12814 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12815 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
12816 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
12818 if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
12820 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx))
12821 && varpool_node::get_create (var)->offloadable)
12822 continue;
12824 tree type = build_pointer_type (TREE_TYPE (var));
12825 tree new_var = lookup_decl (var, ctx);
12826 x = create_tmp_var_raw (type, get_name (new_var));
12827 gimple_add_tmp_var (x);
12828 x = build_simple_mem_ref (x);
12829 SET_DECL_VALUE_EXPR (new_var, x);
12830 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12832 continue;
12835 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12836 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
12837 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)
12838 && is_omp_target (stmt))
12840 gcc_assert (maybe_lookup_field (c, ctx));
12841 map_cnt++;
12842 continue;
12845 if (!maybe_lookup_field (var, ctx))
12846 continue;
12848 /* Don't remap compute constructs' reduction variables, because the
12849 intermediate result must be local to each gang. */
12850 if (offloaded && !(OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12851 && is_gimple_omp_oacc (ctx->stmt)
12852 && OMP_CLAUSE_MAP_IN_REDUCTION (c)))
12854 x = build_receiver_ref (var, true, ctx);
12855 tree new_var = lookup_decl (var, ctx);
12857 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12858 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
12859 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
12860 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
12861 x = build_simple_mem_ref (x);
12862 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
12864 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
12865 if (omp_privatize_by_reference (new_var)
12866 && (TREE_CODE (TREE_TYPE (new_var)) != POINTER_TYPE
12867 || DECL_BY_REFERENCE (var)))
12869 /* Create a local object to hold the instance
12870 value. */
12871 tree type = TREE_TYPE (TREE_TYPE (new_var));
12872 const char *id = IDENTIFIER_POINTER (DECL_NAME (new_var));
12873 tree inst = create_tmp_var (type, id);
12874 gimplify_assign (inst, fold_indirect_ref (x), &fplist);
12875 x = build_fold_addr_expr (inst);
12877 gimplify_assign (new_var, x, &fplist);
12879 else if (DECL_P (new_var))
12881 SET_DECL_VALUE_EXPR (new_var, x);
12882 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12884 else
12885 gcc_unreachable ();
12887 map_cnt++;
12888 break;
12890 case OMP_CLAUSE_FIRSTPRIVATE:
12891 omp_firstprivate_recv:
12892 gcc_checking_assert (offloaded);
12893 if (is_gimple_omp_oacc (ctx->stmt))
12895 /* No 'firstprivate' clauses on OpenACC 'kernels'. */
12896 gcc_checking_assert (!is_oacc_kernels (ctx));
12897 /* Likewise, on OpenACC 'kernels' decomposed parts. */
12898 gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx));
12900 goto oacc_firstprivate;
12902 map_cnt++;
12903 var = OMP_CLAUSE_DECL (c);
12904 if (!omp_privatize_by_reference (var)
12905 && !is_gimple_reg_type (TREE_TYPE (var)))
12907 tree new_var = lookup_decl (var, ctx);
12908 if (is_variable_sized (var))
12910 tree pvar = DECL_VALUE_EXPR (var);
12911 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
12912 pvar = TREE_OPERAND (pvar, 0);
12913 gcc_assert (DECL_P (pvar));
12914 tree new_pvar = lookup_decl (pvar, ctx);
12915 x = build_fold_indirect_ref (new_pvar);
12916 TREE_THIS_NOTRAP (x) = 1;
12918 else
12919 x = build_receiver_ref (var, true, ctx);
12920 SET_DECL_VALUE_EXPR (new_var, x);
12921 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12923 /* Fortran array descriptors: firstprivate of data + attach. */
12924 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_HAS_DEVICE_ADDR
12925 && lang_hooks.decls.omp_array_data (var, true))
12926 map_cnt += 2;
12927 break;
12929 case OMP_CLAUSE_PRIVATE:
12930 gcc_checking_assert (offloaded);
12931 if (is_gimple_omp_oacc (ctx->stmt))
12933 /* No 'private' clauses on OpenACC 'kernels'. */
12934 gcc_checking_assert (!is_oacc_kernels (ctx));
12935 /* Likewise, on OpenACC 'kernels' decomposed parts. */
12936 gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx));
12938 break;
12940 var = OMP_CLAUSE_DECL (c);
12941 if (is_variable_sized (var))
12943 tree new_var = lookup_decl (var, ctx);
12944 tree pvar = DECL_VALUE_EXPR (var);
12945 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
12946 pvar = TREE_OPERAND (pvar, 0);
12947 gcc_assert (DECL_P (pvar));
12948 tree new_pvar = lookup_decl (pvar, ctx);
12949 x = build_fold_indirect_ref (new_pvar);
12950 TREE_THIS_NOTRAP (x) = 1;
12951 SET_DECL_VALUE_EXPR (new_var, x);
12952 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12954 break;
12956 case OMP_CLAUSE_USE_DEVICE_PTR:
12957 case OMP_CLAUSE_USE_DEVICE_ADDR:
12958 case OMP_CLAUSE_HAS_DEVICE_ADDR:
12959 case OMP_CLAUSE_IS_DEVICE_PTR:
12960 var = OMP_CLAUSE_DECL (c);
12961 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR)
12963 while (TREE_CODE (var) == INDIRECT_REF
12964 || TREE_CODE (var) == ARRAY_REF)
12965 var = TREE_OPERAND (var, 0);
12966 if (lang_hooks.decls.omp_array_data (var, true))
12967 goto omp_firstprivate_recv;
12969 map_cnt++;
12970 if (is_variable_sized (var))
12972 tree new_var = lookup_decl (var, ctx);
12973 tree pvar = DECL_VALUE_EXPR (var);
12974 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
12975 pvar = TREE_OPERAND (pvar, 0);
12976 gcc_assert (DECL_P (pvar));
12977 tree new_pvar = lookup_decl (pvar, ctx);
12978 x = build_fold_indirect_ref (new_pvar);
12979 TREE_THIS_NOTRAP (x) = 1;
12980 SET_DECL_VALUE_EXPR (new_var, x);
12981 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12983 else if (((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
12984 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR)
12985 && !omp_privatize_by_reference (var)
12986 && !omp_is_allocatable_or_ptr (var)
12987 && !lang_hooks.decls.omp_array_data (var, true))
12988 || TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
12990 tree new_var = lookup_decl (var, ctx);
12991 tree type = build_pointer_type (TREE_TYPE (var));
12992 x = create_tmp_var_raw (type, get_name (new_var));
12993 gimple_add_tmp_var (x);
12994 x = build_simple_mem_ref (x);
12995 SET_DECL_VALUE_EXPR (new_var, x);
12996 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12998 else
13000 tree new_var = lookup_decl (var, ctx);
13001 x = create_tmp_var_raw (TREE_TYPE (new_var), get_name (new_var));
13002 gimple_add_tmp_var (x);
13003 SET_DECL_VALUE_EXPR (new_var, x);
13004 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
13006 break;
13009 if (offloaded)
13011 target_nesting_level++;
13012 lower_omp (&tgt_body, ctx);
13013 target_nesting_level--;
13015 else if (data_region)
13016 lower_omp (&tgt_body, ctx);
13018 if (offloaded)
13020 /* Declare all the variables created by mapping and the variables
13021 declared in the scope of the target body. */
13022 record_vars_into (ctx->block_vars, child_fn);
13023 maybe_remove_omp_member_access_dummy_vars (tgt_bind);
13024 record_vars_into (gimple_bind_vars (tgt_bind), child_fn);
13027 olist = NULL;
13028 ilist = NULL;
13029 if (ctx->record_type)
13031 ctx->sender_decl
13032 = create_tmp_var (ctx->record_type, ".omp_data_arr");
13033 DECL_NAMELESS (ctx->sender_decl) = 1;
13034 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
13035 t = make_tree_vec (3);
13036 TREE_VEC_ELT (t, 0) = ctx->sender_decl;
13037 TREE_VEC_ELT (t, 1)
13038 = create_tmp_var (build_array_type_nelts (size_type_node, map_cnt),
13039 ".omp_data_sizes");
13040 DECL_NAMELESS (TREE_VEC_ELT (t, 1)) = 1;
13041 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 1)) = 1;
13042 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 1;
13043 tree tkind_type = short_unsigned_type_node;
13044 int talign_shift = 8;
13045 TREE_VEC_ELT (t, 2)
13046 = create_tmp_var (build_array_type_nelts (tkind_type, map_cnt),
13047 ".omp_data_kinds");
13048 DECL_NAMELESS (TREE_VEC_ELT (t, 2)) = 1;
13049 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 2)) = 1;
13050 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 1;
13051 gimple_omp_target_set_data_arg (stmt, t);
13053 vec<constructor_elt, va_gc> *vsize;
13054 vec<constructor_elt, va_gc> *vkind;
13055 vec_alloc (vsize, map_cnt);
13056 vec_alloc (vkind, map_cnt);
13057 unsigned int map_idx = 0;
13059 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
13060 switch (OMP_CLAUSE_CODE (c))
13062 tree ovar, nc, s, purpose, var, x, type;
13063 unsigned int talign;
13065 default:
13066 break;
13068 case OMP_CLAUSE_MAP:
13069 case OMP_CLAUSE_TO:
13070 case OMP_CLAUSE_FROM:
13071 oacc_firstprivate_map:
13072 nc = c;
13073 ovar = OMP_CLAUSE_DECL (c);
13074 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
13075 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
13076 || (OMP_CLAUSE_MAP_KIND (c)
13077 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
13078 break;
13079 if (!DECL_P (ovar))
13081 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
13082 && OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
13084 nc = OMP_CLAUSE_CHAIN (c);
13085 gcc_checking_assert (OMP_CLAUSE_DECL (nc)
13086 == get_base_address (ovar));
13087 ovar = OMP_CLAUSE_DECL (nc);
13089 else
13091 tree x = build_sender_ref (ovar, ctx);
13092 tree v = ovar;
13093 if (in_reduction_clauses
13094 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
13095 && OMP_CLAUSE_MAP_IN_REDUCTION (c))
13097 v = unshare_expr (v);
13098 tree *p = &v;
13099 while (handled_component_p (*p)
13100 || TREE_CODE (*p) == INDIRECT_REF
13101 || TREE_CODE (*p) == ADDR_EXPR
13102 || TREE_CODE (*p) == MEM_REF
13103 || TREE_CODE (*p) == NON_LVALUE_EXPR)
13104 p = &TREE_OPERAND (*p, 0);
13105 tree d = *p;
13106 if (is_variable_sized (d))
13108 gcc_assert (DECL_HAS_VALUE_EXPR_P (d));
13109 d = DECL_VALUE_EXPR (d);
13110 gcc_assert (TREE_CODE (d) == INDIRECT_REF);
13111 d = TREE_OPERAND (d, 0);
13112 gcc_assert (DECL_P (d));
13114 splay_tree_key key
13115 = (splay_tree_key) &DECL_CONTEXT (d);
13116 tree nd = (tree) splay_tree_lookup (ctx->field_map,
13117 key)->value;
13118 if (d == *p)
13119 *p = nd;
13120 else
13121 *p = build_fold_indirect_ref (nd);
13123 v = build_fold_addr_expr_with_type (v, ptr_type_node);
13124 gimplify_assign (x, v, &ilist);
13125 nc = NULL_TREE;
13128 else
13130 if (DECL_SIZE (ovar)
13131 && TREE_CODE (DECL_SIZE (ovar)) != INTEGER_CST)
13133 tree ovar2 = DECL_VALUE_EXPR (ovar);
13134 gcc_assert (TREE_CODE (ovar2) == INDIRECT_REF);
13135 ovar2 = TREE_OPERAND (ovar2, 0);
13136 gcc_assert (DECL_P (ovar2));
13137 ovar = ovar2;
13139 if (!maybe_lookup_field (ovar, ctx)
13140 && !(OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
13141 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
13142 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)))
13143 continue;
13146 talign = TYPE_ALIGN_UNIT (TREE_TYPE (ovar));
13147 if (DECL_P (ovar) && DECL_ALIGN_UNIT (ovar) > talign)
13148 talign = DECL_ALIGN_UNIT (ovar);
13150 var = NULL_TREE;
13151 if (nc)
13153 if (in_reduction_clauses
13154 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
13155 && OMP_CLAUSE_MAP_IN_REDUCTION (c))
13157 tree d = ovar;
13158 if (is_variable_sized (d))
13160 gcc_assert (DECL_HAS_VALUE_EXPR_P (d));
13161 d = DECL_VALUE_EXPR (d);
13162 gcc_assert (TREE_CODE (d) == INDIRECT_REF);
13163 d = TREE_OPERAND (d, 0);
13164 gcc_assert (DECL_P (d));
13166 splay_tree_key key
13167 = (splay_tree_key) &DECL_CONTEXT (d);
13168 tree nd = (tree) splay_tree_lookup (ctx->field_map,
13169 key)->value;
13170 if (d == ovar)
13171 var = nd;
13172 else
13173 var = build_fold_indirect_ref (nd);
13175 else
13176 var = lookup_decl_in_outer_ctx (ovar, ctx);
13178 if (nc
13179 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
13180 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
13181 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)
13182 && is_omp_target (stmt))
13184 x = build_sender_ref (c, ctx);
13185 gimplify_assign (x, build_fold_addr_expr (var), &ilist);
13187 else if (nc)
13189 x = build_sender_ref (ovar, ctx);
13191 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
13192 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
13193 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
13194 && TREE_CODE (TREE_TYPE (ovar)) == ARRAY_TYPE)
13196 gcc_assert (offloaded);
13197 tree avar
13198 = create_tmp_var (TREE_TYPE (TREE_TYPE (x)));
13199 mark_addressable (avar);
13200 gimplify_assign (avar, build_fold_addr_expr (var), &ilist);
13201 talign = DECL_ALIGN_UNIT (avar);
13202 avar = build_fold_addr_expr (avar);
13203 gimplify_assign (x, avar, &ilist);
13205 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
13207 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
13208 if (!omp_privatize_by_reference (var))
13210 if (is_gimple_reg (var)
13211 && OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
13212 suppress_warning (var);
13213 var = build_fold_addr_expr (var);
13215 else
13216 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
13217 gimplify_assign (x, var, &ilist);
13219 else if (is_gimple_reg (var))
13221 gcc_assert (offloaded);
13222 tree avar = create_tmp_var (TREE_TYPE (var));
13223 mark_addressable (avar);
13224 enum gomp_map_kind map_kind = OMP_CLAUSE_MAP_KIND (c);
13225 if (GOMP_MAP_COPY_TO_P (map_kind)
13226 || map_kind == GOMP_MAP_POINTER
13227 || map_kind == GOMP_MAP_TO_PSET
13228 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
13230 /* If we need to initialize a temporary
13231 with VAR because it is not addressable, and
13232 the variable hasn't been initialized yet, then
13233 we'll get a warning for the store to avar.
13234 Don't warn in that case, the mapping might
13235 be implicit. */
13236 suppress_warning (var, OPT_Wuninitialized);
13237 gimplify_assign (avar, var, &ilist);
13239 avar = build_fold_addr_expr (avar);
13240 gimplify_assign (x, avar, &ilist);
13241 if ((GOMP_MAP_COPY_FROM_P (map_kind)
13242 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
13243 && !TYPE_READONLY (TREE_TYPE (var)))
13245 x = unshare_expr (x);
13246 x = build_simple_mem_ref (x);
13247 gimplify_assign (var, x, &olist);
13250 else
13252 /* While MAP is handled explicitly by the FE,
13253 for 'target update', only the identified is passed. */
13254 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FROM
13255 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TO)
13256 && (omp_is_allocatable_or_ptr (var)
13257 && omp_check_optional_argument (var, false)))
13258 var = build_fold_indirect_ref (var);
13259 else if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FROM
13260 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_TO)
13261 || (!omp_is_allocatable_or_ptr (var)
13262 && !omp_check_optional_argument (var, false)))
13263 var = build_fold_addr_expr (var);
13264 gimplify_assign (x, var, &ilist);
13267 s = NULL_TREE;
13268 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
13270 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
13271 s = TREE_TYPE (ovar);
13272 if (TREE_CODE (s) == REFERENCE_TYPE
13273 || omp_check_optional_argument (ovar, false))
13274 s = TREE_TYPE (s);
13275 s = TYPE_SIZE_UNIT (s);
13277 else
13278 s = OMP_CLAUSE_SIZE (c);
13279 if (s == NULL_TREE)
13280 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
13281 s = fold_convert (size_type_node, s);
13282 purpose = size_int (map_idx++);
13283 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
13284 if (TREE_CODE (s) != INTEGER_CST)
13285 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
13287 unsigned HOST_WIDE_INT tkind, tkind_zero;
13288 switch (OMP_CLAUSE_CODE (c))
13290 case OMP_CLAUSE_MAP:
13291 tkind = OMP_CLAUSE_MAP_KIND (c);
13292 tkind_zero = tkind;
13293 if (OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c))
13294 switch (tkind)
13296 case GOMP_MAP_ALLOC:
13297 case GOMP_MAP_IF_PRESENT:
13298 case GOMP_MAP_TO:
13299 case GOMP_MAP_FROM:
13300 case GOMP_MAP_TOFROM:
13301 case GOMP_MAP_ALWAYS_TO:
13302 case GOMP_MAP_ALWAYS_FROM:
13303 case GOMP_MAP_ALWAYS_TOFROM:
13304 case GOMP_MAP_RELEASE:
13305 case GOMP_MAP_FORCE_TO:
13306 case GOMP_MAP_FORCE_FROM:
13307 case GOMP_MAP_FORCE_TOFROM:
13308 case GOMP_MAP_FORCE_PRESENT:
13309 tkind_zero = GOMP_MAP_ZERO_LEN_ARRAY_SECTION;
13310 break;
13311 case GOMP_MAP_DELETE:
13312 tkind_zero = GOMP_MAP_DELETE_ZERO_LEN_ARRAY_SECTION;
13313 default:
13314 break;
13316 if (tkind_zero != tkind)
13318 if (integer_zerop (s))
13319 tkind = tkind_zero;
13320 else if (integer_nonzerop (s))
13321 tkind_zero = tkind;
13323 if (tkind_zero == tkind
13324 && OMP_CLAUSE_MAP_RUNTIME_IMPLICIT_P (c)
13325 && (((tkind & GOMP_MAP_FLAG_SPECIAL_BITS)
13326 & ~GOMP_MAP_IMPLICIT)
13327 == 0))
13329 /* If this is an implicit map, and the GOMP_MAP_IMPLICIT
13330 bits are not interfered by other special bit encodings,
13331 then turn the GOMP_IMPLICIT_BIT flag on for the runtime
13332 to see. */
13333 tkind |= GOMP_MAP_IMPLICIT;
13334 tkind_zero = tkind;
13336 break;
13337 case OMP_CLAUSE_FIRSTPRIVATE:
13338 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
13339 tkind = GOMP_MAP_TO;
13340 tkind_zero = tkind;
13341 break;
13342 case OMP_CLAUSE_TO:
13343 tkind = GOMP_MAP_TO;
13344 tkind_zero = tkind;
13345 break;
13346 case OMP_CLAUSE_FROM:
13347 tkind = GOMP_MAP_FROM;
13348 tkind_zero = tkind;
13349 break;
13350 default:
13351 gcc_unreachable ();
13353 gcc_checking_assert (tkind
13354 < (HOST_WIDE_INT_C (1U) << talign_shift));
13355 gcc_checking_assert (tkind_zero
13356 < (HOST_WIDE_INT_C (1U) << talign_shift));
13357 talign = ceil_log2 (talign);
13358 tkind |= talign << talign_shift;
13359 tkind_zero |= talign << talign_shift;
13360 gcc_checking_assert (tkind
13361 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
13362 gcc_checking_assert (tkind_zero
13363 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
13364 if (tkind == tkind_zero)
13365 x = build_int_cstu (tkind_type, tkind);
13366 else
13368 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 0;
13369 x = build3 (COND_EXPR, tkind_type,
13370 fold_build2 (EQ_EXPR, boolean_type_node,
13371 unshare_expr (s), size_zero_node),
13372 build_int_cstu (tkind_type, tkind_zero),
13373 build_int_cstu (tkind_type, tkind));
13375 CONSTRUCTOR_APPEND_ELT (vkind, purpose, x);
13376 if (nc && nc != c)
13377 c = nc;
13378 break;
13380 case OMP_CLAUSE_FIRSTPRIVATE:
13381 omp_has_device_addr_descr:
13382 if (is_gimple_omp_oacc (ctx->stmt))
13383 goto oacc_firstprivate_map;
13384 ovar = OMP_CLAUSE_DECL (c);
13385 if (omp_privatize_by_reference (ovar))
13386 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
13387 else
13388 talign = DECL_ALIGN_UNIT (ovar);
13389 var = lookup_decl_in_outer_ctx (ovar, ctx);
13390 x = build_sender_ref (ovar, ctx);
13391 tkind = GOMP_MAP_FIRSTPRIVATE;
13392 type = TREE_TYPE (ovar);
13393 if (omp_privatize_by_reference (ovar))
13394 type = TREE_TYPE (type);
13395 if ((INTEGRAL_TYPE_P (type)
13396 && TYPE_PRECISION (type) <= POINTER_SIZE)
13397 || TREE_CODE (type) == POINTER_TYPE)
13399 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
13400 tree t = var;
13401 if (omp_privatize_by_reference (var))
13402 t = build_simple_mem_ref (var);
13403 else if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
13404 suppress_warning (var);
13405 if (TREE_CODE (type) != POINTER_TYPE)
13406 t = fold_convert (pointer_sized_int_node, t);
13407 t = fold_convert (TREE_TYPE (x), t);
13408 gimplify_assign (x, t, &ilist);
13410 else if (omp_privatize_by_reference (var))
13411 gimplify_assign (x, var, &ilist);
13412 else if (is_gimple_reg (var))
13414 tree avar = create_tmp_var (TREE_TYPE (var));
13415 mark_addressable (avar);
13416 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
13417 suppress_warning (var);
13418 gimplify_assign (avar, var, &ilist);
13419 avar = build_fold_addr_expr (avar);
13420 gimplify_assign (x, avar, &ilist);
13422 else
13424 var = build_fold_addr_expr (var);
13425 gimplify_assign (x, var, &ilist);
13427 if (tkind == GOMP_MAP_FIRSTPRIVATE_INT)
13428 s = size_int (0);
13429 else if (omp_privatize_by_reference (ovar))
13430 s = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
13431 else
13432 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
13433 s = fold_convert (size_type_node, s);
13434 purpose = size_int (map_idx++);
13435 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
13436 if (TREE_CODE (s) != INTEGER_CST)
13437 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
13439 gcc_checking_assert (tkind
13440 < (HOST_WIDE_INT_C (1U) << talign_shift));
13441 talign = ceil_log2 (talign);
13442 tkind |= talign << talign_shift;
13443 gcc_checking_assert (tkind
13444 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
13445 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
13446 build_int_cstu (tkind_type, tkind));
13447 /* Fortran array descriptors: firstprivate of data + attach. */
13448 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_HAS_DEVICE_ADDR
13449 && lang_hooks.decls.omp_array_data (ovar, true))
13451 tree not_null_lb, null_lb, after_lb;
13452 tree var1, var2, size1, size2;
13453 tree present = omp_check_optional_argument (ovar, true);
13454 if (present)
13456 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
13457 not_null_lb = create_artificial_label (clause_loc);
13458 null_lb = create_artificial_label (clause_loc);
13459 after_lb = create_artificial_label (clause_loc);
13460 gimple_seq seq = NULL;
13461 present = force_gimple_operand (present, &seq, true,
13462 NULL_TREE);
13463 gimple_seq_add_seq (&ilist, seq);
13464 gimple_seq_add_stmt (&ilist,
13465 gimple_build_cond_from_tree (present,
13466 not_null_lb, null_lb));
13467 gimple_seq_add_stmt (&ilist,
13468 gimple_build_label (not_null_lb));
13470 var1 = lang_hooks.decls.omp_array_data (var, false);
13471 size1 = lang_hooks.decls.omp_array_size (var, &ilist);
13472 var2 = build_fold_addr_expr (x);
13473 if (!POINTER_TYPE_P (TREE_TYPE (var)))
13474 var = build_fold_addr_expr (var);
13475 size2 = fold_build2 (POINTER_DIFF_EXPR, ssizetype,
13476 build_fold_addr_expr (var1), var);
13477 size2 = fold_convert (sizetype, size2);
13478 if (present)
13480 tree tmp = create_tmp_var (TREE_TYPE (var1));
13481 gimplify_assign (tmp, var1, &ilist);
13482 var1 = tmp;
13483 tmp = create_tmp_var (TREE_TYPE (var2));
13484 gimplify_assign (tmp, var2, &ilist);
13485 var2 = tmp;
13486 tmp = create_tmp_var (TREE_TYPE (size1));
13487 gimplify_assign (tmp, size1, &ilist);
13488 size1 = tmp;
13489 tmp = create_tmp_var (TREE_TYPE (size2));
13490 gimplify_assign (tmp, size2, &ilist);
13491 size2 = tmp;
13492 gimple_seq_add_stmt (&ilist, gimple_build_goto (after_lb));
13493 gimple_seq_add_stmt (&ilist, gimple_build_label (null_lb));
13494 gimplify_assign (var1, null_pointer_node, &ilist);
13495 gimplify_assign (var2, null_pointer_node, &ilist);
13496 gimplify_assign (size1, size_zero_node, &ilist);
13497 gimplify_assign (size2, size_zero_node, &ilist);
13498 gimple_seq_add_stmt (&ilist, gimple_build_label (after_lb));
13500 x = build_sender_ref ((splay_tree_key) &DECL_NAME (ovar), ctx);
13501 gimplify_assign (x, var1, &ilist);
13502 tkind = GOMP_MAP_FIRSTPRIVATE;
13503 talign = DECL_ALIGN_UNIT (ovar);
13504 talign = ceil_log2 (talign);
13505 tkind |= talign << talign_shift;
13506 gcc_checking_assert (tkind
13507 <= tree_to_uhwi (
13508 TYPE_MAX_VALUE (tkind_type)));
13509 purpose = size_int (map_idx++);
13510 CONSTRUCTOR_APPEND_ELT (vsize, purpose, size1);
13511 if (TREE_CODE (size1) != INTEGER_CST)
13512 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
13513 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
13514 build_int_cstu (tkind_type, tkind));
13515 x = build_sender_ref ((splay_tree_key) &DECL_UID (ovar), ctx);
13516 gimplify_assign (x, var2, &ilist);
13517 tkind = GOMP_MAP_ATTACH;
13518 purpose = size_int (map_idx++);
13519 CONSTRUCTOR_APPEND_ELT (vsize, purpose, size2);
13520 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
13521 build_int_cstu (tkind_type, tkind));
13523 break;
13525 case OMP_CLAUSE_USE_DEVICE_PTR:
13526 case OMP_CLAUSE_USE_DEVICE_ADDR:
13527 case OMP_CLAUSE_HAS_DEVICE_ADDR:
13528 case OMP_CLAUSE_IS_DEVICE_PTR:
13529 ovar = OMP_CLAUSE_DECL (c);
13530 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR)
13532 if (lang_hooks.decls.omp_array_data (ovar, true))
13533 goto omp_has_device_addr_descr;
13534 while (TREE_CODE (ovar) == INDIRECT_REF
13535 || TREE_CODE (ovar) == ARRAY_REF)
13536 ovar = TREE_OPERAND (ovar, 0);
13538 var = lookup_decl_in_outer_ctx (ovar, ctx);
13540 if (lang_hooks.decls.omp_array_data (ovar, true))
13542 tkind = ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR
13543 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_HAS_DEVICE_ADDR)
13544 ? GOMP_MAP_USE_DEVICE_PTR : GOMP_MAP_FIRSTPRIVATE_INT);
13545 x = build_sender_ref ((splay_tree_key) &DECL_NAME (ovar), ctx);
13547 else if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR
13548 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_HAS_DEVICE_ADDR)
13550 tkind = GOMP_MAP_USE_DEVICE_PTR;
13551 x = build_sender_ref ((splay_tree_key) &DECL_UID (ovar), ctx);
13553 else
13555 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
13556 x = build_sender_ref (ovar, ctx);
13559 if (is_gimple_omp_oacc (ctx->stmt))
13561 gcc_assert (tkind == GOMP_MAP_USE_DEVICE_PTR);
13563 if (OMP_CLAUSE_USE_DEVICE_PTR_IF_PRESENT (c))
13564 tkind = GOMP_MAP_USE_DEVICE_PTR_IF_PRESENT;
13567 type = TREE_TYPE (ovar);
13568 if (lang_hooks.decls.omp_array_data (ovar, true))
13569 var = lang_hooks.decls.omp_array_data (var, false);
13570 else if (((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
13571 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR)
13572 && !omp_privatize_by_reference (ovar)
13573 && !omp_is_allocatable_or_ptr (ovar))
13574 || TREE_CODE (type) == ARRAY_TYPE)
13575 var = build_fold_addr_expr (var);
13576 else
13578 if (omp_privatize_by_reference (ovar)
13579 || omp_check_optional_argument (ovar, false)
13580 || omp_is_allocatable_or_ptr (ovar))
13582 type = TREE_TYPE (type);
13583 if (POINTER_TYPE_P (type)
13584 && TREE_CODE (type) != ARRAY_TYPE
13585 && ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_USE_DEVICE_ADDR
13586 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_HAS_DEVICE_ADDR
13587 && !omp_is_allocatable_or_ptr (ovar))
13588 || (omp_privatize_by_reference (ovar)
13589 && omp_is_allocatable_or_ptr (ovar))))
13590 var = build_simple_mem_ref (var);
13591 var = fold_convert (TREE_TYPE (x), var);
13594 tree present;
13595 present = omp_check_optional_argument (ovar, true);
13596 if (present)
13598 tree null_label = create_artificial_label (UNKNOWN_LOCATION);
13599 tree notnull_label = create_artificial_label (UNKNOWN_LOCATION);
13600 tree opt_arg_label = create_artificial_label (UNKNOWN_LOCATION);
13601 tree new_x = unshare_expr (x);
13602 gimplify_expr (&present, &ilist, NULL, is_gimple_val,
13603 fb_rvalue);
13604 gcond *cond = gimple_build_cond_from_tree (present,
13605 notnull_label,
13606 null_label);
13607 gimple_seq_add_stmt (&ilist, cond);
13608 gimple_seq_add_stmt (&ilist, gimple_build_label (null_label));
13609 gimplify_assign (new_x, null_pointer_node, &ilist);
13610 gimple_seq_add_stmt (&ilist, gimple_build_goto (opt_arg_label));
13611 gimple_seq_add_stmt (&ilist,
13612 gimple_build_label (notnull_label));
13613 gimplify_assign (x, var, &ilist);
13614 gimple_seq_add_stmt (&ilist,
13615 gimple_build_label (opt_arg_label));
13617 else
13618 gimplify_assign (x, var, &ilist);
13619 s = size_int (0);
13620 purpose = size_int (map_idx++);
13621 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
13622 gcc_checking_assert (tkind
13623 < (HOST_WIDE_INT_C (1U) << talign_shift));
13624 gcc_checking_assert (tkind
13625 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
13626 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
13627 build_int_cstu (tkind_type, tkind));
13628 break;
13631 gcc_assert (map_idx == map_cnt);
13633 DECL_INITIAL (TREE_VEC_ELT (t, 1))
13634 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 1)), vsize);
13635 DECL_INITIAL (TREE_VEC_ELT (t, 2))
13636 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 2)), vkind);
13637 for (int i = 1; i <= 2; i++)
13638 if (!TREE_STATIC (TREE_VEC_ELT (t, i)))
13640 gimple_seq initlist = NULL;
13641 force_gimple_operand (build1 (DECL_EXPR, void_type_node,
13642 TREE_VEC_ELT (t, i)),
13643 &initlist, true, NULL_TREE);
13644 gimple_seq_add_seq (&ilist, initlist);
13646 tree clobber = build_clobber (TREE_TYPE (TREE_VEC_ELT (t, i)));
13647 gimple_seq_add_stmt (&olist,
13648 gimple_build_assign (TREE_VEC_ELT (t, i),
13649 clobber));
13651 else if (omp_maybe_offloaded_ctx (ctx->outer))
13653 tree id = get_identifier ("omp declare target");
13654 tree decl = TREE_VEC_ELT (t, i);
13655 DECL_ATTRIBUTES (decl)
13656 = tree_cons (id, NULL_TREE, DECL_ATTRIBUTES (decl));
13657 varpool_node *node = varpool_node::get (decl);
13658 if (node)
13660 node->offloadable = 1;
13661 if (ENABLE_OFFLOADING)
13663 g->have_offload = true;
13664 vec_safe_push (offload_vars, t);
13669 tree clobber = build_clobber (ctx->record_type);
13670 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
13671 clobber));
13674 /* Once all the expansions are done, sequence all the different
13675 fragments inside gimple_omp_body. */
13677 new_body = NULL;
13679 if (offloaded
13680 && ctx->record_type)
13682 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
13683 /* fixup_child_record_type might have changed receiver_decl's type. */
13684 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
13685 gimple_seq_add_stmt (&new_body,
13686 gimple_build_assign (ctx->receiver_decl, t));
13688 gimple_seq_add_seq (&new_body, fplist);
13690 if (offloaded || data_region)
13692 tree prev = NULL_TREE;
13693 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
13694 switch (OMP_CLAUSE_CODE (c))
13696 tree var, x;
13697 default:
13698 break;
13699 case OMP_CLAUSE_FIRSTPRIVATE:
13700 omp_firstprivatize_data_region:
13701 if (is_gimple_omp_oacc (ctx->stmt))
13702 break;
13703 var = OMP_CLAUSE_DECL (c);
13704 if (omp_privatize_by_reference (var)
13705 || is_gimple_reg_type (TREE_TYPE (var)))
13707 tree new_var = lookup_decl (var, ctx);
13708 tree type;
13709 type = TREE_TYPE (var);
13710 if (omp_privatize_by_reference (var))
13711 type = TREE_TYPE (type);
13712 if ((INTEGRAL_TYPE_P (type)
13713 && TYPE_PRECISION (type) <= POINTER_SIZE)
13714 || TREE_CODE (type) == POINTER_TYPE)
13716 x = build_receiver_ref (var, false, ctx);
13717 if (TREE_CODE (type) != POINTER_TYPE)
13718 x = fold_convert (pointer_sized_int_node, x);
13719 x = fold_convert (type, x);
13720 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
13721 fb_rvalue);
13722 if (omp_privatize_by_reference (var))
13724 tree v = create_tmp_var_raw (type, get_name (var));
13725 gimple_add_tmp_var (v);
13726 TREE_ADDRESSABLE (v) = 1;
13727 gimple_seq_add_stmt (&new_body,
13728 gimple_build_assign (v, x));
13729 x = build_fold_addr_expr (v);
13731 gimple_seq_add_stmt (&new_body,
13732 gimple_build_assign (new_var, x));
13734 else
13736 bool by_ref = !omp_privatize_by_reference (var);
13737 x = build_receiver_ref (var, by_ref, ctx);
13738 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
13739 fb_rvalue);
13740 gimple_seq_add_stmt (&new_body,
13741 gimple_build_assign (new_var, x));
13744 else if (is_variable_sized (var))
13746 tree pvar = DECL_VALUE_EXPR (var);
13747 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
13748 pvar = TREE_OPERAND (pvar, 0);
13749 gcc_assert (DECL_P (pvar));
13750 tree new_var = lookup_decl (pvar, ctx);
13751 x = build_receiver_ref (var, false, ctx);
13752 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
13753 gimple_seq_add_stmt (&new_body,
13754 gimple_build_assign (new_var, x));
13756 break;
13757 case OMP_CLAUSE_PRIVATE:
13758 if (is_gimple_omp_oacc (ctx->stmt))
13759 break;
13760 var = OMP_CLAUSE_DECL (c);
13761 if (omp_privatize_by_reference (var))
13763 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
13764 tree new_var = lookup_decl (var, ctx);
13765 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
13766 if (TREE_CONSTANT (x))
13768 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
13769 get_name (var));
13770 gimple_add_tmp_var (x);
13771 TREE_ADDRESSABLE (x) = 1;
13772 x = build_fold_addr_expr_loc (clause_loc, x);
13774 else
13775 break;
13777 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
13778 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
13779 gimple_seq_add_stmt (&new_body,
13780 gimple_build_assign (new_var, x));
13782 break;
13783 case OMP_CLAUSE_USE_DEVICE_PTR:
13784 case OMP_CLAUSE_USE_DEVICE_ADDR:
13785 case OMP_CLAUSE_HAS_DEVICE_ADDR:
13786 case OMP_CLAUSE_IS_DEVICE_PTR:
13787 tree new_var;
13788 gimple_seq assign_body;
13789 bool is_array_data;
13790 bool do_optional_check;
13791 assign_body = NULL;
13792 do_optional_check = false;
13793 var = OMP_CLAUSE_DECL (c);
13794 is_array_data = lang_hooks.decls.omp_array_data (var, true) != NULL;
13795 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR && is_array_data)
13796 goto omp_firstprivatize_data_region;
13798 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR
13799 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_HAS_DEVICE_ADDR)
13800 x = build_sender_ref (is_array_data
13801 ? (splay_tree_key) &DECL_NAME (var)
13802 : (splay_tree_key) &DECL_UID (var), ctx);
13803 else
13805 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR)
13807 while (TREE_CODE (var) == INDIRECT_REF
13808 || TREE_CODE (var) == ARRAY_REF)
13809 var = TREE_OPERAND (var, 0);
13811 x = build_receiver_ref (var, false, ctx);
13814 if (is_array_data)
13816 bool is_ref = omp_privatize_by_reference (var);
13817 do_optional_check = true;
13818 /* First, we copy the descriptor data from the host; then
13819 we update its data to point to the target address. */
13820 new_var = lookup_decl (var, ctx);
13821 new_var = DECL_VALUE_EXPR (new_var);
13822 tree v = new_var;
13823 tree v2 = var;
13824 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR
13825 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR)
13826 v2 = maybe_lookup_decl_in_outer_ctx (var, ctx);
13828 if (is_ref)
13830 v2 = build_fold_indirect_ref (v2);
13831 v = create_tmp_var_raw (TREE_TYPE (v2), get_name (var));
13832 gimple_add_tmp_var (v);
13833 TREE_ADDRESSABLE (v) = 1;
13834 gimplify_assign (v, v2, &assign_body);
13835 tree rhs = build_fold_addr_expr (v);
13836 gimple_seq_add_stmt (&assign_body,
13837 gimple_build_assign (new_var, rhs));
13839 else
13840 gimplify_assign (new_var, v2, &assign_body);
13842 v2 = lang_hooks.decls.omp_array_data (unshare_expr (v), false);
13843 gcc_assert (v2);
13844 gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
13845 gimple_seq_add_stmt (&assign_body,
13846 gimple_build_assign (v2, x));
13848 else if (is_variable_sized (var))
13850 tree pvar = DECL_VALUE_EXPR (var);
13851 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
13852 pvar = TREE_OPERAND (pvar, 0);
13853 gcc_assert (DECL_P (pvar));
13854 new_var = lookup_decl (pvar, ctx);
13855 gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
13856 gimple_seq_add_stmt (&assign_body,
13857 gimple_build_assign (new_var, x));
13859 else if (((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
13860 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR)
13861 && !omp_privatize_by_reference (var)
13862 && !omp_is_allocatable_or_ptr (var))
13863 || TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
13865 new_var = lookup_decl (var, ctx);
13866 new_var = DECL_VALUE_EXPR (new_var);
13867 gcc_assert (TREE_CODE (new_var) == MEM_REF);
13868 new_var = TREE_OPERAND (new_var, 0);
13869 gcc_assert (DECL_P (new_var));
13870 gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
13871 gimple_seq_add_stmt (&assign_body,
13872 gimple_build_assign (new_var, x));
13874 else
13876 tree type = TREE_TYPE (var);
13877 new_var = lookup_decl (var, ctx);
13878 if (omp_privatize_by_reference (var))
13880 type = TREE_TYPE (type);
13881 if (POINTER_TYPE_P (type)
13882 && TREE_CODE (type) != ARRAY_TYPE
13883 && ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_USE_DEVICE_ADDR
13884 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_HAS_DEVICE_ADDR)
13885 || (omp_privatize_by_reference (var)
13886 && omp_is_allocatable_or_ptr (var))))
13888 tree v = create_tmp_var_raw (type, get_name (var));
13889 gimple_add_tmp_var (v);
13890 TREE_ADDRESSABLE (v) = 1;
13891 x = fold_convert (type, x);
13892 gimplify_expr (&x, &assign_body, NULL, is_gimple_val,
13893 fb_rvalue);
13894 gimple_seq_add_stmt (&assign_body,
13895 gimple_build_assign (v, x));
13896 x = build_fold_addr_expr (v);
13897 do_optional_check = true;
13900 new_var = DECL_VALUE_EXPR (new_var);
13901 x = fold_convert (TREE_TYPE (new_var), x);
13902 gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
13903 gimple_seq_add_stmt (&assign_body,
13904 gimple_build_assign (new_var, x));
13906 tree present;
13907 present = ((do_optional_check
13908 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_HAS_DEVICE_ADDR)
13909 ? omp_check_optional_argument (OMP_CLAUSE_DECL (c), true)
13910 : NULL_TREE);
13911 if (present)
13913 tree null_label = create_artificial_label (UNKNOWN_LOCATION);
13914 tree notnull_label = create_artificial_label (UNKNOWN_LOCATION);
13915 tree opt_arg_label = create_artificial_label (UNKNOWN_LOCATION);
13916 glabel *null_glabel = gimple_build_label (null_label);
13917 glabel *notnull_glabel = gimple_build_label (notnull_label);
13918 ggoto *opt_arg_ggoto = gimple_build_goto (opt_arg_label);
13919 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
13920 fb_rvalue);
13921 gimplify_expr (&present, &new_body, NULL, is_gimple_val,
13922 fb_rvalue);
13923 gcond *cond = gimple_build_cond_from_tree (present,
13924 notnull_label,
13925 null_label);
13926 gimple_seq_add_stmt (&new_body, cond);
13927 gimple_seq_add_stmt (&new_body, null_glabel);
13928 gimplify_assign (new_var, null_pointer_node, &new_body);
13929 gimple_seq_add_stmt (&new_body, opt_arg_ggoto);
13930 gimple_seq_add_stmt (&new_body, notnull_glabel);
13931 gimple_seq_add_seq (&new_body, assign_body);
13932 gimple_seq_add_stmt (&new_body,
13933 gimple_build_label (opt_arg_label));
13935 else
13936 gimple_seq_add_seq (&new_body, assign_body);
13937 break;
13939 /* Handle GOMP_MAP_FIRSTPRIVATE_{POINTER,REFERENCE} in second pass,
13940 so that firstprivate vars holding OMP_CLAUSE_SIZE if needed
13941 are already handled. Similarly OMP_CLAUSE_PRIVATE for VLAs
13942 or references to VLAs. */
13943 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
13944 switch (OMP_CLAUSE_CODE (c))
13946 tree var;
13947 default:
13948 break;
13949 case OMP_CLAUSE_MAP:
13950 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
13951 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
13953 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
13954 poly_int64 offset = 0;
13955 gcc_assert (prev);
13956 var = OMP_CLAUSE_DECL (c);
13957 if (DECL_P (var)
13958 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
13959 && is_global_var (maybe_lookup_decl_in_outer_ctx (var,
13960 ctx))
13961 && varpool_node::get_create (var)->offloadable)
13962 break;
13963 if (TREE_CODE (var) == INDIRECT_REF
13964 && TREE_CODE (TREE_OPERAND (var, 0)) == COMPONENT_REF)
13965 var = TREE_OPERAND (var, 0);
13966 if (TREE_CODE (var) == COMPONENT_REF)
13968 var = get_addr_base_and_unit_offset (var, &offset);
13969 gcc_assert (var != NULL_TREE && DECL_P (var));
13971 else if (DECL_SIZE (var)
13972 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
13974 tree var2 = DECL_VALUE_EXPR (var);
13975 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
13976 var2 = TREE_OPERAND (var2, 0);
13977 gcc_assert (DECL_P (var2));
13978 var = var2;
13980 tree new_var = lookup_decl (var, ctx), x;
13981 tree type = TREE_TYPE (new_var);
13982 bool is_ref;
13983 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == INDIRECT_REF
13984 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
13985 == COMPONENT_REF))
13987 type = TREE_TYPE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0));
13988 is_ref = true;
13989 new_var = build2 (MEM_REF, type,
13990 build_fold_addr_expr (new_var),
13991 build_int_cst (build_pointer_type (type),
13992 offset));
13994 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPONENT_REF)
13996 type = TREE_TYPE (OMP_CLAUSE_DECL (c));
13997 is_ref = TREE_CODE (type) == REFERENCE_TYPE;
13998 new_var = build2 (MEM_REF, type,
13999 build_fold_addr_expr (new_var),
14000 build_int_cst (build_pointer_type (type),
14001 offset));
14003 else
14004 is_ref = omp_privatize_by_reference (var);
14005 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
14006 is_ref = false;
14007 bool ref_to_array = false;
14008 if (is_ref)
14010 type = TREE_TYPE (type);
14011 if (TREE_CODE (type) == ARRAY_TYPE)
14013 type = build_pointer_type (type);
14014 ref_to_array = true;
14017 else if (TREE_CODE (type) == ARRAY_TYPE)
14019 tree decl2 = DECL_VALUE_EXPR (new_var);
14020 gcc_assert (TREE_CODE (decl2) == MEM_REF);
14021 decl2 = TREE_OPERAND (decl2, 0);
14022 gcc_assert (DECL_P (decl2));
14023 new_var = decl2;
14024 type = TREE_TYPE (new_var);
14026 x = build_receiver_ref (OMP_CLAUSE_DECL (prev), false, ctx);
14027 x = fold_convert_loc (clause_loc, type, x);
14028 if (!integer_zerop (OMP_CLAUSE_SIZE (c)))
14030 tree bias = OMP_CLAUSE_SIZE (c);
14031 if (DECL_P (bias))
14032 bias = lookup_decl (bias, ctx);
14033 bias = fold_convert_loc (clause_loc, sizetype, bias);
14034 bias = fold_build1_loc (clause_loc, NEGATE_EXPR, sizetype,
14035 bias);
14036 x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
14037 TREE_TYPE (x), x, bias);
14039 if (ref_to_array)
14040 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
14041 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
14042 if (is_ref && !ref_to_array)
14044 tree t = create_tmp_var_raw (type, get_name (var));
14045 gimple_add_tmp_var (t);
14046 TREE_ADDRESSABLE (t) = 1;
14047 gimple_seq_add_stmt (&new_body,
14048 gimple_build_assign (t, x));
14049 x = build_fold_addr_expr_loc (clause_loc, t);
14051 gimple_seq_add_stmt (&new_body,
14052 gimple_build_assign (new_var, x));
14053 prev = NULL_TREE;
14055 else if (OMP_CLAUSE_CHAIN (c)
14056 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c))
14057 == OMP_CLAUSE_MAP
14058 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
14059 == GOMP_MAP_FIRSTPRIVATE_POINTER
14060 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
14061 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
14062 prev = c;
14063 break;
14064 case OMP_CLAUSE_PRIVATE:
14065 var = OMP_CLAUSE_DECL (c);
14066 if (is_variable_sized (var))
14068 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
14069 tree new_var = lookup_decl (var, ctx);
14070 tree pvar = DECL_VALUE_EXPR (var);
14071 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
14072 pvar = TREE_OPERAND (pvar, 0);
14073 gcc_assert (DECL_P (pvar));
14074 tree new_pvar = lookup_decl (pvar, ctx);
14075 tree atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
14076 tree al = size_int (DECL_ALIGN (var));
14077 tree x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
14078 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
14079 x = fold_convert_loc (clause_loc, TREE_TYPE (new_pvar), x);
14080 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
14081 gimple_seq_add_stmt (&new_body,
14082 gimple_build_assign (new_pvar, x));
14084 else if (omp_privatize_by_reference (var)
14085 && !is_gimple_omp_oacc (ctx->stmt))
14087 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
14088 tree new_var = lookup_decl (var, ctx);
14089 tree x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
14090 if (TREE_CONSTANT (x))
14091 break;
14092 else
14094 tree atmp
14095 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
14096 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
14097 tree al = size_int (TYPE_ALIGN (rtype));
14098 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
14101 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
14102 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
14103 gimple_seq_add_stmt (&new_body,
14104 gimple_build_assign (new_var, x));
14106 break;
14109 gimple_seq fork_seq = NULL;
14110 gimple_seq join_seq = NULL;
14112 if (offloaded && is_gimple_omp_oacc (ctx->stmt))
14114 /* If there are reductions on the offloaded region itself, treat
14115 them as a dummy GANG loop. */
14116 tree level = build_int_cst (integer_type_node, GOMP_DIM_GANG);
14118 gcall *private_marker = lower_oacc_private_marker (ctx);
14120 if (private_marker)
14121 gimple_call_set_arg (private_marker, 2, level);
14123 lower_oacc_reductions (gimple_location (ctx->stmt), clauses, level,
14124 false, NULL, private_marker, NULL, &fork_seq,
14125 &join_seq, ctx);
14128 gimple_seq_add_seq (&new_body, fork_seq);
14129 gimple_seq_add_seq (&new_body, tgt_body);
14130 gimple_seq_add_seq (&new_body, join_seq);
14132 if (offloaded)
14134 new_body = maybe_catch_exception (new_body);
14135 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
14137 gimple_omp_set_body (stmt, new_body);
14140 bind = gimple_build_bind (NULL, NULL,
14141 tgt_bind ? gimple_bind_block (tgt_bind)
14142 : NULL_TREE);
14143 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
14144 gimple_bind_add_seq (bind, ilist);
14145 gimple_bind_add_stmt (bind, stmt);
14146 gimple_bind_add_seq (bind, olist);
14148 pop_gimplify_context (NULL);
14150 if (dep_bind)
14152 gimple_bind_add_seq (dep_bind, dep_ilist);
14153 gimple_bind_add_stmt (dep_bind, bind);
14154 gimple_bind_add_seq (dep_bind, dep_olist);
14155 pop_gimplify_context (dep_bind);
14159 /* Expand code for an OpenMP teams directive. */
14161 static void
14162 lower_omp_teams (gimple_stmt_iterator *gsi_p, omp_context *ctx)
14164 gomp_teams *teams_stmt = as_a <gomp_teams *> (gsi_stmt (*gsi_p));
14165 push_gimplify_context ();
14167 tree block = make_node (BLOCK);
14168 gbind *bind = gimple_build_bind (NULL, NULL, block);
14169 gsi_replace (gsi_p, bind, true);
14170 gimple_seq bind_body = NULL;
14171 gimple_seq dlist = NULL;
14172 gimple_seq olist = NULL;
14174 tree num_teams = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
14175 OMP_CLAUSE_NUM_TEAMS);
14176 tree num_teams_lower = NULL_TREE;
14177 if (num_teams == NULL_TREE)
14178 num_teams = build_int_cst (unsigned_type_node, 0);
14179 else
14181 num_teams_lower = OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (num_teams);
14182 if (num_teams_lower)
14184 num_teams_lower = fold_convert (unsigned_type_node, num_teams_lower);
14185 gimplify_expr (&num_teams_lower, &bind_body, NULL, is_gimple_val,
14186 fb_rvalue);
14188 num_teams = OMP_CLAUSE_NUM_TEAMS_UPPER_EXPR (num_teams);
14189 num_teams = fold_convert (unsigned_type_node, num_teams);
14190 gimplify_expr (&num_teams, &bind_body, NULL, is_gimple_val, fb_rvalue);
14192 if (num_teams_lower == NULL_TREE)
14193 num_teams_lower = num_teams;
14194 tree thread_limit = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
14195 OMP_CLAUSE_THREAD_LIMIT);
14196 if (thread_limit == NULL_TREE)
14197 thread_limit = build_int_cst (unsigned_type_node, 0);
14198 else
14200 thread_limit = OMP_CLAUSE_THREAD_LIMIT_EXPR (thread_limit);
14201 thread_limit = fold_convert (unsigned_type_node, thread_limit);
14202 gimplify_expr (&thread_limit, &bind_body, NULL, is_gimple_val,
14203 fb_rvalue);
14205 location_t loc = gimple_location (teams_stmt);
14206 tree decl = builtin_decl_explicit (BUILT_IN_GOMP_TEAMS4);
14207 tree rettype = TREE_TYPE (TREE_TYPE (decl));
14208 tree first = create_tmp_var (rettype);
14209 gimple_seq_add_stmt (&bind_body,
14210 gimple_build_assign (first, build_one_cst (rettype)));
14211 tree llabel = create_artificial_label (loc);
14212 gimple_seq_add_stmt (&bind_body, gimple_build_label (llabel));
14213 gimple *call
14214 = gimple_build_call (decl, 4, num_teams_lower, num_teams, thread_limit,
14215 first);
14216 gimple_set_location (call, loc);
14217 tree temp = create_tmp_var (rettype);
14218 gimple_call_set_lhs (call, temp);
14219 gimple_seq_add_stmt (&bind_body, call);
14221 tree tlabel = create_artificial_label (loc);
14222 tree flabel = create_artificial_label (loc);
14223 gimple *cond = gimple_build_cond (NE_EXPR, temp, build_zero_cst (rettype),
14224 tlabel, flabel);
14225 gimple_seq_add_stmt (&bind_body, cond);
14226 gimple_seq_add_stmt (&bind_body, gimple_build_label (tlabel));
14227 gimple_seq_add_stmt (&bind_body,
14228 gimple_build_assign (first, build_zero_cst (rettype)));
14230 lower_rec_input_clauses (gimple_omp_teams_clauses (teams_stmt),
14231 &bind_body, &dlist, ctx, NULL);
14232 lower_omp (gimple_omp_body_ptr (teams_stmt), ctx);
14233 lower_reduction_clauses (gimple_omp_teams_clauses (teams_stmt), &olist,
14234 NULL, ctx);
14235 gimple_seq_add_stmt (&bind_body, teams_stmt);
14237 gimple_seq_add_seq (&bind_body, gimple_omp_body (teams_stmt));
14238 gimple_omp_set_body (teams_stmt, NULL);
14239 gimple_seq_add_seq (&bind_body, olist);
14240 gimple_seq_add_seq (&bind_body, dlist);
14241 gimple_seq_add_stmt (&bind_body, gimple_build_omp_return (true));
14242 gimple_seq_add_stmt (&bind_body, gimple_build_goto (llabel));
14243 gimple_seq_add_stmt (&bind_body, gimple_build_label (flabel));
14244 gimple_bind_set_body (bind, bind_body);
14246 pop_gimplify_context (bind);
14248 gimple_bind_append_vars (bind, ctx->block_vars);
14249 BLOCK_VARS (block) = ctx->block_vars;
14250 if (BLOCK_VARS (block))
14251 TREE_USED (block) = 1;
14254 /* Callback for lower_omp_1. Return non-NULL if *tp needs to be
14255 regimplified. If DATA is non-NULL, lower_omp_1 is outside
14256 of OMP context, but with make_addressable_vars set. */
14258 static tree
14259 lower_omp_regimplify_p (tree *tp, int *walk_subtrees,
14260 void *data)
14262 tree t = *tp;
14264 /* Any variable with DECL_VALUE_EXPR needs to be regimplified. */
14265 if ((VAR_P (t) || TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
14266 && data == NULL
14267 && DECL_HAS_VALUE_EXPR_P (t))
14268 return t;
14270 if (make_addressable_vars
14271 && DECL_P (t)
14272 && bitmap_bit_p (make_addressable_vars, DECL_UID (t)))
14273 return t;
14275 /* If a global variable has been privatized, TREE_CONSTANT on
14276 ADDR_EXPR might be wrong. */
14277 if (data == NULL && TREE_CODE (t) == ADDR_EXPR)
14278 recompute_tree_invariant_for_addr_expr (t);
14280 *walk_subtrees = !IS_TYPE_OR_DECL_P (t);
14281 return NULL_TREE;
14284 /* Data to be communicated between lower_omp_regimplify_operands and
14285 lower_omp_regimplify_operands_p. */
14287 struct lower_omp_regimplify_operands_data
14289 omp_context *ctx;
14290 vec<tree> *decls;
14293 /* Helper function for lower_omp_regimplify_operands. Find
14294 omp_member_access_dummy_var vars and adjust temporarily their
14295 DECL_VALUE_EXPRs if needed. */
14297 static tree
14298 lower_omp_regimplify_operands_p (tree *tp, int *walk_subtrees,
14299 void *data)
14301 tree t = omp_member_access_dummy_var (*tp);
14302 if (t)
14304 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
14305 lower_omp_regimplify_operands_data *ldata
14306 = (lower_omp_regimplify_operands_data *) wi->info;
14307 tree o = maybe_lookup_decl (t, ldata->ctx);
14308 if (o != t)
14310 ldata->decls->safe_push (DECL_VALUE_EXPR (*tp));
14311 ldata->decls->safe_push (*tp);
14312 tree v = unshare_and_remap (DECL_VALUE_EXPR (*tp), t, o);
14313 SET_DECL_VALUE_EXPR (*tp, v);
14316 *walk_subtrees = !IS_TYPE_OR_DECL_P (*tp);
14317 return NULL_TREE;
14320 /* Wrapper around gimple_regimplify_operands that adjusts DECL_VALUE_EXPRs
14321 of omp_member_access_dummy_var vars during regimplification. */
14323 static void
14324 lower_omp_regimplify_operands (omp_context *ctx, gimple *stmt,
14325 gimple_stmt_iterator *gsi_p)
14327 auto_vec<tree, 10> decls;
14328 if (ctx)
14330 struct walk_stmt_info wi;
14331 memset (&wi, '\0', sizeof (wi));
14332 struct lower_omp_regimplify_operands_data data;
14333 data.ctx = ctx;
14334 data.decls = &decls;
14335 wi.info = &data;
14336 walk_gimple_op (stmt, lower_omp_regimplify_operands_p, &wi);
14338 gimple_regimplify_operands (stmt, gsi_p);
14339 while (!decls.is_empty ())
14341 tree t = decls.pop ();
14342 tree v = decls.pop ();
14343 SET_DECL_VALUE_EXPR (t, v);
14347 static void
14348 lower_omp_1 (gimple_stmt_iterator *gsi_p, omp_context *ctx)
14350 gimple *stmt = gsi_stmt (*gsi_p);
14351 struct walk_stmt_info wi;
14352 gcall *call_stmt;
14354 if (gimple_has_location (stmt))
14355 input_location = gimple_location (stmt);
14357 if (make_addressable_vars)
14358 memset (&wi, '\0', sizeof (wi));
14360 /* If we have issued syntax errors, avoid doing any heavy lifting.
14361 Just replace the OMP directives with a NOP to avoid
14362 confusing RTL expansion. */
14363 if (seen_error () && is_gimple_omp (stmt))
14365 gsi_replace (gsi_p, gimple_build_nop (), true);
14366 return;
14369 switch (gimple_code (stmt))
14371 case GIMPLE_COND:
14373 gcond *cond_stmt = as_a <gcond *> (stmt);
14374 if ((ctx || make_addressable_vars)
14375 && (walk_tree (gimple_cond_lhs_ptr (cond_stmt),
14376 lower_omp_regimplify_p,
14377 ctx ? NULL : &wi, NULL)
14378 || walk_tree (gimple_cond_rhs_ptr (cond_stmt),
14379 lower_omp_regimplify_p,
14380 ctx ? NULL : &wi, NULL)))
14381 lower_omp_regimplify_operands (ctx, cond_stmt, gsi_p);
14383 break;
14384 case GIMPLE_CATCH:
14385 lower_omp (gimple_catch_handler_ptr (as_a <gcatch *> (stmt)), ctx);
14386 break;
14387 case GIMPLE_EH_FILTER:
14388 lower_omp (gimple_eh_filter_failure_ptr (stmt), ctx);
14389 break;
14390 case GIMPLE_TRY:
14391 lower_omp (gimple_try_eval_ptr (stmt), ctx);
14392 lower_omp (gimple_try_cleanup_ptr (stmt), ctx);
14393 break;
14394 case GIMPLE_TRANSACTION:
14395 lower_omp (gimple_transaction_body_ptr (as_a <gtransaction *> (stmt)),
14396 ctx);
14397 break;
14398 case GIMPLE_BIND:
14399 if (ctx && is_gimple_omp_oacc (ctx->stmt))
14401 tree vars = gimple_bind_vars (as_a <gbind *> (stmt));
14402 oacc_privatization_scan_decl_chain (ctx, vars);
14404 lower_omp (gimple_bind_body_ptr (as_a <gbind *> (stmt)), ctx);
14405 maybe_remove_omp_member_access_dummy_vars (as_a <gbind *> (stmt));
14406 break;
14407 case GIMPLE_OMP_PARALLEL:
14408 case GIMPLE_OMP_TASK:
14409 ctx = maybe_lookup_ctx (stmt);
14410 gcc_assert (ctx);
14411 if (ctx->cancellable)
14412 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
14413 lower_omp_taskreg (gsi_p, ctx);
14414 break;
14415 case GIMPLE_OMP_FOR:
14416 ctx = maybe_lookup_ctx (stmt);
14417 gcc_assert (ctx);
14418 if (ctx->cancellable)
14419 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
14420 lower_omp_for (gsi_p, ctx);
14421 break;
14422 case GIMPLE_OMP_SECTIONS:
14423 ctx = maybe_lookup_ctx (stmt);
14424 gcc_assert (ctx);
14425 if (ctx->cancellable)
14426 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
14427 lower_omp_sections (gsi_p, ctx);
14428 break;
14429 case GIMPLE_OMP_SCOPE:
14430 ctx = maybe_lookup_ctx (stmt);
14431 gcc_assert (ctx);
14432 lower_omp_scope (gsi_p, ctx);
14433 break;
14434 case GIMPLE_OMP_SINGLE:
14435 ctx = maybe_lookup_ctx (stmt);
14436 gcc_assert (ctx);
14437 lower_omp_single (gsi_p, ctx);
14438 break;
14439 case GIMPLE_OMP_MASTER:
14440 case GIMPLE_OMP_MASKED:
14441 ctx = maybe_lookup_ctx (stmt);
14442 gcc_assert (ctx);
14443 lower_omp_master (gsi_p, ctx);
14444 break;
14445 case GIMPLE_OMP_TASKGROUP:
14446 ctx = maybe_lookup_ctx (stmt);
14447 gcc_assert (ctx);
14448 lower_omp_taskgroup (gsi_p, ctx);
14449 break;
14450 case GIMPLE_OMP_ORDERED:
14451 ctx = maybe_lookup_ctx (stmt);
14452 gcc_assert (ctx);
14453 lower_omp_ordered (gsi_p, ctx);
14454 break;
14455 case GIMPLE_OMP_SCAN:
14456 ctx = maybe_lookup_ctx (stmt);
14457 gcc_assert (ctx);
14458 lower_omp_scan (gsi_p, ctx);
14459 break;
14460 case GIMPLE_OMP_CRITICAL:
14461 ctx = maybe_lookup_ctx (stmt);
14462 gcc_assert (ctx);
14463 lower_omp_critical (gsi_p, ctx);
14464 break;
14465 case GIMPLE_OMP_ATOMIC_LOAD:
14466 if ((ctx || make_addressable_vars)
14467 && walk_tree (gimple_omp_atomic_load_rhs_ptr (
14468 as_a <gomp_atomic_load *> (stmt)),
14469 lower_omp_regimplify_p, ctx ? NULL : &wi, NULL))
14470 lower_omp_regimplify_operands (ctx, stmt, gsi_p);
14471 break;
14472 case GIMPLE_OMP_TARGET:
14473 ctx = maybe_lookup_ctx (stmt);
14474 gcc_assert (ctx);
14475 lower_omp_target (gsi_p, ctx);
14476 break;
14477 case GIMPLE_OMP_TEAMS:
14478 ctx = maybe_lookup_ctx (stmt);
14479 gcc_assert (ctx);
14480 if (gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
14481 lower_omp_taskreg (gsi_p, ctx);
14482 else
14483 lower_omp_teams (gsi_p, ctx);
14484 break;
14485 case GIMPLE_CALL:
14486 tree fndecl;
14487 call_stmt = as_a <gcall *> (stmt);
14488 fndecl = gimple_call_fndecl (call_stmt);
14489 if (fndecl
14490 && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
14491 switch (DECL_FUNCTION_CODE (fndecl))
14493 case BUILT_IN_GOMP_BARRIER:
14494 if (ctx == NULL)
14495 break;
14496 /* FALLTHRU */
14497 case BUILT_IN_GOMP_CANCEL:
14498 case BUILT_IN_GOMP_CANCELLATION_POINT:
14499 omp_context *cctx;
14500 cctx = ctx;
14501 if (gimple_code (cctx->stmt) == GIMPLE_OMP_SECTION)
14502 cctx = cctx->outer;
14503 gcc_assert (gimple_call_lhs (call_stmt) == NULL_TREE);
14504 if (!cctx->cancellable)
14506 if (DECL_FUNCTION_CODE (fndecl)
14507 == BUILT_IN_GOMP_CANCELLATION_POINT)
14509 stmt = gimple_build_nop ();
14510 gsi_replace (gsi_p, stmt, false);
14512 break;
14514 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_GOMP_BARRIER)
14516 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_BARRIER_CANCEL);
14517 gimple_call_set_fndecl (call_stmt, fndecl);
14518 gimple_call_set_fntype (call_stmt, TREE_TYPE (fndecl));
14520 tree lhs;
14521 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (fndecl)));
14522 gimple_call_set_lhs (call_stmt, lhs);
14523 tree fallthru_label;
14524 fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
14525 gimple *g;
14526 g = gimple_build_label (fallthru_label);
14527 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
14528 g = gimple_build_cond (NE_EXPR, lhs,
14529 fold_convert (TREE_TYPE (lhs),
14530 boolean_false_node),
14531 cctx->cancel_label, fallthru_label);
14532 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
14533 break;
14534 default:
14535 break;
14537 goto regimplify;
14539 case GIMPLE_ASSIGN:
14540 for (omp_context *up = ctx; up; up = up->outer)
14542 if (gimple_code (up->stmt) == GIMPLE_OMP_ORDERED
14543 || gimple_code (up->stmt) == GIMPLE_OMP_CRITICAL
14544 || gimple_code (up->stmt) == GIMPLE_OMP_TASKGROUP
14545 || gimple_code (up->stmt) == GIMPLE_OMP_SCOPE
14546 || gimple_code (up->stmt) == GIMPLE_OMP_SECTION
14547 || gimple_code (up->stmt) == GIMPLE_OMP_SCAN
14548 || (gimple_code (up->stmt) == GIMPLE_OMP_TARGET
14549 && (gimple_omp_target_kind (up->stmt)
14550 == GF_OMP_TARGET_KIND_DATA)))
14551 continue;
14552 else if (!up->lastprivate_conditional_map)
14553 break;
14554 tree lhs = get_base_address (gimple_assign_lhs (stmt));
14555 if (TREE_CODE (lhs) == MEM_REF
14556 && DECL_P (TREE_OPERAND (lhs, 0))
14557 && TREE_CODE (TREE_TYPE (TREE_OPERAND (lhs,
14558 0))) == REFERENCE_TYPE)
14559 lhs = TREE_OPERAND (lhs, 0);
14560 if (DECL_P (lhs))
14561 if (tree *v = up->lastprivate_conditional_map->get (lhs))
14563 tree clauses;
14564 if (up->combined_into_simd_safelen1)
14566 up = up->outer;
14567 if (gimple_code (up->stmt) == GIMPLE_OMP_SCAN)
14568 up = up->outer;
14570 if (gimple_code (up->stmt) == GIMPLE_OMP_FOR)
14571 clauses = gimple_omp_for_clauses (up->stmt);
14572 else
14573 clauses = gimple_omp_sections_clauses (up->stmt);
14574 tree c = omp_find_clause (clauses, OMP_CLAUSE__CONDTEMP_);
14575 if (!OMP_CLAUSE__CONDTEMP__ITER (c))
14576 c = omp_find_clause (OMP_CLAUSE_CHAIN (c),
14577 OMP_CLAUSE__CONDTEMP_);
14578 gcc_assert (OMP_CLAUSE__CONDTEMP__ITER (c));
14579 gimple *g = gimple_build_assign (*v, OMP_CLAUSE_DECL (c));
14580 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
14583 /* FALLTHRU */
14585 default:
14586 regimplify:
14587 if ((ctx || make_addressable_vars)
14588 && walk_gimple_op (stmt, lower_omp_regimplify_p,
14589 ctx ? NULL : &wi))
14591 /* Just remove clobbers, this should happen only if we have
14592 "privatized" local addressable variables in SIMD regions,
14593 the clobber isn't needed in that case and gimplifying address
14594 of the ARRAY_REF into a pointer and creating MEM_REF based
14595 clobber would create worse code than we get with the clobber
14596 dropped. */
14597 if (gimple_clobber_p (stmt))
14599 gsi_replace (gsi_p, gimple_build_nop (), true);
14600 break;
14602 lower_omp_regimplify_operands (ctx, stmt, gsi_p);
14604 break;
14608 static void
14609 lower_omp (gimple_seq *body, omp_context *ctx)
14611 location_t saved_location = input_location;
14612 gimple_stmt_iterator gsi;
14613 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
14614 lower_omp_1 (&gsi, ctx);
14615 /* During gimplification, we haven't folded statments inside offloading
14616 or taskreg regions (gimplify.cc:maybe_fold_stmt); do that now. */
14617 if (target_nesting_level || taskreg_nesting_level)
14618 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
14619 fold_stmt (&gsi);
14620 input_location = saved_location;
14623 /* Main entry point. */
14625 static unsigned int
14626 execute_lower_omp (void)
14628 gimple_seq body;
14629 int i;
14630 omp_context *ctx;
14632 /* This pass always runs, to provide PROP_gimple_lomp.
14633 But often, there is nothing to do. */
14634 if (flag_openacc == 0 && flag_openmp == 0
14635 && flag_openmp_simd == 0)
14636 return 0;
14638 all_contexts = splay_tree_new (splay_tree_compare_pointers, 0,
14639 delete_omp_context);
14641 body = gimple_body (current_function_decl);
14643 scan_omp (&body, NULL);
14644 gcc_assert (taskreg_nesting_level == 0);
14645 FOR_EACH_VEC_ELT (taskreg_contexts, i, ctx)
14646 finish_taskreg_scan (ctx);
14647 taskreg_contexts.release ();
14649 if (all_contexts->root)
14651 if (make_addressable_vars)
14652 push_gimplify_context ();
14653 lower_omp (&body, NULL);
14654 if (make_addressable_vars)
14655 pop_gimplify_context (NULL);
14658 if (all_contexts)
14660 splay_tree_delete (all_contexts);
14661 all_contexts = NULL;
14663 BITMAP_FREE (make_addressable_vars);
14664 BITMAP_FREE (global_nonaddressable_vars);
14666 /* If current function is a method, remove artificial dummy VAR_DECL created
14667 for non-static data member privatization, they aren't needed for
14668 debuginfo nor anything else, have been already replaced everywhere in the
14669 IL and cause problems with LTO. */
14670 if (DECL_ARGUMENTS (current_function_decl)
14671 && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
14672 && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
14673 == POINTER_TYPE))
14674 remove_member_access_dummy_vars (DECL_INITIAL (current_function_decl));
14676 for (auto task_stmt : task_cpyfns)
14677 finalize_task_copyfn (task_stmt);
14678 task_cpyfns.release ();
14679 return 0;
14682 namespace {
14684 const pass_data pass_data_lower_omp =
14686 GIMPLE_PASS, /* type */
14687 "omplower", /* name */
14688 OPTGROUP_OMP, /* optinfo_flags */
14689 TV_NONE, /* tv_id */
14690 PROP_gimple_any, /* properties_required */
14691 PROP_gimple_lomp | PROP_gimple_lomp_dev, /* properties_provided */
14692 0, /* properties_destroyed */
14693 0, /* todo_flags_start */
14694 0, /* todo_flags_finish */
14697 class pass_lower_omp : public gimple_opt_pass
14699 public:
14700 pass_lower_omp (gcc::context *ctxt)
14701 : gimple_opt_pass (pass_data_lower_omp, ctxt)
14704 /* opt_pass methods: */
14705 unsigned int execute (function *) final override
14707 return execute_lower_omp ();
14710 }; // class pass_lower_omp
14712 } // anon namespace
14714 gimple_opt_pass *
14715 make_pass_lower_omp (gcc::context *ctxt)
14717 return new pass_lower_omp (ctxt);
14720 /* The following is a utility to diagnose structured block violations.
14721 It is not part of the "omplower" pass, as that's invoked too late. It
14722 should be invoked by the respective front ends after gimplification. */
14724 static splay_tree all_labels;
14726 /* Check for mismatched contexts and generate an error if needed. Return
14727 true if an error is detected. */
14729 static bool
14730 diagnose_sb_0 (gimple_stmt_iterator *gsi_p,
14731 gimple *branch_ctx, gimple *label_ctx)
14733 gcc_checking_assert (!branch_ctx || is_gimple_omp (branch_ctx));
14734 gcc_checking_assert (!label_ctx || is_gimple_omp (label_ctx));
14736 if (label_ctx == branch_ctx)
14737 return false;
14739 const char* kind = NULL;
14741 if (flag_openacc)
14743 if ((branch_ctx && is_gimple_omp_oacc (branch_ctx))
14744 || (label_ctx && is_gimple_omp_oacc (label_ctx)))
14746 gcc_checking_assert (kind == NULL);
14747 kind = "OpenACC";
14750 if (kind == NULL)
14752 gcc_checking_assert (flag_openmp || flag_openmp_simd);
14753 kind = "OpenMP";
14756 /* Previously we kept track of the label's entire context in diagnose_sb_[12]
14757 so we could traverse it and issue a correct "exit" or "enter" error
14758 message upon a structured block violation.
14760 We built the context by building a list with tree_cons'ing, but there is
14761 no easy counterpart in gimple tuples. It seems like far too much work
14762 for issuing exit/enter error messages. If someone really misses the
14763 distinct error message... patches welcome. */
14765 #if 0
14766 /* Try to avoid confusing the user by producing and error message
14767 with correct "exit" or "enter" verbiage. We prefer "exit"
14768 unless we can show that LABEL_CTX is nested within BRANCH_CTX. */
14769 if (branch_ctx == NULL)
14770 exit_p = false;
14771 else
14773 while (label_ctx)
14775 if (TREE_VALUE (label_ctx) == branch_ctx)
14777 exit_p = false;
14778 break;
14780 label_ctx = TREE_CHAIN (label_ctx);
14784 if (exit_p)
14785 error ("invalid exit from %s structured block", kind);
14786 else
14787 error ("invalid entry to %s structured block", kind);
14788 #endif
14790 /* If it's obvious we have an invalid entry, be specific about the error. */
14791 if (branch_ctx == NULL)
14792 error ("invalid entry to %s structured block", kind);
14793 else
14795 /* Otherwise, be vague and lazy, but efficient. */
14796 error ("invalid branch to/from %s structured block", kind);
14799 gsi_replace (gsi_p, gimple_build_nop (), false);
14800 return true;
14803 /* Pass 1: Create a minimal tree of structured blocks, and record
14804 where each label is found. */
14806 static tree
14807 diagnose_sb_1 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
14808 struct walk_stmt_info *wi)
14810 gimple *context = (gimple *) wi->info;
14811 gimple *inner_context;
14812 gimple *stmt = gsi_stmt (*gsi_p);
14814 *handled_ops_p = true;
14816 switch (gimple_code (stmt))
14818 WALK_SUBSTMTS;
14820 case GIMPLE_OMP_PARALLEL:
14821 case GIMPLE_OMP_TASK:
14822 case GIMPLE_OMP_SCOPE:
14823 case GIMPLE_OMP_SECTIONS:
14824 case GIMPLE_OMP_SINGLE:
14825 case GIMPLE_OMP_SECTION:
14826 case GIMPLE_OMP_MASTER:
14827 case GIMPLE_OMP_MASKED:
14828 case GIMPLE_OMP_ORDERED:
14829 case GIMPLE_OMP_SCAN:
14830 case GIMPLE_OMP_CRITICAL:
14831 case GIMPLE_OMP_TARGET:
14832 case GIMPLE_OMP_TEAMS:
14833 case GIMPLE_OMP_TASKGROUP:
14834 /* The minimal context here is just the current OMP construct. */
14835 inner_context = stmt;
14836 wi->info = inner_context;
14837 walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
14838 wi->info = context;
14839 break;
14841 case GIMPLE_OMP_FOR:
14842 inner_context = stmt;
14843 wi->info = inner_context;
14844 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
14845 walk them. */
14846 walk_gimple_seq (gimple_omp_for_pre_body (stmt),
14847 diagnose_sb_1, NULL, wi);
14848 walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
14849 wi->info = context;
14850 break;
14852 case GIMPLE_LABEL:
14853 splay_tree_insert (all_labels,
14854 (splay_tree_key) gimple_label_label (
14855 as_a <glabel *> (stmt)),
14856 (splay_tree_value) context);
14857 break;
14859 default:
14860 break;
14863 return NULL_TREE;
14866 /* Pass 2: Check each branch and see if its context differs from that of
14867 the destination label's context. */
14869 static tree
14870 diagnose_sb_2 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
14871 struct walk_stmt_info *wi)
14873 gimple *context = (gimple *) wi->info;
14874 splay_tree_node n;
14875 gimple *stmt = gsi_stmt (*gsi_p);
14877 *handled_ops_p = true;
14879 switch (gimple_code (stmt))
14881 WALK_SUBSTMTS;
14883 case GIMPLE_OMP_PARALLEL:
14884 case GIMPLE_OMP_TASK:
14885 case GIMPLE_OMP_SCOPE:
14886 case GIMPLE_OMP_SECTIONS:
14887 case GIMPLE_OMP_SINGLE:
14888 case GIMPLE_OMP_SECTION:
14889 case GIMPLE_OMP_MASTER:
14890 case GIMPLE_OMP_MASKED:
14891 case GIMPLE_OMP_ORDERED:
14892 case GIMPLE_OMP_SCAN:
14893 case GIMPLE_OMP_CRITICAL:
14894 case GIMPLE_OMP_TARGET:
14895 case GIMPLE_OMP_TEAMS:
14896 case GIMPLE_OMP_TASKGROUP:
14897 wi->info = stmt;
14898 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
14899 wi->info = context;
14900 break;
14902 case GIMPLE_OMP_FOR:
14903 wi->info = stmt;
14904 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
14905 walk them. */
14906 walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt),
14907 diagnose_sb_2, NULL, wi);
14908 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
14909 wi->info = context;
14910 break;
14912 case GIMPLE_COND:
14914 gcond *cond_stmt = as_a <gcond *> (stmt);
14915 tree lab = gimple_cond_true_label (cond_stmt);
14916 if (lab)
14918 n = splay_tree_lookup (all_labels,
14919 (splay_tree_key) lab);
14920 diagnose_sb_0 (gsi_p, context,
14921 n ? (gimple *) n->value : NULL);
14923 lab = gimple_cond_false_label (cond_stmt);
14924 if (lab)
14926 n = splay_tree_lookup (all_labels,
14927 (splay_tree_key) lab);
14928 diagnose_sb_0 (gsi_p, context,
14929 n ? (gimple *) n->value : NULL);
14932 break;
14934 case GIMPLE_GOTO:
14936 tree lab = gimple_goto_dest (stmt);
14937 if (TREE_CODE (lab) != LABEL_DECL)
14938 break;
14940 n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
14941 diagnose_sb_0 (gsi_p, context, n ? (gimple *) n->value : NULL);
14943 break;
14945 case GIMPLE_SWITCH:
14947 gswitch *switch_stmt = as_a <gswitch *> (stmt);
14948 unsigned int i;
14949 for (i = 0; i < gimple_switch_num_labels (switch_stmt); ++i)
14951 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
14952 n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
14953 if (n && diagnose_sb_0 (gsi_p, context, (gimple *) n->value))
14954 break;
14957 break;
14959 case GIMPLE_RETURN:
14960 diagnose_sb_0 (gsi_p, context, NULL);
14961 break;
14963 default:
14964 break;
14967 return NULL_TREE;
14970 static unsigned int
14971 diagnose_omp_structured_block_errors (void)
14973 struct walk_stmt_info wi;
14974 gimple_seq body = gimple_body (current_function_decl);
14976 all_labels = splay_tree_new (splay_tree_compare_pointers, 0, 0);
14978 memset (&wi, 0, sizeof (wi));
14979 walk_gimple_seq (body, diagnose_sb_1, NULL, &wi);
14981 memset (&wi, 0, sizeof (wi));
14982 wi.want_locations = true;
14983 walk_gimple_seq_mod (&body, diagnose_sb_2, NULL, &wi);
14985 gimple_set_body (current_function_decl, body);
14987 splay_tree_delete (all_labels);
14988 all_labels = NULL;
14990 return 0;
14993 namespace {
14995 const pass_data pass_data_diagnose_omp_blocks =
14997 GIMPLE_PASS, /* type */
14998 "*diagnose_omp_blocks", /* name */
14999 OPTGROUP_OMP, /* optinfo_flags */
15000 TV_NONE, /* tv_id */
15001 PROP_gimple_any, /* properties_required */
15002 0, /* properties_provided */
15003 0, /* properties_destroyed */
15004 0, /* todo_flags_start */
15005 0, /* todo_flags_finish */
15008 class pass_diagnose_omp_blocks : public gimple_opt_pass
15010 public:
15011 pass_diagnose_omp_blocks (gcc::context *ctxt)
15012 : gimple_opt_pass (pass_data_diagnose_omp_blocks, ctxt)
15015 /* opt_pass methods: */
15016 bool gate (function *) final override
15018 return flag_openacc || flag_openmp || flag_openmp_simd;
15020 unsigned int execute (function *) final override
15022 return diagnose_omp_structured_block_errors ();
15025 }; // class pass_diagnose_omp_blocks
15027 } // anon namespace
15029 gimple_opt_pass *
15030 make_pass_diagnose_omp_blocks (gcc::context *ctxt)
15032 return new pass_diagnose_omp_blocks (ctxt);
15036 #include "gt-omp-low.h"