/* Lowering pass for OMP directives.  Converts OMP directives into explicit
   calls to the runtime library (libgomp), data marshalling to implement data
   sharing and copying clauses, offloading to accelerators, and more.

   Contributed by Diego Novillo <dnovillo@redhat.com>

   Copyright (C) 2005-2019 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

25 #include "config.h"
26 #include "system.h"
27 #include "coretypes.h"
28 #include "backend.h"
29 #include "target.h"
30 #include "tree.h"
31 #include "gimple.h"
32 #include "tree-pass.h"
33 #include "ssa.h"
34 #include "cgraph.h"
35 #include "pretty-print.h"
36 #include "diagnostic-core.h"
37 #include "fold-const.h"
38 #include "stor-layout.h"
39 #include "internal-fn.h"
40 #include "gimple-fold.h"
41 #include "gimplify.h"
42 #include "gimple-iterator.h"
43 #include "gimplify-me.h"
44 #include "gimple-walk.h"
45 #include "tree-iterator.h"
46 #include "tree-inline.h"
47 #include "langhooks.h"
48 #include "tree-dfa.h"
49 #include "tree-ssa.h"
50 #include "splay-tree.h"
51 #include "omp-general.h"
52 #include "omp-low.h"
53 #include "omp-grid.h"
54 #include "gimple-low.h"
55 #include "symbol-summary.h"
56 #include "tree-nested.h"
57 #include "context.h"
58 #include "gomp-constants.h"
59 #include "gimple-pretty-print.h"
60 #include "hsa-common.h"
61 #include "stringpool.h"
62 #include "attribs.h"
/* Lowering of OMP parallel and workshare constructs proceeds in two
   phases.  The first phase scans the function looking for OMP statements
   and then for variables that must be replaced to satisfy data sharing
   clauses.  The second phase expands code for the constructs, as well as
   re-gimplifying things when variables have been replaced with complex
   expressions.

   Final code generation is done by pass_expand_omp.  The flowgraph is
   scanned for regions which are then moved to a new
   function, to be invoked by the thread library, or offloaded.  */

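/* Editorial illustration, not part of the original sources: given

       #pragma omp parallel shared(x)
       x += 1;

   the two phases gather the communicated variables into an ad-hoc record
   and split the body into a child function, so that pass_expand_omp can
   emit, roughly,

       struct .omp_data_s .omp_data_o;
       .omp_data_o.x = ...;   // copy-in, or &x when passed by pointer
       __builtin_GOMP_parallel (foo._omp_fn.0, &.omp_data_o, 0, 0);

   with foo._omp_fn.0 reading its operands back out of its .omp_data_i
   argument.  The exact shape depends on the clauses; see
   use_pointer_for_field and install_var_field below.  */
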
/* Context structure.  Used to store information about each parallel
   directive in the code.  */

struct omp_context
{
  /* This field must be at the beginning, as we do "inheritance": Some
     callback functions for tree-inline.c (e.g., omp_copy_decl)
     receive a copy_body_data pointer that is up-casted to an
     omp_context pointer.  */
  copy_body_data cb;

  /* The tree of contexts corresponding to the encountered constructs.  */
  struct omp_context *outer;
  gimple *stmt;

  /* Map variables to fields in a structure that allows communication
     between sending and receiving threads.  */
  splay_tree field_map;
  tree record_type;
  tree sender_decl;
  tree receiver_decl;

  /* These are used just by task contexts, if task firstprivate fn is
     needed.  srecord_type is used to communicate from the thread
     that encountered the task construct to task firstprivate fn,
     record_type is allocated by GOMP_task, initialized by task firstprivate
     fn and passed to the task body fn.  */
  splay_tree sfield_map;
  tree srecord_type;

  /* A chain of variables to add to the top-level block surrounding the
     construct.  In the case of a parallel, this is in the child function.  */
  tree block_vars;

  /* Label to which GOMP_cancel{,llation_point} and explicit and implicit
     barriers should jump during the omplower pass.  */
  tree cancel_label;

  /* The sibling GIMPLE_OMP_FOR simd with _simt_ clause or NULL
     otherwise.  */
  gimple *simt_stmt;

  /* For task reductions registered in this context, a vector containing
     the length of the private copies block (if constant, otherwise NULL)
     and then offsets (if constant, otherwise NULL) for each entry.  */
  vec<tree> task_reductions;

  /* A hash map from the reduction clauses to the registered array
     elts.  */
  hash_map<tree, unsigned> *task_reduction_map;

  /* And a hash map from the lastprivate(conditional:) variables to their
     corresponding tracking loop iteration variables.  */
  hash_map<tree, tree> *lastprivate_conditional_map;

  /* Nesting depth of this context.  Used to beautify error messages re
     invalid gotos.  The outermost ctx is depth 1, with depth 0 being
     reserved for the main body of the function.  */
  int depth;

  /* True if this parallel directive is nested within another.  */
  bool is_nested;

  /* True if this construct can be cancelled.  */
  bool cancellable;

  /* True if lower_omp_1 should look up lastprivate conditional in parent
     context.  */
  bool combined_into_simd_safelen1;

  /* True if there is nested scan context with inclusive clause.  */
  bool scan_inclusive;

  /* True if there is nested scan context with exclusive clause.  */
  bool scan_exclusive;

  /* True in the second simd loop of for simd with inscan reductions.  */
  bool for_simd_scan_phase;

  /* True if there is order(concurrent) clause on the construct.  */
  bool order_concurrent;

  /* True if there is bind clause on the construct (i.e. a loop construct).  */
  bool loop_p;
};

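/* Editorial note, added for orientation: for a typical parallel region the
   maps above tie together as follows.  record_type is the ".omp_data_s"
   struct holding one FIELD_DECL per communicated variable, field_map maps
   each variable to its field, and sender_decl / receiver_decl are the
   ".omp_data_o" object filled in by the encountering thread and the
   ".omp_data_i" parameter read by the child function.  */
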
static splay_tree all_contexts;
static int taskreg_nesting_level;
static int target_nesting_level;
static bitmap task_shared_vars;
static bitmap global_nonaddressable_vars;
static vec<omp_context *> taskreg_contexts;

static void scan_omp (gimple_seq *, omp_context *);
static tree scan_omp_1_op (tree *, int *, void *);

#define WALK_SUBSTMTS  \
    case GIMPLE_BIND: \
    case GIMPLE_TRY: \
    case GIMPLE_CATCH: \
    case GIMPLE_EH_FILTER: \
    case GIMPLE_TRANSACTION: \
      /* The sub-statements for these should be walked.  */ \
      *handled_ops_p = false; \
      break;

/* Return true if CTX corresponds to an oacc parallel region.  */

static bool
is_oacc_parallel (omp_context *ctx)
{
  enum gimple_code outer_type = gimple_code (ctx->stmt);
  return ((outer_type == GIMPLE_OMP_TARGET)
          && (gimple_omp_target_kind (ctx->stmt)
              == GF_OMP_TARGET_KIND_OACC_PARALLEL));
}

/* Return true if CTX corresponds to an oacc kernels region.  */

static bool
is_oacc_kernels (omp_context *ctx)
{
  enum gimple_code outer_type = gimple_code (ctx->stmt);
  return ((outer_type == GIMPLE_OMP_TARGET)
          && (gimple_omp_target_kind (ctx->stmt)
              == GF_OMP_TARGET_KIND_OACC_KERNELS));
}

/* If DECL is the artificial dummy VAR_DECL created for non-static
   data member privatization, return the underlying "this" parameter,
   otherwise return NULL.  */

tree
omp_member_access_dummy_var (tree decl)
{
  if (!VAR_P (decl)
      || !DECL_ARTIFICIAL (decl)
      || !DECL_IGNORED_P (decl)
      || !DECL_HAS_VALUE_EXPR_P (decl)
      || !lang_hooks.decls.omp_disregard_value_expr (decl, false))
    return NULL_TREE;

  tree v = DECL_VALUE_EXPR (decl);
  if (TREE_CODE (v) != COMPONENT_REF)
    return NULL_TREE;

  while (1)
    switch (TREE_CODE (v))
      {
      case COMPONENT_REF:
      case MEM_REF:
      case INDIRECT_REF:
      CASE_CONVERT:
      case POINTER_PLUS_EXPR:
        v = TREE_OPERAND (v, 0);
        continue;
      case PARM_DECL:
        if (DECL_CONTEXT (v) == current_function_decl
            && DECL_ARTIFICIAL (v)
            && TREE_CODE (TREE_TYPE (v)) == POINTER_TYPE)
          return v;
        return NULL_TREE;
      default:
        return NULL_TREE;
      }
}

/* Helper for unshare_and_remap, called through walk_tree.  */

static tree
unshare_and_remap_1 (tree *tp, int *walk_subtrees, void *data)
{
  tree *pair = (tree *) data;
  if (*tp == pair[0])
    {
      *tp = unshare_expr (pair[1]);
      *walk_subtrees = 0;
    }
  else if (IS_TYPE_OR_DECL_P (*tp))
    *walk_subtrees = 0;
  return NULL_TREE;
}

/* Return unshare_expr (X) with all occurrences of FROM
   replaced with TO.  */

static tree
unshare_and_remap (tree x, tree from, tree to)
{
  tree pair[2] = { from, to };
  x = unshare_expr (x);
  walk_tree (&x, unshare_and_remap_1, pair, NULL);
  return x;
}

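/* Editorial example: build_outer_var_ref below uses this as
   unshare_and_remap (DECL_VALUE_EXPR (var), t, o) to rewrite a member
   access value expression so that it refers to the outer context's
   "this" parameter O instead of T.  */
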
/* Convenience function for calling scan_omp_1_op on tree operands.  */

static inline tree
scan_omp_op (tree *tp, omp_context *ctx)
{
  struct walk_stmt_info wi;

  memset (&wi, 0, sizeof (wi));
  wi.info = ctx;
  wi.want_locations = true;

  return walk_tree (tp, scan_omp_1_op, &wi, NULL);
}

static void lower_omp (gimple_seq *, omp_context *);
static tree lookup_decl_in_outer_ctx (tree, omp_context *);
static tree maybe_lookup_decl_in_outer_ctx (tree, omp_context *);

/* Return true if CTX is for an omp parallel.  */

static inline bool
is_parallel_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL;
}

/* Return true if CTX is for an omp task.  */

static inline bool
is_task_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_TASK;
}

/* Return true if CTX is for an omp taskloop.  */

static inline bool
is_taskloop_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
         && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP;
}

/* Return true if CTX is for a host omp teams.  */

static inline bool
is_host_teams_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
         && gimple_omp_teams_host (as_a <gomp_teams *> (ctx->stmt));
}

/* Return true if CTX is for an omp parallel or omp task or host omp teams
   (the last one is strictly not a task region in OpenMP speak, but we
   need to treat it similarly).  */

static inline bool
is_taskreg_ctx (omp_context *ctx)
{
  return is_parallel_ctx (ctx) || is_task_ctx (ctx) || is_host_teams_ctx (ctx);
}

/* Return true if EXPR is variable sized.  */

static inline bool
is_variable_sized (const_tree expr)
{
  return !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
}

/* Lookup variables.  The "maybe" form allows the variable to not have
   been entered; otherwise we assert that the variable must have been
   entered.  */

static inline tree
lookup_decl (tree var, omp_context *ctx)
{
  tree *n = ctx->cb.decl_map->get (var);
  return *n;
}

static inline tree
maybe_lookup_decl (const_tree var, omp_context *ctx)
{
  tree *n = ctx->cb.decl_map->get (const_cast<tree> (var));
  return n ? *n : NULL_TREE;
}

static inline tree
lookup_field (tree var, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->field_map, (splay_tree_key) var);
  return (tree) n->value;
}

static inline tree
lookup_sfield (splay_tree_key key, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->sfield_map
                         ? ctx->sfield_map : ctx->field_map, key);
  return (tree) n->value;
}

static inline tree
lookup_sfield (tree var, omp_context *ctx)
{
  return lookup_sfield ((splay_tree_key) var, ctx);
}

static inline tree
maybe_lookup_field (splay_tree_key key, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->field_map, key);
  return n ? (tree) n->value : NULL_TREE;
}

static inline tree
maybe_lookup_field (tree var, omp_context *ctx)
{
  return maybe_lookup_field ((splay_tree_key) var, ctx);
}

/* Return true if DECL should be copied by pointer.  SHARED_CTX is
   the parallel context if DECL is to be shared.  */

static bool
use_pointer_for_field (tree decl, omp_context *shared_ctx)
{
  if (AGGREGATE_TYPE_P (TREE_TYPE (decl))
      || TYPE_ATOMIC (TREE_TYPE (decl)))
    return true;

  /* We can only use copy-in/copy-out semantics for shared variables
     when we know the value is not accessible from an outer scope.  */
  if (shared_ctx)
    {
      gcc_assert (!is_gimple_omp_oacc (shared_ctx->stmt));

      /* ??? Trivially accessible from anywhere.  But why would we even
         be passing an address in this case?  Should we simply assert
         this to be false, or should we have a cleanup pass that removes
         these from the list of mappings?  */
      if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, shared_ctx)))
        return true;

      /* For variables with DECL_HAS_VALUE_EXPR_P set, we cannot tell
         without analyzing the expression whether or not its location
         is accessible to anyone else.  In the case of nested parallel
         regions it certainly may be.  */
      if (TREE_CODE (decl) != RESULT_DECL && DECL_HAS_VALUE_EXPR_P (decl))
        return true;

      /* Do not use copy-in/copy-out for variables that have their
         address taken.  */
      if (is_global_var (decl))
        {
          /* For file scope vars, track whether we've seen them as
             non-addressable initially and in that case, keep the same
             answer for the duration of the pass, even when they are made
             addressable later on e.g. through reduction expansion.  Global
             variables which weren't addressable before the pass will not
             have their privatized copies address taken.  See PR91216.  */
          if (!TREE_ADDRESSABLE (decl))
            {
              if (!global_nonaddressable_vars)
                global_nonaddressable_vars = BITMAP_ALLOC (NULL);
              bitmap_set_bit (global_nonaddressable_vars, DECL_UID (decl));
            }
          else if (!global_nonaddressable_vars
                   || !bitmap_bit_p (global_nonaddressable_vars,
                                     DECL_UID (decl)))
            return true;
        }
      else if (TREE_ADDRESSABLE (decl))
        return true;

      /* lower_send_shared_vars only uses copy-in, but not copy-out
         for these.  */
      if (TREE_READONLY (decl)
          || ((TREE_CODE (decl) == RESULT_DECL
               || TREE_CODE (decl) == PARM_DECL)
              && DECL_BY_REFERENCE (decl)))
        return false;

      /* Disallow copy-in/out in nested parallel if
         decl is shared in outer parallel, otherwise
         each thread could store the shared variable
         in its own copy-in location, making the
         variable no longer really shared.  */
      if (shared_ctx->is_nested)
        {
          omp_context *up;

          for (up = shared_ctx->outer; up; up = up->outer)
            if (is_taskreg_ctx (up) && maybe_lookup_decl (decl, up))
              break;

          if (up)
            {
              tree c;

              for (c = gimple_omp_taskreg_clauses (up->stmt);
                   c; c = OMP_CLAUSE_CHAIN (c))
                if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
                    && OMP_CLAUSE_DECL (c) == decl)
                  break;

              if (c)
                goto maybe_mark_addressable_and_ret;
            }
        }

      /* For tasks avoid using copy-in/out.  As tasks can be
         deferred or executed in different thread, when GOMP_task
         returns, the task hasn't necessarily terminated.  */
      if (is_task_ctx (shared_ctx))
        {
          tree outer;
        maybe_mark_addressable_and_ret:
          outer = maybe_lookup_decl_in_outer_ctx (decl, shared_ctx);
          if (is_gimple_reg (outer) && !omp_member_access_dummy_var (outer))
            {
              /* Taking address of OUTER in lower_send_shared_vars
                 might need regimplification of everything that uses the
                 variable.  */
              if (!task_shared_vars)
                task_shared_vars = BITMAP_ALLOC (NULL);
              bitmap_set_bit (task_shared_vars, DECL_UID (outer));
              TREE_ADDRESSABLE (outer) = 1;
            }
          return true;
        }
    }

  return false;
}

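/* Editorial summary of the decision above: aggregates, atomics, and
   addressable variables are always passed by pointer; a non-addressable
   scalar shared on a parallel can use copy-in/copy-out instead, unless it
   is shared in a nested parallel or with a task, in which case it is
   passed by pointer (and may be forced addressable here, recorded in
   task_shared_vars).  */
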
/* Construct a new automatic decl similar to VAR.  */

static tree
omp_copy_decl_2 (tree var, tree name, tree type, omp_context *ctx)
{
  tree copy = copy_var_decl (var, name, type);

  DECL_CONTEXT (copy) = current_function_decl;
  DECL_CHAIN (copy) = ctx->block_vars;
  /* If VAR is listed in task_shared_vars, it means it wasn't
     originally addressable and is just because task needs to take
     its address.  But we don't need to take address of privatizations
     from that var.  */
  if (TREE_ADDRESSABLE (var)
      && ((task_shared_vars
           && bitmap_bit_p (task_shared_vars, DECL_UID (var)))
          || (global_nonaddressable_vars
              && bitmap_bit_p (global_nonaddressable_vars, DECL_UID (var)))))
    TREE_ADDRESSABLE (copy) = 0;
  ctx->block_vars = copy;

  return copy;
}

static tree
omp_copy_decl_1 (tree var, omp_context *ctx)
{
  return omp_copy_decl_2 (var, DECL_NAME (var), TREE_TYPE (var), ctx);
}

/* Build COMPONENT_REF and set TREE_THIS_VOLATILE and TREE_READONLY on it
   as appropriate.  */

static tree
omp_build_component_ref (tree obj, tree field)
{
  tree ret = build3 (COMPONENT_REF, TREE_TYPE (field), obj, field, NULL);
  if (TREE_THIS_VOLATILE (field))
    TREE_THIS_VOLATILE (ret) |= 1;
  if (TREE_READONLY (field))
    TREE_READONLY (ret) |= 1;
  return ret;
}

/* Build tree nodes to access the field for VAR on the receiver side.  */

static tree
build_receiver_ref (tree var, bool by_ref, omp_context *ctx)
{
  tree x, field = lookup_field (var, ctx);

  /* If the receiver record type was remapped in the child function,
     remap the field into the new record type.  */
  x = maybe_lookup_field (field, ctx);
  if (x != NULL)
    field = x;

  x = build_simple_mem_ref (ctx->receiver_decl);
  TREE_THIS_NOTRAP (x) = 1;
  x = omp_build_component_ref (x, field);
  if (by_ref)
    {
      x = build_simple_mem_ref (x);
      TREE_THIS_NOTRAP (x) = 1;
    }

  return x;
}

/* Build tree nodes to access VAR in the scope outer to CTX.  In the case
   of a parallel, this is a component reference; for workshare constructs
   this is some variable.  */

static tree
build_outer_var_ref (tree var, omp_context *ctx,
                     enum omp_clause_code code = OMP_CLAUSE_ERROR)
{
  tree x;
  omp_context *outer = ctx->outer;
  while (outer && gimple_code (outer->stmt) == GIMPLE_OMP_TASKGROUP)
    outer = outer->outer;

  if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
    x = var;
  else if (is_variable_sized (var))
    {
      x = TREE_OPERAND (DECL_VALUE_EXPR (var), 0);
      x = build_outer_var_ref (x, ctx, code);
      x = build_simple_mem_ref (x);
    }
  else if (is_taskreg_ctx (ctx))
    {
      bool by_ref = use_pointer_for_field (var, NULL);
      x = build_receiver_ref (var, by_ref, ctx);
    }
  else if ((gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
            && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
           || ctx->loop_p
           || (code == OMP_CLAUSE_PRIVATE
               && (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
                   || gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS
                   || gimple_code (ctx->stmt) == GIMPLE_OMP_SINGLE)))
    {
      /* #pragma omp simd isn't a worksharing construct, and can reference
         even private vars in its linear etc. clauses.
         Similarly for OMP_CLAUSE_PRIVATE with outer ref, that can refer
         to private vars in all worksharing constructs.  */
      x = NULL_TREE;
      if (outer && is_taskreg_ctx (outer))
        x = lookup_decl (var, outer);
      else if (outer)
        x = maybe_lookup_decl_in_outer_ctx (var, ctx);
      if (x == NULL_TREE)
        x = var;
    }
  else if (code == OMP_CLAUSE_LASTPRIVATE && is_taskloop_ctx (ctx))
    {
      gcc_assert (outer);
      splay_tree_node n
        = splay_tree_lookup (outer->field_map,
                             (splay_tree_key) &DECL_UID (var));
      if (n == NULL)
        {
          if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, outer)))
            x = var;
          else
            x = lookup_decl (var, outer);
        }
      else
        {
          tree field = (tree) n->value;
          /* If the receiver record type was remapped in the child function,
             remap the field into the new record type.  */
          x = maybe_lookup_field (field, outer);
          if (x != NULL)
            field = x;

          x = build_simple_mem_ref (outer->receiver_decl);
          x = omp_build_component_ref (x, field);
          if (use_pointer_for_field (var, outer))
            x = build_simple_mem_ref (x);
        }
    }
  else if (outer)
    {
      if (gimple_code (outer->stmt) == GIMPLE_OMP_GRID_BODY)
        {
          outer = outer->outer;
          gcc_assert (outer
                      && gimple_code (outer->stmt) != GIMPLE_OMP_GRID_BODY);
        }
      x = lookup_decl (var, outer);
    }
  else if (omp_is_reference (var))
    /* This can happen with orphaned constructs.  If var is reference, it is
       possible it is shared and as such valid.  */
    x = var;
  else if (omp_member_access_dummy_var (var))
    x = var;
  else
    gcc_unreachable ();

  if (x == var)
    {
      tree t = omp_member_access_dummy_var (var);
      if (t)
        {
          x = DECL_VALUE_EXPR (var);
          tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
          if (o != t)
            x = unshare_and_remap (x, t, o);
          else
            x = unshare_expr (x);
        }
    }

  if (omp_is_reference (var))
    x = build_simple_mem_ref (x);

  return x;
}

/* Build tree nodes to access the field for VAR on the sender side.  */

static tree
build_sender_ref (splay_tree_key key, omp_context *ctx)
{
  tree field = lookup_sfield (key, ctx);
  return omp_build_component_ref (ctx->sender_decl, field);
}

static tree
build_sender_ref (tree var, omp_context *ctx)
{
  return build_sender_ref ((splay_tree_key) var, ctx);
}

/* Add a new field for VAR inside the structure CTX->SENDER_DECL.  (The
   earlier reference here to a BASE_POINTERS_RESTRICT parameter was stale;
   the function takes no such argument.)  */

static void
install_var_field (tree var, bool by_ref, int mask, omp_context *ctx)
{
  tree field, type, sfield = NULL_TREE;
  splay_tree_key key = (splay_tree_key) var;

  if ((mask & 8) != 0)
    {
      key = (splay_tree_key) &DECL_UID (var);
      gcc_checking_assert (key != (splay_tree_key) var);
    }
  gcc_assert ((mask & 1) == 0
              || !splay_tree_lookup (ctx->field_map, key));
  gcc_assert ((mask & 2) == 0 || !ctx->sfield_map
              || !splay_tree_lookup (ctx->sfield_map, key));
  gcc_assert ((mask & 3) == 3
              || !is_gimple_omp_oacc (ctx->stmt));

  type = TREE_TYPE (var);
  /* Prevent redeclaring the var in the split-off function with a restrict
     pointer type.  Note that we only clear type itself, restrict qualifiers in
     the pointed-to type will be ignored by points-to analysis.  */
  if (POINTER_TYPE_P (type)
      && TYPE_RESTRICT (type))
    type = build_qualified_type (type, TYPE_QUALS (type) & ~TYPE_QUAL_RESTRICT);

  if (mask & 4)
    {
      gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
      type = build_pointer_type (build_pointer_type (type));
    }
  else if (by_ref)
    type = build_pointer_type (type);
  else if ((mask & 3) == 1 && omp_is_reference (var))
    type = TREE_TYPE (type);

  field = build_decl (DECL_SOURCE_LOCATION (var),
                      FIELD_DECL, DECL_NAME (var), type);

  /* Remember what variable this field was created for.  This does have a
     side effect of making dwarf2out ignore this member, so for helpful
     debugging we clear it later in delete_omp_context.  */
  DECL_ABSTRACT_ORIGIN (field) = var;
  if (type == TREE_TYPE (var))
    {
      SET_DECL_ALIGN (field, DECL_ALIGN (var));
      DECL_USER_ALIGN (field) = DECL_USER_ALIGN (var);
      TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (var);
    }
  else
    SET_DECL_ALIGN (field, TYPE_ALIGN (type));

  if ((mask & 3) == 3)
    {
      insert_field_into_struct (ctx->record_type, field);
      if (ctx->srecord_type)
        {
          sfield = build_decl (DECL_SOURCE_LOCATION (var),
                               FIELD_DECL, DECL_NAME (var), type);
          DECL_ABSTRACT_ORIGIN (sfield) = var;
          SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
          DECL_USER_ALIGN (sfield) = DECL_USER_ALIGN (field);
          TREE_THIS_VOLATILE (sfield) = TREE_THIS_VOLATILE (field);
          insert_field_into_struct (ctx->srecord_type, sfield);
        }
    }
  else
    {
      if (ctx->srecord_type == NULL_TREE)
        {
          tree t;

          ctx->srecord_type = lang_hooks.types.make_type (RECORD_TYPE);
          ctx->sfield_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
          for (t = TYPE_FIELDS (ctx->record_type); t ; t = TREE_CHAIN (t))
            {
              sfield = build_decl (DECL_SOURCE_LOCATION (t),
                                   FIELD_DECL, DECL_NAME (t), TREE_TYPE (t));
              DECL_ABSTRACT_ORIGIN (sfield) = DECL_ABSTRACT_ORIGIN (t);
              insert_field_into_struct (ctx->srecord_type, sfield);
              splay_tree_insert (ctx->sfield_map,
                                 (splay_tree_key) DECL_ABSTRACT_ORIGIN (t),
                                 (splay_tree_value) sfield);
            }
        }
      sfield = field;
      insert_field_into_struct ((mask & 1) ? ctx->record_type
                                : ctx->srecord_type, field);
    }

  if (mask & 1)
    splay_tree_insert (ctx->field_map, key, (splay_tree_value) field);
  if ((mask & 2) && ctx->sfield_map)
    splay_tree_insert (ctx->sfield_map, key, (splay_tree_value) sfield);
}

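/* Editorial summary of the MASK bits accepted above, inferred from the
   code: bit 1 = install the field into record_type/field_map, bit 2 =
   into srecord_type/sfield_map (3 = both), bit 4 = wrap an array type in
   a pointer-to-pointer, and bit 8 = key the splay tree on
   &DECL_UID (var) rather than the decl itself, so a second, independent
   field can be installed for the same variable (hence mask 11 = 8|2|1 in
   the callers below).  */
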
static tree
install_var_local (tree var, omp_context *ctx)
{
  tree new_var = omp_copy_decl_1 (var, ctx);
  insert_decl_map (&ctx->cb, var, new_var);
  return new_var;
}

/* Adjust the replacement for DECL in CTX for the new context.  This means
   copying the DECL_VALUE_EXPR, and fixing up the type.  */

static void
fixup_remapped_decl (tree decl, omp_context *ctx, bool private_debug)
{
  tree new_decl, size;

  new_decl = lookup_decl (decl, ctx);

  TREE_TYPE (new_decl) = remap_type (TREE_TYPE (decl), &ctx->cb);

  if ((!TREE_CONSTANT (DECL_SIZE (new_decl)) || private_debug)
      && DECL_HAS_VALUE_EXPR_P (decl))
    {
      tree ve = DECL_VALUE_EXPR (decl);
      walk_tree (&ve, copy_tree_body_r, &ctx->cb, NULL);
      SET_DECL_VALUE_EXPR (new_decl, ve);
      DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
    }

  if (!TREE_CONSTANT (DECL_SIZE (new_decl)))
    {
      size = remap_decl (DECL_SIZE (decl), &ctx->cb);
      if (size == error_mark_node)
        size = TYPE_SIZE (TREE_TYPE (new_decl));
      DECL_SIZE (new_decl) = size;

      size = remap_decl (DECL_SIZE_UNIT (decl), &ctx->cb);
      if (size == error_mark_node)
        size = TYPE_SIZE_UNIT (TREE_TYPE (new_decl));
      DECL_SIZE_UNIT (new_decl) = size;
    }
}

/* The callback for remap_decl.  Search all containing contexts for a
   mapping of the variable; this avoids having to duplicate the splay
   tree ahead of time.  We know a mapping doesn't already exist in the
   given context.  Create new mappings to implement default semantics.  */

static tree
omp_copy_decl (tree var, copy_body_data *cb)
{
  omp_context *ctx = (omp_context *) cb;
  tree new_var;

  if (TREE_CODE (var) == LABEL_DECL)
    {
      if (FORCED_LABEL (var) || DECL_NONLOCAL (var))
        return var;
      new_var = create_artificial_label (DECL_SOURCE_LOCATION (var));
      DECL_CONTEXT (new_var) = current_function_decl;
      insert_decl_map (&ctx->cb, var, new_var);
      return new_var;
    }

  while (!is_taskreg_ctx (ctx))
    {
      ctx = ctx->outer;
      if (ctx == NULL)
        return var;
      new_var = maybe_lookup_decl (var, ctx);
      if (new_var)
        return new_var;
    }

  if (is_global_var (var) || decl_function_context (var) != ctx->cb.src_fn)
    return var;

  return error_mark_node;
}

/* Create a new context, with OUTER_CTX being the surrounding context.  */

static omp_context *
new_omp_context (gimple *stmt, omp_context *outer_ctx)
{
  omp_context *ctx = XCNEW (omp_context);

  splay_tree_insert (all_contexts, (splay_tree_key) stmt,
                     (splay_tree_value) ctx);
  ctx->stmt = stmt;

  if (outer_ctx)
    {
      ctx->outer = outer_ctx;
      ctx->cb = outer_ctx->cb;
      ctx->cb.block = NULL;
      ctx->depth = outer_ctx->depth + 1;
    }
  else
    {
      ctx->cb.src_fn = current_function_decl;
      ctx->cb.dst_fn = current_function_decl;
      ctx->cb.src_node = cgraph_node::get (current_function_decl);
      gcc_checking_assert (ctx->cb.src_node);
      ctx->cb.dst_node = ctx->cb.src_node;
      ctx->cb.src_cfun = cfun;
      ctx->cb.copy_decl = omp_copy_decl;
      ctx->cb.eh_lp_nr = 0;
      ctx->cb.transform_call_graph_edges = CB_CGE_MOVE;
      ctx->cb.adjust_array_error_bounds = true;
      ctx->cb.dont_remap_vla_if_no_change = true;
      ctx->depth = 1;
    }

  ctx->cb.decl_map = new hash_map<tree, tree>;

  return ctx;
}

static gimple_seq maybe_catch_exception (gimple_seq);

/* Finalize task copyfn.  */

static void
finalize_task_copyfn (gomp_task *task_stmt)
{
  struct function *child_cfun;
  tree child_fn;
  gimple_seq seq = NULL, new_seq;
  gbind *bind;

  child_fn = gimple_omp_task_copy_fn (task_stmt);
  if (child_fn == NULL_TREE)
    return;

  child_cfun = DECL_STRUCT_FUNCTION (child_fn);
  DECL_STRUCT_FUNCTION (child_fn)->curr_properties = cfun->curr_properties;

  push_cfun (child_cfun);
  bind = gimplify_body (child_fn, false);
  gimple_seq_add_stmt (&seq, bind);
  new_seq = maybe_catch_exception (seq);
  if (new_seq != seq)
    {
      bind = gimple_build_bind (NULL, new_seq, NULL);
      seq = NULL;
      gimple_seq_add_stmt (&seq, bind);
    }
  gimple_set_body (child_fn, seq);
  pop_cfun ();

  /* Inform the callgraph about the new function.  */
  cgraph_node *node = cgraph_node::get_create (child_fn);
  node->parallelized_function = 1;
  cgraph_node::add_new_function (child_fn, false);
}

/* Destroy an omp_context data structure.  Called through the splay tree
   value delete callback.  */

static void
delete_omp_context (splay_tree_value value)
{
  omp_context *ctx = (omp_context *) value;

  delete ctx->cb.decl_map;

  if (ctx->field_map)
    splay_tree_delete (ctx->field_map);
  if (ctx->sfield_map)
    splay_tree_delete (ctx->sfield_map);

  /* We hijacked DECL_ABSTRACT_ORIGIN earlier.  We need to clear it before
     it produces corrupt debug information.  */
  if (ctx->record_type)
    {
      tree t;
      for (t = TYPE_FIELDS (ctx->record_type); t ; t = DECL_CHAIN (t))
        DECL_ABSTRACT_ORIGIN (t) = NULL;
    }
  if (ctx->srecord_type)
    {
      tree t;
      for (t = TYPE_FIELDS (ctx->srecord_type); t ; t = DECL_CHAIN (t))
        DECL_ABSTRACT_ORIGIN (t) = NULL;
    }

  if (is_task_ctx (ctx))
    finalize_task_copyfn (as_a <gomp_task *> (ctx->stmt));

  if (ctx->task_reduction_map)
    {
      ctx->task_reductions.release ();
      delete ctx->task_reduction_map;
    }

  delete ctx->lastprivate_conditional_map;

  XDELETE (ctx);
}

/* Fix up RECEIVER_DECL with a type that has been remapped to the child
   context.  */

static void
fixup_child_record_type (omp_context *ctx)
{
  tree f, type = ctx->record_type;

  if (!ctx->receiver_decl)
    return;
  /* ??? It isn't sufficient to just call remap_type here, because
     variably_modified_type_p doesn't work the way we expect for
     record types.  Testing each field for whether it needs remapping
     and creating a new record by hand works, however.  */
  for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
    if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
      break;
  if (f)
    {
      tree name, new_fields = NULL;

      type = lang_hooks.types.make_type (RECORD_TYPE);
      name = DECL_NAME (TYPE_NAME (ctx->record_type));
      name = build_decl (DECL_SOURCE_LOCATION (ctx->receiver_decl),
                         TYPE_DECL, name, type);
      TYPE_NAME (type) = name;

      for (f = TYPE_FIELDS (ctx->record_type); f ; f = DECL_CHAIN (f))
        {
          tree new_f = copy_node (f);
          DECL_CONTEXT (new_f) = type;
          TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &ctx->cb);
          DECL_CHAIN (new_f) = new_fields;
          walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &ctx->cb, NULL);
          walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r,
                     &ctx->cb, NULL);
          walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
                     &ctx->cb, NULL);
          new_fields = new_f;

          /* Arrange to be able to look up the receiver field
             given the sender field.  */
          splay_tree_insert (ctx->field_map, (splay_tree_key) f,
                             (splay_tree_value) new_f);
        }
      TYPE_FIELDS (type) = nreverse (new_fields);
      layout_type (type);
    }

  /* In a target region we never modify any of the pointers in *.omp_data_i,
     so attempt to help the optimizers.  */
  if (is_gimple_omp_offloaded (ctx->stmt))
    type = build_qualified_type (type, TYPE_QUAL_CONST);

  TREE_TYPE (ctx->receiver_decl)
    = build_qualified_type (build_reference_type (type), TYPE_QUAL_RESTRICT);
}

/* Instantiate decls as necessary in CTX to satisfy the data sharing
   specified by CLAUSES.  */

static void
scan_sharing_clauses (tree clauses, omp_context *ctx)
{
  tree c, decl;
  bool scan_array_reductions = false;

  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      bool by_ref;

      switch (OMP_CLAUSE_CODE (c))
        {
        case OMP_CLAUSE_PRIVATE:
          decl = OMP_CLAUSE_DECL (c);
          if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
            goto do_private;
          else if (!is_variable_sized (decl))
            install_var_local (decl, ctx);
          break;

        case OMP_CLAUSE_SHARED:
          decl = OMP_CLAUSE_DECL (c);
          /* Ignore shared directives in teams construct inside of
             target construct.  */
          if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
              && !is_host_teams_ctx (ctx))
            {
              /* Global variables don't need to be copied,
                 the receiver side will use them directly.  */
              tree odecl = maybe_lookup_decl_in_outer_ctx (decl, ctx);
              if (is_global_var (odecl))
                break;
              insert_decl_map (&ctx->cb, decl, odecl);
              break;
            }
          gcc_assert (is_taskreg_ctx (ctx));
          gcc_assert (!COMPLETE_TYPE_P (TREE_TYPE (decl))
                      || !is_variable_sized (decl));
          /* Global variables don't need to be copied,
             the receiver side will use them directly.  */
          if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
            break;
          if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
            {
              use_pointer_for_field (decl, ctx);
              break;
            }
          by_ref = use_pointer_for_field (decl, NULL);
          if ((! TREE_READONLY (decl) && !OMP_CLAUSE_SHARED_READONLY (c))
              || TREE_ADDRESSABLE (decl)
              || by_ref
              || omp_is_reference (decl))
            {
              by_ref = use_pointer_for_field (decl, ctx);
              install_var_field (decl, by_ref, 3, ctx);
              install_var_local (decl, ctx);
              break;
            }
          /* We don't need to copy const scalar vars back.  */
          OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_FIRSTPRIVATE);
          goto do_private;
        case OMP_CLAUSE_REDUCTION:
        case OMP_CLAUSE_IN_REDUCTION:
          decl = OMP_CLAUSE_DECL (c);
          if (TREE_CODE (decl) == MEM_REF)
            {
              tree t = TREE_OPERAND (decl, 0);
              if (TREE_CODE (t) == POINTER_PLUS_EXPR)
                t = TREE_OPERAND (t, 0);
              if (TREE_CODE (t) == INDIRECT_REF
                  || TREE_CODE (t) == ADDR_EXPR)
                t = TREE_OPERAND (t, 0);
              install_var_local (t, ctx);
              if (is_taskreg_ctx (ctx)
                  && (!is_global_var (maybe_lookup_decl_in_outer_ctx (t, ctx))
                      || (is_task_ctx (ctx)
                          && (TREE_CODE (TREE_TYPE (t)) == POINTER_TYPE
                              || (TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE
                                  && (TREE_CODE (TREE_TYPE (TREE_TYPE (t)))
                                      == POINTER_TYPE)))))
                  && !is_variable_sized (t)
                  && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
                      || (!OMP_CLAUSE_REDUCTION_TASK (c)
                          && !is_task_ctx (ctx))))
                {
                  by_ref = use_pointer_for_field (t, NULL);
                  if (is_task_ctx (ctx)
                      && TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE
                      && TREE_CODE (TREE_TYPE (TREE_TYPE (t))) == POINTER_TYPE)
                    {
                      install_var_field (t, false, 1, ctx);
                      install_var_field (t, by_ref, 2, ctx);
                    }
                  else
                    install_var_field (t, by_ref, 3, ctx);
                }
              break;
            }
          if (is_task_ctx (ctx)
              || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
                  && OMP_CLAUSE_REDUCTION_TASK (c)
                  && is_parallel_ctx (ctx)))
            {
              /* Global variables don't need to be copied,
                 the receiver side will use them directly.  */
              if (!is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
                {
                  by_ref = use_pointer_for_field (decl, ctx);
                  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION)
                    install_var_field (decl, by_ref, 3, ctx);
                }
              install_var_local (decl, ctx);
              break;
            }
          if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
              && OMP_CLAUSE_REDUCTION_TASK (c))
            {
              install_var_local (decl, ctx);
              break;
            }
          goto do_private;
        case OMP_CLAUSE_LASTPRIVATE:
          /* Let the corresponding firstprivate clause create
             the variable.  */
          if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
            break;
          /* FALLTHRU */

        case OMP_CLAUSE_FIRSTPRIVATE:
        case OMP_CLAUSE_LINEAR:
          decl = OMP_CLAUSE_DECL (c);
        do_private:
          if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
               || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
              && is_gimple_omp_offloaded (ctx->stmt))
            {
              if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
                install_var_field (decl, !omp_is_reference (decl), 3, ctx);
              else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
                install_var_field (decl, true, 3, ctx);
              else
                install_var_field (decl, false, 3, ctx);
            }
          if (is_variable_sized (decl))
            {
              if (is_task_ctx (ctx))
                install_var_field (decl, false, 1, ctx);
              break;
            }
          else if (is_taskreg_ctx (ctx))
            {
              bool global
                = is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx));
              by_ref = use_pointer_for_field (decl, NULL);

              if (is_task_ctx (ctx)
                  && (global || by_ref || omp_is_reference (decl)))
                {
                  install_var_field (decl, false, 1, ctx);
                  if (!global)
                    install_var_field (decl, by_ref, 2, ctx);
                }
              else if (!global)
                install_var_field (decl, by_ref, 3, ctx);
            }
          install_var_local (decl, ctx);
          break;
        case OMP_CLAUSE_USE_DEVICE_PTR:
        case OMP_CLAUSE_USE_DEVICE_ADDR:
          decl = OMP_CLAUSE_DECL (c);
          if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
               && !omp_is_reference (decl))
              || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
            install_var_field (decl, true, 11, ctx);
          else
            install_var_field (decl, false, 11, ctx);
          if (DECL_SIZE (decl)
              && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
            {
              tree decl2 = DECL_VALUE_EXPR (decl);
              gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
              decl2 = TREE_OPERAND (decl2, 0);
              gcc_assert (DECL_P (decl2));
              install_var_local (decl2, ctx);
            }
          install_var_local (decl, ctx);
          break;

        case OMP_CLAUSE_IS_DEVICE_PTR:
          decl = OMP_CLAUSE_DECL (c);
          goto do_private;

        case OMP_CLAUSE__LOOPTEMP_:
        case OMP_CLAUSE__REDUCTEMP_:
          gcc_assert (is_taskreg_ctx (ctx));
          decl = OMP_CLAUSE_DECL (c);
          install_var_field (decl, false, 3, ctx);
          install_var_local (decl, ctx);
          break;

        case OMP_CLAUSE_COPYPRIVATE:
        case OMP_CLAUSE_COPYIN:
          decl = OMP_CLAUSE_DECL (c);
          by_ref = use_pointer_for_field (decl, NULL);
          install_var_field (decl, by_ref, 3, ctx);
          break;
        case OMP_CLAUSE_FINAL:
        case OMP_CLAUSE_IF:
        case OMP_CLAUSE_NUM_THREADS:
        case OMP_CLAUSE_NUM_TEAMS:
        case OMP_CLAUSE_THREAD_LIMIT:
        case OMP_CLAUSE_DEVICE:
        case OMP_CLAUSE_SCHEDULE:
        case OMP_CLAUSE_DIST_SCHEDULE:
        case OMP_CLAUSE_DEPEND:
        case OMP_CLAUSE_PRIORITY:
        case OMP_CLAUSE_GRAINSIZE:
        case OMP_CLAUSE_NUM_TASKS:
        case OMP_CLAUSE_NUM_GANGS:
        case OMP_CLAUSE_NUM_WORKERS:
        case OMP_CLAUSE_VECTOR_LENGTH:
          if (ctx->outer)
            scan_omp_op (&OMP_CLAUSE_OPERAND (c, 0), ctx->outer);
          break;
        case OMP_CLAUSE_TO:
        case OMP_CLAUSE_FROM:
        case OMP_CLAUSE_MAP:
          if (ctx->outer)
            scan_omp_op (&OMP_CLAUSE_SIZE (c), ctx->outer);
          decl = OMP_CLAUSE_DECL (c);
          /* Global variables with "omp declare target" attribute
             don't need to be copied, the receiver side will use them
             directly.  However, global variables with "omp declare target link"
             attribute need to be copied.  Or when ALWAYS modifier is used.  */
          if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
              && DECL_P (decl)
              && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
                   && (OMP_CLAUSE_MAP_KIND (c)
                       != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
                  || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
              && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TO
              && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_FROM
              && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TOFROM
              && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
              && varpool_node::get_create (decl)->offloadable
              && !lookup_attribute ("omp declare target link",
                                    DECL_ATTRIBUTES (decl)))
            break;
          if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
              && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER)
            {
              /* Ignore GOMP_MAP_POINTER kind for arrays in regions that are
                 not offloaded; there is nothing to map for those.  */
              if (!is_gimple_omp_offloaded (ctx->stmt)
                  && !POINTER_TYPE_P (TREE_TYPE (decl))
                  && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
                break;
            }
          if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
              && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
                  || (OMP_CLAUSE_MAP_KIND (c)
                      == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
            {
              if (TREE_CODE (decl) == COMPONENT_REF
                  || (TREE_CODE (decl) == INDIRECT_REF
                      && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
                      && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
                          == REFERENCE_TYPE)))
                break;
              if (DECL_SIZE (decl)
                  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
                {
                  tree decl2 = DECL_VALUE_EXPR (decl);
                  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
                  decl2 = TREE_OPERAND (decl2, 0);
                  gcc_assert (DECL_P (decl2));
                  install_var_local (decl2, ctx);
                }
              install_var_local (decl, ctx);
              break;
            }
          if (DECL_P (decl))
            {
              if (DECL_SIZE (decl)
                  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
                {
                  tree decl2 = DECL_VALUE_EXPR (decl);
                  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
                  decl2 = TREE_OPERAND (decl2, 0);
                  gcc_assert (DECL_P (decl2));
                  install_var_field (decl2, true, 3, ctx);
                  install_var_local (decl2, ctx);
                  install_var_local (decl, ctx);
                }
              else
                {
                  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
                      && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
                      && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
                      && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
                    install_var_field (decl, true, 7, ctx);
                  else
                    install_var_field (decl, true, 3, ctx);
                  if (is_gimple_omp_offloaded (ctx->stmt)
                      && !OMP_CLAUSE_MAP_IN_REDUCTION (c))
                    install_var_local (decl, ctx);
                }
            }
          else
            {
              tree base = get_base_address (decl);
              tree nc = OMP_CLAUSE_CHAIN (c);
              if (DECL_P (base)
                  && nc != NULL_TREE
                  && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
                  && OMP_CLAUSE_DECL (nc) == base
                  && OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_POINTER
                  && integer_zerop (OMP_CLAUSE_SIZE (nc)))
                {
                  OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c) = 1;
                  OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (nc) = 1;
                }
              else
                {
                  if (ctx->outer)
                    {
                      scan_omp_op (&OMP_CLAUSE_DECL (c), ctx->outer);
                      decl = OMP_CLAUSE_DECL (c);
                    }
                  gcc_assert (!splay_tree_lookup (ctx->field_map,
                                                  (splay_tree_key) decl));
                  tree field
                    = build_decl (OMP_CLAUSE_LOCATION (c),
                                  FIELD_DECL, NULL_TREE, ptr_type_node);
                  SET_DECL_ALIGN (field, TYPE_ALIGN (ptr_type_node));
                  insert_field_into_struct (ctx->record_type, field);
                  splay_tree_insert (ctx->field_map, (splay_tree_key) decl,
                                     (splay_tree_value) field);
                }
            }
          break;
        case OMP_CLAUSE__GRIDDIM_:
          if (ctx->outer)
            {
              scan_omp_op (&OMP_CLAUSE__GRIDDIM__SIZE (c), ctx->outer);
              scan_omp_op (&OMP_CLAUSE__GRIDDIM__GROUP (c), ctx->outer);
            }
          break;

        case OMP_CLAUSE_ORDER:
          ctx->order_concurrent = true;
          break;

        case OMP_CLAUSE_BIND:
          ctx->loop_p = true;
          break;

        case OMP_CLAUSE_NOWAIT:
        case OMP_CLAUSE_ORDERED:
        case OMP_CLAUSE_COLLAPSE:
        case OMP_CLAUSE_UNTIED:
        case OMP_CLAUSE_MERGEABLE:
        case OMP_CLAUSE_PROC_BIND:
        case OMP_CLAUSE_SAFELEN:
        case OMP_CLAUSE_SIMDLEN:
        case OMP_CLAUSE_THREADS:
        case OMP_CLAUSE_SIMD:
        case OMP_CLAUSE_NOGROUP:
        case OMP_CLAUSE_DEFAULTMAP:
        case OMP_CLAUSE_ASYNC:
        case OMP_CLAUSE_WAIT:
        case OMP_CLAUSE_GANG:
        case OMP_CLAUSE_WORKER:
        case OMP_CLAUSE_VECTOR:
        case OMP_CLAUSE_INDEPENDENT:
        case OMP_CLAUSE_AUTO:
        case OMP_CLAUSE_SEQ:
        case OMP_CLAUSE_TILE:
        case OMP_CLAUSE__SIMT_:
        case OMP_CLAUSE_DEFAULT:
        case OMP_CLAUSE_NONTEMPORAL:
        case OMP_CLAUSE_IF_PRESENT:
        case OMP_CLAUSE_FINALIZE:
        case OMP_CLAUSE_TASK_REDUCTION:
          break;

        case OMP_CLAUSE_ALIGNED:
          decl = OMP_CLAUSE_DECL (c);
          if (is_global_var (decl)
              && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
            install_var_local (decl, ctx);
          break;

        case OMP_CLAUSE__CONDTEMP_:
          decl = OMP_CLAUSE_DECL (c);
          if (is_parallel_ctx (ctx))
            {
              install_var_field (decl, false, 3, ctx);
              install_var_local (decl, ctx);
            }
          else if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
                   && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD
                   && !OMP_CLAUSE__CONDTEMP__ITER (c))
            install_var_local (decl, ctx);
          break;

        case OMP_CLAUSE__CACHE_:
        default:
          gcc_unreachable ();
        }
    }

  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      switch (OMP_CLAUSE_CODE (c))
        {
        case OMP_CLAUSE_LASTPRIVATE:
          /* Let the corresponding firstprivate clause create
             the variable.  */
          if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
            scan_array_reductions = true;
          if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
            break;
          /* FALLTHRU */

        case OMP_CLAUSE_FIRSTPRIVATE:
        case OMP_CLAUSE_PRIVATE:
        case OMP_CLAUSE_LINEAR:
        case OMP_CLAUSE_IS_DEVICE_PTR:
          decl = OMP_CLAUSE_DECL (c);
          if (is_variable_sized (decl))
            {
              if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
                   || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
                  && is_gimple_omp_offloaded (ctx->stmt))
                {
                  tree decl2 = DECL_VALUE_EXPR (decl);
                  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
                  decl2 = TREE_OPERAND (decl2, 0);
                  gcc_assert (DECL_P (decl2));
                  install_var_local (decl2, ctx);
                  fixup_remapped_decl (decl2, ctx, false);
                }
              install_var_local (decl, ctx);
            }
          fixup_remapped_decl (decl, ctx,
                               OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
                               && OMP_CLAUSE_PRIVATE_DEBUG (c));
          if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
              && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
            scan_array_reductions = true;
          break;
        case OMP_CLAUSE_REDUCTION:
        case OMP_CLAUSE_IN_REDUCTION:
          decl = OMP_CLAUSE_DECL (c);
          if (TREE_CODE (decl) != MEM_REF)
            {
              if (is_variable_sized (decl))
                install_var_local (decl, ctx);
              fixup_remapped_decl (decl, ctx, false);
            }
          if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
            scan_array_reductions = true;
          break;

        case OMP_CLAUSE_TASK_REDUCTION:
          if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
            scan_array_reductions = true;
          break;

        case OMP_CLAUSE_SHARED:
          /* Ignore shared directives in teams construct inside of
             target construct.  */
          if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
              && !is_host_teams_ctx (ctx))
            break;
          decl = OMP_CLAUSE_DECL (c);
          if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
            break;
          if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
            {
              if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl,
                                                                 ctx->outer)))
                break;
              bool by_ref = use_pointer_for_field (decl, ctx);
              install_var_field (decl, by_ref, 11, ctx);
              break;
            }
          fixup_remapped_decl (decl, ctx, false);
          break;
        case OMP_CLAUSE_MAP:
          if (!is_gimple_omp_offloaded (ctx->stmt))
            break;
          decl = OMP_CLAUSE_DECL (c);
          if (DECL_P (decl)
              && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
                   && (OMP_CLAUSE_MAP_KIND (c)
                       != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
                  || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
              && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
              && varpool_node::get_create (decl)->offloadable)
            break;
          if (DECL_P (decl))
            {
              if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
                   || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
                  && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
                  && !COMPLETE_TYPE_P (TREE_TYPE (decl)))
                {
                  tree new_decl = lookup_decl (decl, ctx);
                  TREE_TYPE (new_decl)
                    = remap_type (TREE_TYPE (decl), &ctx->cb);
                }
              else if (DECL_SIZE (decl)
                       && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
                {
                  tree decl2 = DECL_VALUE_EXPR (decl);
                  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
                  decl2 = TREE_OPERAND (decl2, 0);
                  gcc_assert (DECL_P (decl2));
                  fixup_remapped_decl (decl2, ctx, false);
                  fixup_remapped_decl (decl, ctx, true);
                }
              else
                fixup_remapped_decl (decl, ctx, false);
            }
          break;
        case OMP_CLAUSE_COPYPRIVATE:
        case OMP_CLAUSE_COPYIN:
        case OMP_CLAUSE_DEFAULT:
        case OMP_CLAUSE_IF:
        case OMP_CLAUSE_NUM_THREADS:
        case OMP_CLAUSE_NUM_TEAMS:
        case OMP_CLAUSE_THREAD_LIMIT:
        case OMP_CLAUSE_DEVICE:
        case OMP_CLAUSE_SCHEDULE:
        case OMP_CLAUSE_DIST_SCHEDULE:
        case OMP_CLAUSE_NOWAIT:
        case OMP_CLAUSE_ORDERED:
        case OMP_CLAUSE_COLLAPSE:
        case OMP_CLAUSE_UNTIED:
        case OMP_CLAUSE_FINAL:
        case OMP_CLAUSE_MERGEABLE:
        case OMP_CLAUSE_PROC_BIND:
        case OMP_CLAUSE_SAFELEN:
        case OMP_CLAUSE_SIMDLEN:
        case OMP_CLAUSE_ALIGNED:
        case OMP_CLAUSE_DEPEND:
        case OMP_CLAUSE__LOOPTEMP_:
        case OMP_CLAUSE__REDUCTEMP_:
        case OMP_CLAUSE_TO:
        case OMP_CLAUSE_FROM:
        case OMP_CLAUSE_PRIORITY:
        case OMP_CLAUSE_GRAINSIZE:
        case OMP_CLAUSE_NUM_TASKS:
        case OMP_CLAUSE_THREADS:
        case OMP_CLAUSE_SIMD:
        case OMP_CLAUSE_NOGROUP:
        case OMP_CLAUSE_DEFAULTMAP:
        case OMP_CLAUSE_ORDER:
        case OMP_CLAUSE_BIND:
        case OMP_CLAUSE_USE_DEVICE_PTR:
        case OMP_CLAUSE_USE_DEVICE_ADDR:
        case OMP_CLAUSE_NONTEMPORAL:
        case OMP_CLAUSE_ASYNC:
        case OMP_CLAUSE_WAIT:
        case OMP_CLAUSE_NUM_GANGS:
        case OMP_CLAUSE_NUM_WORKERS:
        case OMP_CLAUSE_VECTOR_LENGTH:
        case OMP_CLAUSE_GANG:
        case OMP_CLAUSE_WORKER:
        case OMP_CLAUSE_VECTOR:
        case OMP_CLAUSE_INDEPENDENT:
        case OMP_CLAUSE_AUTO:
        case OMP_CLAUSE_SEQ:
        case OMP_CLAUSE_TILE:
        case OMP_CLAUSE__GRIDDIM_:
        case OMP_CLAUSE__SIMT_:
        case OMP_CLAUSE_IF_PRESENT:
        case OMP_CLAUSE_FINALIZE:
        case OMP_CLAUSE__CONDTEMP_:
          break;

        case OMP_CLAUSE__CACHE_:
        default:
          gcc_unreachable ();
        }
    }

  gcc_checking_assert (!scan_array_reductions
                       || !is_gimple_omp_oacc (ctx->stmt));
  if (scan_array_reductions)
    {
      for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
        if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
             || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
             || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
            && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
          {
            scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c), ctx);
            scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
          }
        else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
                 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
          scan_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
        else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
                 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
          scan_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
    }
}

/* Create a new name for omp child function.  Returns an identifier.  */

static tree
create_omp_child_function_name (bool task_copy)
{
  return clone_function_name_numbered (current_function_decl,
                                       task_copy ? "_omp_cpyfn" : "_omp_fn");
}

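/* Editorial example: within a function foo, the names produced here look
   like "foo._omp_fn.0" (parallel/task/target child functions) and
   "foo._omp_cpyfn.1" (task firstprivate copy functions), with
   clone_function_name_numbered supplying the unique trailing number.  */
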
/* Return true if CTX may belong to offloaded code: either if current function
   is offloaded, or any enclosing context corresponds to a target region.  */

static bool
omp_maybe_offloaded_ctx (omp_context *ctx)
{
  if (cgraph_node::get (current_function_decl)->offloadable)
    return true;
  for (; ctx; ctx = ctx->outer)
    if (is_gimple_omp_offloaded (ctx->stmt))
      return true;
  return false;
}

/* Build a decl for the omp child function.  It'll not contain a body
   yet, just the bare decl.  */

static void
create_omp_child_function (omp_context *ctx, bool task_copy)
{
  tree decl, type, name, t;

  name = create_omp_child_function_name (task_copy);
  if (task_copy)
    type = build_function_type_list (void_type_node, ptr_type_node,
                                     ptr_type_node, NULL_TREE);
  else
    type = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);

  decl = build_decl (gimple_location (ctx->stmt), FUNCTION_DECL, name, type);

  gcc_checking_assert (!is_gimple_omp_oacc (ctx->stmt)
                       || !task_copy);
  if (!task_copy)
    ctx->cb.dst_fn = decl;
  else
    gimple_omp_task_set_copy_fn (ctx->stmt, decl);

  TREE_STATIC (decl) = 1;
  TREE_USED (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  DECL_IGNORED_P (decl) = 0;
  TREE_PUBLIC (decl) = 0;
  DECL_UNINLINABLE (decl) = 1;
  DECL_EXTERNAL (decl) = 0;
  DECL_CONTEXT (decl) = NULL_TREE;
  DECL_INITIAL (decl) = make_node (BLOCK);
  BLOCK_SUPERCONTEXT (DECL_INITIAL (decl)) = decl;
  DECL_ATTRIBUTES (decl) = DECL_ATTRIBUTES (current_function_decl);
  /* Remove omp declare simd attribute from the new attributes.  */
  if (tree a = lookup_attribute ("omp declare simd", DECL_ATTRIBUTES (decl)))
    {
      while (tree a2 = lookup_attribute ("omp declare simd", TREE_CHAIN (a)))
        a = a2;
      a = TREE_CHAIN (a);
      for (tree *p = &DECL_ATTRIBUTES (decl); *p != a;)
        if (is_attribute_p ("omp declare simd", get_attribute_name (*p)))
          *p = TREE_CHAIN (*p);
        else
          {
            tree chain = TREE_CHAIN (*p);
            *p = copy_node (*p);
            p = &TREE_CHAIN (*p);
            *p = chain;
          }
    }
  DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
    = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (current_function_decl);
  DECL_FUNCTION_SPECIFIC_TARGET (decl)
    = DECL_FUNCTION_SPECIFIC_TARGET (current_function_decl);
  DECL_FUNCTION_VERSIONED (decl)
    = DECL_FUNCTION_VERSIONED (current_function_decl);

  if (omp_maybe_offloaded_ctx (ctx))
    {
      cgraph_node::get_create (decl)->offloadable = 1;
      if (ENABLE_OFFLOADING)
        g->have_offload = true;
    }

  if (cgraph_node::get_create (decl)->offloadable
      && !lookup_attribute ("omp declare target",
                            DECL_ATTRIBUTES (current_function_decl)))
    {
      const char *target_attr = (is_gimple_omp_offloaded (ctx->stmt)
                                 ? "omp target entrypoint"
                                 : "omp declare target");
      DECL_ATTRIBUTES (decl)
        = tree_cons (get_identifier (target_attr),
                     NULL_TREE, DECL_ATTRIBUTES (decl));
    }

  t = build_decl (DECL_SOURCE_LOCATION (decl),
                  RESULT_DECL, NULL_TREE, void_type_node);
  DECL_ARTIFICIAL (t) = 1;
  DECL_IGNORED_P (t) = 1;
  DECL_CONTEXT (t) = decl;
  DECL_RESULT (decl) = t;

  tree data_name = get_identifier (".omp_data_i");
  t = build_decl (DECL_SOURCE_LOCATION (decl), PARM_DECL, data_name,
                  ptr_type_node);
  DECL_ARTIFICIAL (t) = 1;
  DECL_NAMELESS (t) = 1;
  DECL_ARG_TYPE (t) = ptr_type_node;
  DECL_CONTEXT (t) = current_function_decl;
  TREE_USED (t) = 1;
  TREE_READONLY (t) = 1;
  DECL_ARGUMENTS (decl) = t;
  if (!task_copy)
    ctx->receiver_decl = t;
  else
    {
      t = build_decl (DECL_SOURCE_LOCATION (decl),
                      PARM_DECL, get_identifier (".omp_data_o"),
                      ptr_type_node);
      DECL_ARTIFICIAL (t) = 1;
      DECL_NAMELESS (t) = 1;
      DECL_ARG_TYPE (t) = ptr_type_node;
      DECL_CONTEXT (t) = current_function_decl;
      TREE_USED (t) = 1;
      TREE_ADDRESSABLE (t) = 1;
      DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
      DECL_ARGUMENTS (decl) = t;
    }

  /* Allocate memory for the function structure.  The call to
     allocate_struct_function clobbers CFUN, so we need to restore
     it afterward.  */
  push_struct_function (decl);
  cfun->function_end_locus = gimple_location (ctx->stmt);
  init_tree_ssa (cfun);
  pop_cfun ();
}

/* Callback for walk_gimple_seq.  Check if combined parallel
   contains gimple_omp_for_combined_into_p OMP_FOR.  */

tree
omp_find_combined_for (gimple_stmt_iterator *gsi_p,
                       bool *handled_ops_p,
                       struct walk_stmt_info *wi)
{
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;
  switch (gimple_code (stmt))
    {
    WALK_SUBSTMTS;

    case GIMPLE_OMP_FOR:
      if (gimple_omp_for_combined_into_p (stmt)
          && gimple_omp_for_kind (stmt)
             == *(const enum gf_mask *) (wi->info))
        {
          wi->info = stmt;
          return integer_zero_node;
        }
      break;
    default:
      break;
    }
  return NULL;
}

1864 /* Add _LOOPTEMP_/_REDUCTEMP_ clauses on OpenMP parallel or task. */
1866 static void
1867 add_taskreg_looptemp_clauses (enum gf_mask msk, gimple *stmt,
1868 omp_context *outer_ctx)
1870 struct walk_stmt_info wi;
1872 memset (&wi, 0, sizeof (wi));
1873 wi.val_only = true;
1874 wi.info = (void *) &msk;
1875 walk_gimple_seq (gimple_omp_body (stmt), omp_find_combined_for, NULL, &wi);
1876 if (wi.info != (void *) &msk)
1878 gomp_for *for_stmt = as_a <gomp_for *> ((gimple *) wi.info);
1879 struct omp_for_data fd;
1880 omp_extract_for_data (for_stmt, &fd, NULL);
1881 /* We need two temporaries with fd.loop.v type (istart/iend)
1882 and then (fd.collapse - 1) temporaries with the same
1883 type for count2 ... countN-1 vars if not constant. */
1884 size_t count = 2, i;
1885 tree type = fd.iter_type;
1886 if (fd.collapse > 1
1887 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
1889 count += fd.collapse - 1;
1890 /* If there are lastprivate clauses on the inner
1891 GIMPLE_OMP_FOR, add one more temporaries for the total number
1892 of iterations (product of count1 ... countN-1). */
1893 if (omp_find_clause (gimple_omp_for_clauses (for_stmt),
1894 OMP_CLAUSE_LASTPRIVATE))
1895 count++;
1896 else if (msk == GF_OMP_FOR_KIND_FOR
1897 && omp_find_clause (gimple_omp_parallel_clauses (stmt),
1898 OMP_CLAUSE_LASTPRIVATE))
1899 count++;
1901 for (i = 0; i < count; i++)
1903 tree temp = create_tmp_var (type);
1904 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
1905 insert_decl_map (&outer_ctx->cb, temp, temp);
1906 OMP_CLAUSE_DECL (c) = temp;
1907 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
1908 gimple_omp_taskreg_set_clauses (stmt, c);
1911 if (msk == GF_OMP_FOR_KIND_TASKLOOP
1912 && omp_find_clause (gimple_omp_task_clauses (stmt),
1913 OMP_CLAUSE_REDUCTION))
1915 tree type = build_pointer_type (pointer_sized_int_node);
1916 tree temp = create_tmp_var (type);
1917 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
1918 insert_decl_map (&outer_ctx->cb, temp, temp);
1919 OMP_CLAUSE_DECL (c) = temp;
1920 OMP_CLAUSE_CHAIN (c) = gimple_omp_task_clauses (stmt);
1921 gimple_omp_task_set_clauses (stmt, c);
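/* As a hedged, illustrative example of this function's effect: for a
   combined construct with non-constant bounds such as

     #pragma omp parallel for collapse(2) lastprivate(j)
     for (i = 0; i < n; i++)
       for (j = 0; j < m; j++) ...

   the enclosing parallel grows artificial _looptemp_ clauses, roughly

     _looptemp_(istart) _looptemp_(iend) _looptemp_(count2) _looptemp_(total)

   two for the library-computed bounds, one per extra collapsed loop,
   and one more for the total iteration count because of the
   lastprivate. */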
1925 /* Scan an OpenMP parallel directive. */
1927 static void
1928 scan_omp_parallel (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
1930 omp_context *ctx;
1931 tree name;
1932 gomp_parallel *stmt = as_a <gomp_parallel *> (gsi_stmt (*gsi));
1934 /* Ignore parallel directives with empty bodies, unless there
1935 are copyin clauses. */
1936 if (optimize > 0
1937 && empty_body_p (gimple_omp_body (stmt))
1938 && omp_find_clause (gimple_omp_parallel_clauses (stmt),
1939 OMP_CLAUSE_COPYIN) == NULL)
1941 gsi_replace (gsi, gimple_build_nop (), false);
1942 return;
1945 if (gimple_omp_parallel_combined_p (stmt))
1946 add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_FOR, stmt, outer_ctx);
1947 for (tree c = omp_find_clause (gimple_omp_parallel_clauses (stmt),
1948 OMP_CLAUSE_REDUCTION);
1949 c; c = omp_find_clause (OMP_CLAUSE_CHAIN (c), OMP_CLAUSE_REDUCTION))
1950 if (OMP_CLAUSE_REDUCTION_TASK (c))
1952 tree type = build_pointer_type (pointer_sized_int_node);
1953 tree temp = create_tmp_var (type);
1954 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
1955 if (outer_ctx)
1956 insert_decl_map (&outer_ctx->cb, temp, temp);
1957 OMP_CLAUSE_DECL (c) = temp;
1958 OMP_CLAUSE_CHAIN (c) = gimple_omp_parallel_clauses (stmt);
1959 gimple_omp_parallel_set_clauses (stmt, c);
1960 break;
1962 else if (OMP_CLAUSE_CHAIN (c) == NULL_TREE)
1963 break;
1965 ctx = new_omp_context (stmt, outer_ctx);
1966 taskreg_contexts.safe_push (ctx);
1967 if (taskreg_nesting_level > 1)
1968 ctx->is_nested = true;
1969 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
1970 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
1971 name = create_tmp_var_name (".omp_data_s");
1972 name = build_decl (gimple_location (stmt),
1973 TYPE_DECL, name, ctx->record_type);
1974 DECL_ARTIFICIAL (name) = 1;
1975 DECL_NAMELESS (name) = 1;
1976 TYPE_NAME (ctx->record_type) = name;
1977 TYPE_ARTIFICIAL (ctx->record_type) = 1;
1978 if (!gimple_omp_parallel_grid_phony (stmt))
1980 create_omp_child_function (ctx, false);
1981 gimple_omp_parallel_set_child_fn (stmt, ctx->cb.dst_fn);
1984 scan_sharing_clauses (gimple_omp_parallel_clauses (stmt), ctx);
1985 scan_omp (gimple_omp_body_ptr (stmt), ctx);
1987 if (TYPE_FIELDS (ctx->record_type) == NULL)
1988 ctx->record_type = ctx->receiver_decl = NULL;
1991 /* Scan an OpenMP task directive. */
1993 static void
1994 scan_omp_task (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
1996 omp_context *ctx;
1997 tree name, t;
1998 gomp_task *stmt = as_a <gomp_task *> (gsi_stmt (*gsi));
2000 /* Ignore task directives with empty bodies, unless they have a
2001 depend clause. */
2002 if (optimize > 0
2003 && gimple_omp_body (stmt)
2004 && empty_body_p (gimple_omp_body (stmt))
2005 && !omp_find_clause (gimple_omp_task_clauses (stmt), OMP_CLAUSE_DEPEND))
2007 gsi_replace (gsi, gimple_build_nop (), false);
2008 return;
2011 if (gimple_omp_task_taskloop_p (stmt))
2012 add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_TASKLOOP, stmt, outer_ctx);
2014 ctx = new_omp_context (stmt, outer_ctx);
2016 if (gimple_omp_task_taskwait_p (stmt))
2018 scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);
2019 return;
2022 taskreg_contexts.safe_push (ctx);
2023 if (taskreg_nesting_level > 1)
2024 ctx->is_nested = true;
2025 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2026 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2027 name = create_tmp_var_name (".omp_data_s");
2028 name = build_decl (gimple_location (stmt),
2029 TYPE_DECL, name, ctx->record_type);
2030 DECL_ARTIFICIAL (name) = 1;
2031 DECL_NAMELESS (name) = 1;
2032 TYPE_NAME (ctx->record_type) = name;
2033 TYPE_ARTIFICIAL (ctx->record_type) = 1;
2034 create_omp_child_function (ctx, false);
2035 gimple_omp_task_set_child_fn (stmt, ctx->cb.dst_fn);
2037 scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);
2039 if (ctx->srecord_type)
2041 name = create_tmp_var_name (".omp_data_a");
2042 name = build_decl (gimple_location (stmt),
2043 TYPE_DECL, name, ctx->srecord_type);
2044 DECL_ARTIFICIAL (name) = 1;
2045 DECL_NAMELESS (name) = 1;
2046 TYPE_NAME (ctx->srecord_type) = name;
2047 TYPE_ARTIFICIAL (ctx->srecord_type) = 1;
2048 create_omp_child_function (ctx, true);
2051 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2053 if (TYPE_FIELDS (ctx->record_type) == NULL)
2055 ctx->record_type = ctx->receiver_decl = NULL;
2056 t = build_int_cst (long_integer_type_node, 0);
2057 gimple_omp_task_set_arg_size (stmt, t);
2058 t = build_int_cst (long_integer_type_node, 1);
2059 gimple_omp_task_set_arg_align (stmt, t);
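/* For example (illustrative): a task that shares no data, such as

     #pragma omp task depend(out: x)
       ;

   is kept because of the depend clause but ends up with an empty
   record type, so the record is dropped and the runtime is told
   arg_size 0 / arg_align 1 via the constants above. */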
2063 /* Helper function for finish_taskreg_scan, called through walk_tree.
2064 If maybe_lookup_decl_in_outer_ctx returns a different tree for
2065 some variable, replace it in the expression. */
2067 static tree
2068 finish_taskreg_remap (tree *tp, int *walk_subtrees, void *data)
2070 if (VAR_P (*tp))
2072 omp_context *ctx = (omp_context *) data;
2073 tree t = maybe_lookup_decl_in_outer_ctx (*tp, ctx);
2074 if (t != *tp)
2076 if (DECL_HAS_VALUE_EXPR_P (t))
2077 t = unshare_expr (DECL_VALUE_EXPR (t));
2078 *tp = t;
2080 *walk_subtrees = 0;
2082 else if (IS_TYPE_OR_DECL_P (*tp))
2083 *walk_subtrees = 0;
2084 return NULL_TREE;
2087 /* If any decls have been made addressable during scan_omp,
2088 adjust their fields if needed, and lay out the record types
2089 of parallel/task constructs. */
2091 static void
2092 finish_taskreg_scan (omp_context *ctx)
2094 if (ctx->record_type == NULL_TREE)
2095 return;
2097 /* If any task_shared_vars were needed, check for all
2098 OMP_CLAUSE_SHARED clauses on GIMPLE_OMP_{PARALLEL,TASK,TEAMS}
2099 statements whether use_pointer_for_field has changed
2100 because of that. If it has, update the field types now. */
2101 if (task_shared_vars)
2103 tree c;
2105 for (c = gimple_omp_taskreg_clauses (ctx->stmt);
2106 c; c = OMP_CLAUSE_CHAIN (c))
2107 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
2108 && !OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
2110 tree decl = OMP_CLAUSE_DECL (c);
2112 /* Global variables don't need to be copied,
2113 the receiver side will use them directly. */
2114 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
2115 continue;
2116 if (!bitmap_bit_p (task_shared_vars, DECL_UID (decl))
2117 || !use_pointer_for_field (decl, ctx))
2118 continue;
2119 tree field = lookup_field (decl, ctx);
2120 if (TREE_CODE (TREE_TYPE (field)) == POINTER_TYPE
2121 && TREE_TYPE (TREE_TYPE (field)) == TREE_TYPE (decl))
2122 continue;
2123 TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
2124 TREE_THIS_VOLATILE (field) = 0;
2125 DECL_USER_ALIGN (field) = 0;
2126 SET_DECL_ALIGN (field, TYPE_ALIGN (TREE_TYPE (field)));
2127 if (TYPE_ALIGN (ctx->record_type) < DECL_ALIGN (field))
2128 SET_TYPE_ALIGN (ctx->record_type, DECL_ALIGN (field));
2129 if (ctx->srecord_type)
2131 tree sfield = lookup_sfield (decl, ctx);
2132 TREE_TYPE (sfield) = TREE_TYPE (field);
2133 TREE_THIS_VOLATILE (sfield) = 0;
2134 DECL_USER_ALIGN (sfield) = 0;
2135 SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
2136 if (TYPE_ALIGN (ctx->srecord_type) < DECL_ALIGN (sfield))
2137 SET_TYPE_ALIGN (ctx->srecord_type, DECL_ALIGN (sfield));
2142 if (gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL)
2144 tree clauses = gimple_omp_parallel_clauses (ctx->stmt);
2145 tree c = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
2146 if (c)
2148 /* Move the _reductemp_ clause first. GOMP_parallel_reductions
2149 expects to find it at the start of data. */
2150 tree f = lookup_field (OMP_CLAUSE_DECL (c), ctx);
2151 tree *p = &TYPE_FIELDS (ctx->record_type);
2152 while (*p)
2153 if (*p == f)
2155 *p = DECL_CHAIN (*p);
2156 break;
2158 else
2159 p = &DECL_CHAIN (*p);
2160 DECL_CHAIN (f) = TYPE_FIELDS (ctx->record_type);
2161 TYPE_FIELDS (ctx->record_type) = f;
2163 layout_type (ctx->record_type);
2164 fixup_child_record_type (ctx);
2166 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
2168 layout_type (ctx->record_type);
2169 fixup_child_record_type (ctx);
2171 else
2173 location_t loc = gimple_location (ctx->stmt);
2174 tree *p, vla_fields = NULL_TREE, *q = &vla_fields;
2175 /* Move VLA fields to the end. */
2176 p = &TYPE_FIELDS (ctx->record_type);
2177 while (*p)
2178 if (!TYPE_SIZE_UNIT (TREE_TYPE (*p))
2179 || ! TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (*p))))
2181 *q = *p;
2182 *p = TREE_CHAIN (*p);
2183 TREE_CHAIN (*q) = NULL_TREE;
2184 q = &TREE_CHAIN (*q);
2186 else
2187 p = &DECL_CHAIN (*p);
2188 *p = vla_fields;
2189 if (gimple_omp_task_taskloop_p (ctx->stmt))
2191 /* Move the fields corresponding to the first and second _looptemp_
2192 clauses first. These are filled by GOMP_taskloop
2193 and thus need to be in specific positions. */
2194 tree clauses = gimple_omp_task_clauses (ctx->stmt);
2195 tree c1 = omp_find_clause (clauses, OMP_CLAUSE__LOOPTEMP_);
2196 tree c2 = omp_find_clause (OMP_CLAUSE_CHAIN (c1),
2197 OMP_CLAUSE__LOOPTEMP_);
2198 tree c3 = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
2199 tree f1 = lookup_field (OMP_CLAUSE_DECL (c1), ctx);
2200 tree f2 = lookup_field (OMP_CLAUSE_DECL (c2), ctx);
2201 tree f3 = c3 ? lookup_field (OMP_CLAUSE_DECL (c3), ctx) : NULL_TREE;
2202 p = &TYPE_FIELDS (ctx->record_type);
2203 while (*p)
2204 if (*p == f1 || *p == f2 || *p == f3)
2205 *p = DECL_CHAIN (*p);
2206 else
2207 p = &DECL_CHAIN (*p);
2208 DECL_CHAIN (f1) = f2;
2209 if (c3)
2211 DECL_CHAIN (f2) = f3;
2212 DECL_CHAIN (f3) = TYPE_FIELDS (ctx->record_type);
2214 else
2215 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->record_type);
2216 TYPE_FIELDS (ctx->record_type) = f1;
2217 if (ctx->srecord_type)
2219 f1 = lookup_sfield (OMP_CLAUSE_DECL (c1), ctx);
2220 f2 = lookup_sfield (OMP_CLAUSE_DECL (c2), ctx);
2221 if (c3)
2222 f3 = lookup_sfield (OMP_CLAUSE_DECL (c3), ctx);
2223 p = &TYPE_FIELDS (ctx->srecord_type);
2224 while (*p)
2225 if (*p == f1 || *p == f2 || *p == f3)
2226 *p = DECL_CHAIN (*p);
2227 else
2228 p = &DECL_CHAIN (*p);
2229 DECL_CHAIN (f1) = f2;
2230 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
2231 if (c3)
2233 DECL_CHAIN (f2) = f3;
2234 DECL_CHAIN (f3) = TYPE_FIELDS (ctx->srecord_type);
2236 else
2237 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
2238 TYPE_FIELDS (ctx->srecord_type) = f1;
2241 layout_type (ctx->record_type);
2242 fixup_child_record_type (ctx);
2243 if (ctx->srecord_type)
2244 layout_type (ctx->srecord_type);
2245 tree t = fold_convert_loc (loc, long_integer_type_node,
2246 TYPE_SIZE_UNIT (ctx->record_type));
2247 if (TREE_CODE (t) != INTEGER_CST)
2249 t = unshare_expr (t);
2250 walk_tree (&t, finish_taskreg_remap, ctx, NULL);
2252 gimple_omp_task_set_arg_size (ctx->stmt, t);
2253 t = build_int_cst (long_integer_type_node,
2254 TYPE_ALIGN_UNIT (ctx->record_type));
2255 gimple_omp_task_set_arg_align (ctx->stmt, t);
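/* Illustrative resulting layout for a taskloop sender record after the
   reordering above (field names are made up for the example):

     struct .omp_data_s {
       long istart;     // first _looptemp_: filled by GOMP_taskloop
       long iend;       // second _looptemp_: filled by GOMP_taskloop
       void *reductions;// _reductemp_, if present
       ... other fields ...
       ... variable-sized fields last ...
     };

   GOMP_taskloop stores each task's iteration bounds into the first
   slots of the data block, which is why those fields must come first
   and VLA fields are pushed to the end. */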
2259 /* Find the enclosing offload context. */
2261 static omp_context *
2262 enclosing_target_ctx (omp_context *ctx)
2264 for (; ctx; ctx = ctx->outer)
2265 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET)
2266 break;
2268 return ctx;
2271 /* Return true if ctx is part of an oacc kernels region. */
2273 static bool
2274 ctx_in_oacc_kernels_region (omp_context *ctx)
2276 for (; ctx != NULL; ctx = ctx->outer)
2278 gimple *stmt = ctx->stmt;
2279 if (gimple_code (stmt) == GIMPLE_OMP_TARGET
2280 && gimple_omp_target_kind (stmt) == GF_OMP_TARGET_KIND_OACC_KERNELS)
2281 return true;
2284 return false;
2287 /* Check the parallelism clauses inside a kernels region.
2288 Until kernels handling moves to use the same loop indirection
2289 scheme as parallel, we need to do this checking early. */
2291 static unsigned
2292 check_oacc_kernel_gwv (gomp_for *stmt, omp_context *ctx)
2294 bool checking = true;
2295 unsigned outer_mask = 0;
2296 unsigned this_mask = 0;
2297 bool has_seq = false, has_auto = false;
2299 if (ctx->outer)
2300 outer_mask = check_oacc_kernel_gwv (NULL, ctx->outer);
2301 if (!stmt)
2303 checking = false;
2304 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR)
2305 return outer_mask;
2306 stmt = as_a <gomp_for *> (ctx->stmt);
2309 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2311 switch (OMP_CLAUSE_CODE (c))
2313 case OMP_CLAUSE_GANG:
2314 this_mask |= GOMP_DIM_MASK (GOMP_DIM_GANG);
2315 break;
2316 case OMP_CLAUSE_WORKER:
2317 this_mask |= GOMP_DIM_MASK (GOMP_DIM_WORKER);
2318 break;
2319 case OMP_CLAUSE_VECTOR:
2320 this_mask |= GOMP_DIM_MASK (GOMP_DIM_VECTOR);
2321 break;
2322 case OMP_CLAUSE_SEQ:
2323 has_seq = true;
2324 break;
2325 case OMP_CLAUSE_AUTO:
2326 has_auto = true;
2327 break;
2328 default:
2329 break;
2333 if (checking)
2335 if (has_seq && (this_mask || has_auto))
2336 error_at (gimple_location (stmt), "%<seq%> overrides other"
2337 " OpenACC loop specifiers");
2338 else if (has_auto && this_mask)
2339 error_at (gimple_location (stmt), "%<auto%> conflicts with other"
2340 " OpenACC loop specifiers");
2342 if (this_mask & outer_mask)
2343 error_at (gimple_location (stmt), "inner loop uses same"
2344 " OpenACC parallelism as containing loop");
2347 return outer_mask | this_mask;
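/* A hedged example of what the checks above diagnose: inside an
   OpenACC kernels region,

     #pragma acc loop gang
     for (...)
       #pragma acc loop gang    // error: inner loop uses same OpenACC
         for (...) ...          // parallelism as containing loop

   and similarly "seq" combined with gang/worker/vector or with "auto"
   on the same loop is rejected. */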
2350 /* Scan a GIMPLE_OMP_FOR. */
2352 static omp_context *
2353 scan_omp_for (gomp_for *stmt, omp_context *outer_ctx)
2355 omp_context *ctx;
2356 size_t i;
2357 tree clauses = gimple_omp_for_clauses (stmt);
2359 ctx = new_omp_context (stmt, outer_ctx);
2361 if (is_gimple_omp_oacc (stmt))
2363 omp_context *tgt = enclosing_target_ctx (outer_ctx);
2365 if (!tgt || is_oacc_parallel (tgt))
2366 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
2368 char const *check = NULL;
2370 switch (OMP_CLAUSE_CODE (c))
2372 case OMP_CLAUSE_GANG:
2373 check = "gang";
2374 break;
2376 case OMP_CLAUSE_WORKER:
2377 check = "worker";
2378 break;
2380 case OMP_CLAUSE_VECTOR:
2381 check = "vector";
2382 break;
2384 default:
2385 break;
2388 if (check && OMP_CLAUSE_OPERAND (c, 0))
2389 error_at (gimple_location (stmt),
2390 "argument not permitted on %qs clause in"
2391 " OpenACC %<parallel%>", check);
2394 if (tgt && is_oacc_kernels (tgt))
2396 /* Strip out reductions, as they are not handled yet. */
2397 tree *prev_ptr = &clauses;
2399 while (tree probe = *prev_ptr)
2401 tree *next_ptr = &OMP_CLAUSE_CHAIN (probe);
2403 if (OMP_CLAUSE_CODE (probe) == OMP_CLAUSE_REDUCTION)
2404 *prev_ptr = *next_ptr;
2405 else
2406 prev_ptr = next_ptr;
2409 gimple_omp_for_set_clauses (stmt, clauses);
2410 check_oacc_kernel_gwv (stmt, ctx);
2414 scan_sharing_clauses (clauses, ctx);
2416 scan_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
2417 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
2419 scan_omp_op (gimple_omp_for_index_ptr (stmt, i), ctx);
2420 scan_omp_op (gimple_omp_for_initial_ptr (stmt, i), ctx);
2421 scan_omp_op (gimple_omp_for_final_ptr (stmt, i), ctx);
2422 scan_omp_op (gimple_omp_for_incr_ptr (stmt, i), ctx);
2424 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2425 return ctx;
2428 /* Duplicate #pragma omp simd, one copy for SIMT, another one for SIMD. */
2430 static void
2431 scan_omp_simd (gimple_stmt_iterator *gsi, gomp_for *stmt,
2432 omp_context *outer_ctx)
2434 gbind *bind = gimple_build_bind (NULL, NULL, NULL);
2435 gsi_replace (gsi, bind, false);
2436 gimple_seq seq = NULL;
2437 gimple *g = gimple_build_call_internal (IFN_GOMP_USE_SIMT, 0);
2438 tree cond = create_tmp_var_raw (integer_type_node);
2439 DECL_CONTEXT (cond) = current_function_decl;
2440 DECL_SEEN_IN_BIND_EXPR_P (cond) = 1;
2441 gimple_bind_set_vars (bind, cond);
2442 gimple_call_set_lhs (g, cond);
2443 gimple_seq_add_stmt (&seq, g);
2444 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
2445 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
2446 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
2447 g = gimple_build_cond (NE_EXPR, cond, integer_zero_node, lab1, lab2);
2448 gimple_seq_add_stmt (&seq, g);
2449 g = gimple_build_label (lab1);
2450 gimple_seq_add_stmt (&seq, g);
2451 gimple_seq new_seq = copy_gimple_seq_and_replace_locals (stmt);
2452 gomp_for *new_stmt = as_a <gomp_for *> (new_seq);
2453 tree clause = build_omp_clause (gimple_location (stmt), OMP_CLAUSE__SIMT_);
2454 OMP_CLAUSE_CHAIN (clause) = gimple_omp_for_clauses (new_stmt);
2455 gimple_omp_for_set_clauses (new_stmt, clause);
2456 gimple_seq_add_stmt (&seq, new_stmt);
2457 g = gimple_build_goto (lab3);
2458 gimple_seq_add_stmt (&seq, g);
2459 g = gimple_build_label (lab2);
2460 gimple_seq_add_stmt (&seq, g);
2461 gimple_seq_add_stmt (&seq, stmt);
2462 g = gimple_build_label (lab3);
2463 gimple_seq_add_stmt (&seq, g);
2464 gimple_bind_set_body (bind, seq);
2465 update_stmt (bind);
2466 scan_omp_for (new_stmt, outer_ctx);
2467 scan_omp_for (stmt, outer_ctx)->simt_stmt = new_stmt;
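/* Schematically, the sequence built above is (GIMPLE-like sketch):

     cond = IFN_GOMP_USE_SIMT ();
     if (cond != 0) goto lab1; else goto lab2;
     lab1: <copy of the simd loop, with an extra _simt_ clause>
           goto lab3;
     lab2: <the original simd loop>
     lab3: ;

   so later passes can keep whichever version matches the target's
   SIMT capability and discard the other. */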
2470 static tree omp_find_scan (gimple_stmt_iterator *, bool *,
2471 struct walk_stmt_info *);
2472 static omp_context *maybe_lookup_ctx (gimple *);
2474 /* Duplicate #pragma omp simd, one for the scan input phase loop and one
2475 for the scan phase loop. */
2477 static void
2478 scan_omp_simd_scan (gimple_stmt_iterator *gsi, gomp_for *stmt,
2479 omp_context *outer_ctx)
2481 /* The only change between inclusive and exclusive scan will be
2482 within the first simd loop, so just use inclusive in the
2483 worksharing loop. */
2484 outer_ctx->scan_inclusive = true;
2485 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_INCLUSIVE);
2486 OMP_CLAUSE_DECL (c) = integer_zero_node;
2488 gomp_scan *input_stmt = gimple_build_omp_scan (NULL, NULL_TREE);
2489 gomp_scan *scan_stmt = gimple_build_omp_scan (NULL, c);
2490 gsi_replace (gsi, input_stmt, false);
2491 gimple_seq input_body = NULL;
2492 gimple_seq_add_stmt (&input_body, stmt);
2493 gsi_insert_after (gsi, scan_stmt, GSI_NEW_STMT);
2495 gimple_stmt_iterator input1_gsi = gsi_none ();
2496 struct walk_stmt_info wi;
2497 memset (&wi, 0, sizeof (wi));
2498 wi.val_only = true;
2499 wi.info = (void *) &input1_gsi;
2500 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), omp_find_scan, NULL, &wi);
2501 gcc_assert (!gsi_end_p (input1_gsi));
2503 gimple *input_stmt1 = gsi_stmt (input1_gsi);
2504 gsi_next (&input1_gsi);
2505 gimple *scan_stmt1 = gsi_stmt (input1_gsi);
2506 gcc_assert (scan_stmt1 && gimple_code (scan_stmt1) == GIMPLE_OMP_SCAN);
2507 c = gimple_omp_scan_clauses (as_a <gomp_scan *> (scan_stmt1));
2508 if (c && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_EXCLUSIVE)
2509 std::swap (input_stmt1, scan_stmt1);
2511 gimple_seq input_body1 = gimple_omp_body (input_stmt1);
2512 gimple_omp_set_body (input_stmt1, NULL);
2514 gimple_seq scan_body = copy_gimple_seq_and_replace_locals (stmt);
2515 gomp_for *new_stmt = as_a <gomp_for *> (scan_body);
2517 gimple_omp_set_body (input_stmt1, input_body1);
2518 gimple_omp_set_body (scan_stmt1, NULL);
2520 gimple_stmt_iterator input2_gsi = gsi_none ();
2521 memset (&wi, 0, sizeof (wi));
2522 wi.val_only = true;
2523 wi.info = (void *) &input2_gsi;
2524 walk_gimple_seq_mod (gimple_omp_body_ptr (new_stmt), omp_find_scan,
2525 NULL, &wi);
2526 gcc_assert (!gsi_end_p (input2_gsi));
2528 gimple *input_stmt2 = gsi_stmt (input2_gsi);
2529 gsi_next (&input2_gsi);
2530 gimple *scan_stmt2 = gsi_stmt (input2_gsi);
2531 gcc_assert (scan_stmt2 && gimple_code (scan_stmt2) == GIMPLE_OMP_SCAN);
2532 if (c && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_EXCLUSIVE)
2533 std::swap (input_stmt2, scan_stmt2);
2535 gimple_omp_set_body (input_stmt2, NULL);
2537 gimple_omp_set_body (input_stmt, input_body);
2538 gimple_omp_set_body (scan_stmt, scan_body);
2540 omp_context *ctx = new_omp_context (input_stmt, outer_ctx);
2541 scan_omp (gimple_omp_body_ptr (input_stmt), ctx);
2543 ctx = new_omp_context (scan_stmt, outer_ctx);
2544 scan_omp (gimple_omp_body_ptr (scan_stmt), ctx);
2546 maybe_lookup_ctx (new_stmt)->for_simd_scan_phase = true;
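/* Roughly (an illustrative sketch), a simd loop with an inscan
   reduction is split by the above into two phases wrapped in
   GIMPLE_OMP_SCAN statements:

     <scan input phase>  the original loop, computing the values that
                         feed the scan;
     <scan phase>        a copy of the loop consuming the prefix-scan
                         results, marked inclusive via the clause built
                         at the top of this function. */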
2549 /* Scan an OpenMP sections directive. */
2551 static void
2552 scan_omp_sections (gomp_sections *stmt, omp_context *outer_ctx)
2554 omp_context *ctx;
2556 ctx = new_omp_context (stmt, outer_ctx);
2557 scan_sharing_clauses (gimple_omp_sections_clauses (stmt), ctx);
2558 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2561 /* Scan an OpenMP single directive. */
2563 static void
2564 scan_omp_single (gomp_single *stmt, omp_context *outer_ctx)
2566 omp_context *ctx;
2567 tree name;
2569 ctx = new_omp_context (stmt, outer_ctx);
2570 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2571 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2572 name = create_tmp_var_name (".omp_copy_s");
2573 name = build_decl (gimple_location (stmt),
2574 TYPE_DECL, name, ctx->record_type);
2575 TYPE_NAME (ctx->record_type) = name;
2577 scan_sharing_clauses (gimple_omp_single_clauses (stmt), ctx);
2578 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2580 if (TYPE_FIELDS (ctx->record_type) == NULL)
2581 ctx->record_type = NULL;
2582 else
2583 layout_type (ctx->record_type);
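/* For example (illustrative):

     #pragma omp single copyprivate(x)
       x = compute ();

   needs the ".omp_copy_s" record to broadcast x from the executing
   thread to the others, so the record survives and is laid out; a
   plain "single" with nothing to copy ends up with no fields and the
   record is dropped above. */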
2586 /* Scan a GIMPLE_OMP_TARGET. */
2588 static void
2589 scan_omp_target (gomp_target *stmt, omp_context *outer_ctx)
2591 omp_context *ctx;
2592 tree name;
2593 bool offloaded = is_gimple_omp_offloaded (stmt);
2594 tree clauses = gimple_omp_target_clauses (stmt);
2596 ctx = new_omp_context (stmt, outer_ctx);
2597 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2598 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2599 name = create_tmp_var_name (".omp_data_t");
2600 name = build_decl (gimple_location (stmt),
2601 TYPE_DECL, name, ctx->record_type);
2602 DECL_ARTIFICIAL (name) = 1;
2603 DECL_NAMELESS (name) = 1;
2604 TYPE_NAME (ctx->record_type) = name;
2605 TYPE_ARTIFICIAL (ctx->record_type) = 1;
2607 if (offloaded)
2609 create_omp_child_function (ctx, false);
2610 gimple_omp_target_set_child_fn (stmt, ctx->cb.dst_fn);
2613 scan_sharing_clauses (clauses, ctx);
2614 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2616 if (TYPE_FIELDS (ctx->record_type) == NULL)
2617 ctx->record_type = ctx->receiver_decl = NULL;
2618 else
2620 TYPE_FIELDS (ctx->record_type)
2621 = nreverse (TYPE_FIELDS (ctx->record_type));
2622 if (flag_checking)
2624 unsigned int align = DECL_ALIGN (TYPE_FIELDS (ctx->record_type));
2625 for (tree field = TYPE_FIELDS (ctx->record_type);
2626 field;
2627 field = DECL_CHAIN (field))
2628 gcc_assert (DECL_ALIGN (field) == align);
2630 layout_type (ctx->record_type);
2631 if (offloaded)
2632 fixup_child_record_type (ctx);
2636 /* Scan an OpenMP teams directive. */
2638 static void
2639 scan_omp_teams (gomp_teams *stmt, omp_context *outer_ctx)
2641 omp_context *ctx = new_omp_context (stmt, outer_ctx);
2643 if (!gimple_omp_teams_host (stmt))
2645 scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
2646 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2647 return;
2649 taskreg_contexts.safe_push (ctx);
2650 gcc_assert (taskreg_nesting_level == 1);
2651 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2652 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2653 tree name = create_tmp_var_name (".omp_data_s");
2654 name = build_decl (gimple_location (stmt),
2655 TYPE_DECL, name, ctx->record_type);
2656 DECL_ARTIFICIAL (name) = 1;
2657 DECL_NAMELESS (name) = 1;
2658 TYPE_NAME (ctx->record_type) = name;
2659 TYPE_ARTIFICIAL (ctx->record_type) = 1;
2660 create_omp_child_function (ctx, false);
2661 gimple_omp_teams_set_child_fn (stmt, ctx->cb.dst_fn);
2663 scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
2664 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2666 if (TYPE_FIELDS (ctx->record_type) == NULL)
2667 ctx->record_type = ctx->receiver_decl = NULL;
2670 /* Check nesting restrictions. */
2671 static bool
2672 check_omp_nesting_restrictions (gimple *stmt, omp_context *ctx)
2674 tree c;
2676 if (ctx && gimple_code (ctx->stmt) == GIMPLE_OMP_GRID_BODY)
2677 /* GRID_BODY is an artificial construct, nesting rules will be checked in
2678 the original copy of its contents. */
2679 return true;
2681 /* No nesting of non-OpenACC STMT (that is, an OpenMP one, or a GOMP builtin)
2682 inside an OpenACC CTX. */
2683 if (!(is_gimple_omp (stmt)
2684 && is_gimple_omp_oacc (stmt))
2685 /* Except for atomic codes that we share with OpenMP. */
2686 && !(gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
2687 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE))
2689 if (oacc_get_fn_attrib (cfun->decl) != NULL)
2691 error_at (gimple_location (stmt),
2692 "non-OpenACC construct inside of OpenACC routine");
2693 return false;
2695 else
2696 for (omp_context *octx = ctx; octx != NULL; octx = octx->outer)
2697 if (is_gimple_omp (octx->stmt)
2698 && is_gimple_omp_oacc (octx->stmt))
2700 error_at (gimple_location (stmt),
2701 "non-OpenACC construct inside of OpenACC region");
2702 return false;
2706 if (ctx != NULL)
2708 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SCAN
2709 && ctx->outer
2710 && gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
2711 ctx = ctx->outer;
2712 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
2713 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD
2714 && !ctx->loop_p)
2716 c = NULL_TREE;
2717 if (ctx->order_concurrent
2718 && (gimple_code (stmt) == GIMPLE_OMP_ORDERED
2719 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
2720 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE))
2722 error_at (gimple_location (stmt),
2723 "OpenMP constructs other than %<parallel%>, %<loop%>"
2724 " or %<simd%> may not be nested inside a region with"
2725 " the %<order(concurrent)%> clause");
2726 return false;
2728 if (gimple_code (stmt) == GIMPLE_OMP_ORDERED)
2730 c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
2731 if (omp_find_clause (c, OMP_CLAUSE_SIMD))
2733 if (omp_find_clause (c, OMP_CLAUSE_THREADS)
2734 && (ctx->outer == NULL
2735 || !gimple_omp_for_combined_into_p (ctx->stmt)
2736 || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR
2737 || (gimple_omp_for_kind (ctx->outer->stmt)
2738 != GF_OMP_FOR_KIND_FOR)
2739 || !gimple_omp_for_combined_p (ctx->outer->stmt)))
2741 error_at (gimple_location (stmt),
2742 "%<ordered simd threads%> must be closely "
2743 "nested inside of %<for simd%> region");
2744 return false;
2746 return true;
2749 else if (gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
2750 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE
2751 || gimple_code (stmt) == GIMPLE_OMP_SCAN)
2752 return true;
2753 else if (gimple_code (stmt) == GIMPLE_OMP_FOR
2754 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
2755 return true;
2756 error_at (gimple_location (stmt),
2757 "OpenMP constructs other than "
2758 "%<ordered simd%>, %<simd%>, %<loop%> or %<atomic%> may "
2759 "not be nested inside %<simd%> region");
2760 return false;
2762 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
2764 if ((gimple_code (stmt) != GIMPLE_OMP_FOR
2765 || (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_DISTRIBUTE
2766 && gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_GRID_LOOP
2767 && omp_find_clause (gimple_omp_for_clauses (stmt),
2768 OMP_CLAUSE_BIND) == NULL_TREE))
2769 && gimple_code (stmt) != GIMPLE_OMP_PARALLEL)
2771 error_at (gimple_location (stmt),
2772 "only %<distribute%>, %<parallel%> or %<loop%> "
2773 "regions are allowed to be strictly nested inside "
2774 "%<teams%> region");
2775 return false;
2778 else if (ctx->order_concurrent
2779 && gimple_code (stmt) != GIMPLE_OMP_PARALLEL
2780 && (gimple_code (stmt) != GIMPLE_OMP_FOR
2781 || gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_SIMD)
2782 && gimple_code (stmt) != GIMPLE_OMP_SCAN)
2784 if (ctx->loop_p)
2785 error_at (gimple_location (stmt),
2786 "OpenMP constructs other than %<parallel%>, %<loop%> or "
2787 "%<simd%> may not be nested inside a %<loop%> region");
2788 else
2789 error_at (gimple_location (stmt),
2790 "OpenMP constructs other than %<parallel%>, %<loop%> or "
2791 "%<simd%> may not be nested inside a region with "
2792 "the %<order(concurrent)%> clause");
2793 return false;
2796 switch (gimple_code (stmt))
2798 case GIMPLE_OMP_FOR:
2799 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_SIMD)
2800 return true;
2801 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_DISTRIBUTE)
2803 if (ctx != NULL && gimple_code (ctx->stmt) != GIMPLE_OMP_TEAMS)
2805 error_at (gimple_location (stmt),
2806 "%<distribute%> region must be strictly nested "
2807 "inside %<teams%> construct");
2808 return false;
2810 return true;
2812 /* We split a taskloop into a task with a nested taskloop in it. */
2813 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP)
2814 return true;
2815 /* For now, hope this will change and loop bind(parallel) will not
2816 be allowed in lots of contexts. */
2817 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR
2818 && omp_find_clause (gimple_omp_for_clauses (stmt), OMP_CLAUSE_BIND))
2819 return true;
2820 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_OACC_LOOP)
2822 bool ok = false;
2824 if (ctx)
2825 switch (gimple_code (ctx->stmt))
2827 case GIMPLE_OMP_FOR:
2828 ok = (gimple_omp_for_kind (ctx->stmt)
2829 == GF_OMP_FOR_KIND_OACC_LOOP);
2830 break;
2832 case GIMPLE_OMP_TARGET:
2833 switch (gimple_omp_target_kind (ctx->stmt))
2835 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
2836 case GF_OMP_TARGET_KIND_OACC_KERNELS:
2837 ok = true;
2838 break;
2840 default:
2841 break;
2844 default:
2845 break;
2847 else if (oacc_get_fn_attrib (current_function_decl))
2848 ok = true;
2849 if (!ok)
2851 error_at (gimple_location (stmt),
2852 "OpenACC loop directive must be associated with"
2853 " an OpenACC compute region");
2854 return false;
2857 /* FALLTHRU */
2858 case GIMPLE_CALL:
2859 if (is_gimple_call (stmt)
2860 && (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2861 == BUILT_IN_GOMP_CANCEL
2862 || DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2863 == BUILT_IN_GOMP_CANCELLATION_POINT))
2865 const char *bad = NULL;
2866 const char *kind = NULL;
2867 const char *construct
2868 = (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2869 == BUILT_IN_GOMP_CANCEL)
2870 ? "cancel"
2871 : "cancellation point";
2872 if (ctx == NULL)
2874 error_at (gimple_location (stmt), "orphaned %qs construct",
2875 construct);
2876 return false;
2878 switch (tree_fits_shwi_p (gimple_call_arg (stmt, 0))
2879 ? tree_to_shwi (gimple_call_arg (stmt, 0))
2880 : 0)
2882 case 1:
2883 if (gimple_code (ctx->stmt) != GIMPLE_OMP_PARALLEL)
2884 bad = "parallel";
2885 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2886 == BUILT_IN_GOMP_CANCEL
2887 && !integer_zerop (gimple_call_arg (stmt, 1)))
2888 ctx->cancellable = true;
2889 kind = "parallel";
2890 break;
2891 case 2:
2892 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
2893 || gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR)
2894 bad = "for";
2895 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2896 == BUILT_IN_GOMP_CANCEL
2897 && !integer_zerop (gimple_call_arg (stmt, 1)))
2899 ctx->cancellable = true;
2900 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2901 OMP_CLAUSE_NOWAIT))
2902 warning_at (gimple_location (stmt), 0,
2903 "%<cancel for%> inside "
2904 "%<nowait%> for construct");
2905 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2906 OMP_CLAUSE_ORDERED))
2907 warning_at (gimple_location (stmt), 0,
2908 "%<cancel for%> inside "
2909 "%<ordered%> for construct");
2911 kind = "for";
2912 break;
2913 case 4:
2914 if (gimple_code (ctx->stmt) != GIMPLE_OMP_SECTIONS
2915 && gimple_code (ctx->stmt) != GIMPLE_OMP_SECTION)
2916 bad = "sections";
2917 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2918 == BUILT_IN_GOMP_CANCEL
2919 && !integer_zerop (gimple_call_arg (stmt, 1)))
2921 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
2923 ctx->cancellable = true;
2924 if (omp_find_clause (gimple_omp_sections_clauses
2925 (ctx->stmt),
2926 OMP_CLAUSE_NOWAIT))
2927 warning_at (gimple_location (stmt), 0,
2928 "%<cancel sections%> inside "
2929 "%<nowait%> sections construct");
2931 else
2933 gcc_assert (ctx->outer
2934 && gimple_code (ctx->outer->stmt)
2935 == GIMPLE_OMP_SECTIONS);
2936 ctx->outer->cancellable = true;
2937 if (omp_find_clause (gimple_omp_sections_clauses
2938 (ctx->outer->stmt),
2939 OMP_CLAUSE_NOWAIT))
2940 warning_at (gimple_location (stmt), 0,
2941 "%<cancel sections%> inside "
2942 "%<nowait%> sections construct");
2945 kind = "sections";
2946 break;
2947 case 8:
2948 if (!is_task_ctx (ctx)
2949 && (!is_taskloop_ctx (ctx)
2950 || ctx->outer == NULL
2951 || !is_task_ctx (ctx->outer)))
2952 bad = "task";
2953 else
2955 for (omp_context *octx = ctx->outer;
2956 octx; octx = octx->outer)
2958 switch (gimple_code (octx->stmt))
2960 case GIMPLE_OMP_TASKGROUP:
2961 break;
2962 case GIMPLE_OMP_TARGET:
2963 if (gimple_omp_target_kind (octx->stmt)
2964 != GF_OMP_TARGET_KIND_REGION)
2965 continue;
2966 /* FALLTHRU */
2967 case GIMPLE_OMP_PARALLEL:
2968 case GIMPLE_OMP_TEAMS:
2969 error_at (gimple_location (stmt),
2970 "%<%s taskgroup%> construct not closely "
2971 "nested inside of %<taskgroup%> region",
2972 construct);
2973 return false;
2974 case GIMPLE_OMP_TASK:
2975 if (gimple_omp_task_taskloop_p (octx->stmt)
2976 && octx->outer
2977 && is_taskloop_ctx (octx->outer))
2979 tree clauses
2980 = gimple_omp_for_clauses (octx->outer->stmt);
2981 if (!omp_find_clause (clauses, OMP_CLAUSE_NOGROUP))
2982 break;
2984 continue;
2985 default:
2986 continue;
2988 break;
2990 ctx->cancellable = true;
2992 kind = "taskgroup";
2993 break;
2994 default:
2995 error_at (gimple_location (stmt), "invalid arguments");
2996 return false;
2998 if (bad)
3000 error_at (gimple_location (stmt),
3001 "%<%s %s%> construct not closely nested inside of %qs",
3002 construct, kind, bad);
3003 return false;
3006 /* FALLTHRU */
3007 case GIMPLE_OMP_SECTIONS:
3008 case GIMPLE_OMP_SINGLE:
3009 for (; ctx != NULL; ctx = ctx->outer)
3010 switch (gimple_code (ctx->stmt))
3012 case GIMPLE_OMP_FOR:
3013 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
3014 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
3015 break;
3016 /* FALLTHRU */
3017 case GIMPLE_OMP_SECTIONS:
3018 case GIMPLE_OMP_SINGLE:
3019 case GIMPLE_OMP_ORDERED:
3020 case GIMPLE_OMP_MASTER:
3021 case GIMPLE_OMP_TASK:
3022 case GIMPLE_OMP_CRITICAL:
3023 if (is_gimple_call (stmt))
3025 if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3026 != BUILT_IN_GOMP_BARRIER)
3027 return true;
3028 error_at (gimple_location (stmt),
3029 "barrier region may not be closely nested inside "
3030 "of work-sharing, %<loop%>, %<critical%>, "
3031 "%<ordered%>, %<master%>, explicit %<task%> or "
3032 "%<taskloop%> region");
3033 return false;
3035 error_at (gimple_location (stmt),
3036 "work-sharing region may not be closely nested inside "
3037 "of work-sharing, %<loop%>, %<critical%>, %<ordered%>, "
3038 "%<master%>, explicit %<task%> or %<taskloop%> region");
3039 return false;
3040 case GIMPLE_OMP_PARALLEL:
3041 case GIMPLE_OMP_TEAMS:
3042 return true;
3043 case GIMPLE_OMP_TARGET:
3044 if (gimple_omp_target_kind (ctx->stmt)
3045 == GF_OMP_TARGET_KIND_REGION)
3046 return true;
3047 break;
3048 default:
3049 break;
3051 break;
3052 case GIMPLE_OMP_MASTER:
3053 for (; ctx != NULL; ctx = ctx->outer)
3054 switch (gimple_code (ctx->stmt))
3056 case GIMPLE_OMP_FOR:
3057 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
3058 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
3059 break;
3060 /* FALLTHRU */
3061 case GIMPLE_OMP_SECTIONS:
3062 case GIMPLE_OMP_SINGLE:
3063 case GIMPLE_OMP_TASK:
3064 error_at (gimple_location (stmt),
3065 "%<master%> region may not be closely nested inside "
3066 "of work-sharing, %<loop%>, explicit %<task%> or "
3067 "%<taskloop%> region");
3068 return false;
3069 case GIMPLE_OMP_PARALLEL:
3070 case GIMPLE_OMP_TEAMS:
3071 return true;
3072 case GIMPLE_OMP_TARGET:
3073 if (gimple_omp_target_kind (ctx->stmt)
3074 == GF_OMP_TARGET_KIND_REGION)
3075 return true;
3076 break;
3077 default:
3078 break;
3080 break;
3081 case GIMPLE_OMP_TASK:
3082 for (c = gimple_omp_task_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
3083 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
3084 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
3085 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
3087 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
3088 error_at (OMP_CLAUSE_LOCATION (c),
3089 "%<depend(%s)%> is only allowed in %<omp ordered%>",
3090 kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
3091 return false;
3093 break;
3094 case GIMPLE_OMP_ORDERED:
3095 for (c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
3096 c; c = OMP_CLAUSE_CHAIN (c))
3098 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
3100 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREADS
3101 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SIMD);
3102 continue;
3104 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
3105 if (kind == OMP_CLAUSE_DEPEND_SOURCE
3106 || kind == OMP_CLAUSE_DEPEND_SINK)
3108 tree oclause;
3109 /* Look for a containing ordered(N) loop. */
3110 if (ctx == NULL
3111 || gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
3112 || (oclause
3113 = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3114 OMP_CLAUSE_ORDERED)) == NULL_TREE)
3116 error_at (OMP_CLAUSE_LOCATION (c),
3117 "%<ordered%> construct with %<depend%> clause "
3118 "must be closely nested inside an %<ordered%> "
3119 "loop");
3120 return false;
3122 else if (OMP_CLAUSE_ORDERED_EXPR (oclause) == NULL_TREE)
3124 error_at (OMP_CLAUSE_LOCATION (c),
3125 "%<ordered%> construct with %<depend%> clause "
3126 "must be closely nested inside a loop with "
3127 "%<ordered%> clause with a parameter");
3128 return false;
3131 else
3133 error_at (OMP_CLAUSE_LOCATION (c),
3134 "invalid depend kind in omp %<ordered%> %<depend%>");
3135 return false;
3138 c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
3139 if (omp_find_clause (c, OMP_CLAUSE_SIMD))
3141 /* An ordered simd must be closely nested inside a simd region,
3142 and a simd region must not encounter constructs other than
3143 ordered simd, therefore an ordered simd may be either orphaned,
3144 or ctx->stmt must be simd. The latter case was already handled
3145 earlier. */
3146 if (ctx != NULL)
3148 error_at (gimple_location (stmt),
3149 "%<ordered%> %<simd%> must be closely nested inside "
3150 "%<simd%> region");
3151 return false;
3154 for (; ctx != NULL; ctx = ctx->outer)
3155 switch (gimple_code (ctx->stmt))
3157 case GIMPLE_OMP_CRITICAL:
3158 case GIMPLE_OMP_TASK:
3159 case GIMPLE_OMP_ORDERED:
3160 ordered_in_taskloop:
3161 error_at (gimple_location (stmt),
3162 "%<ordered%> region may not be closely nested inside "
3163 "of %<critical%>, %<ordered%>, explicit %<task%> or "
3164 "%<taskloop%> region");
3165 return false;
3166 case GIMPLE_OMP_FOR:
3167 if (gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP)
3168 goto ordered_in_taskloop;
3169 tree o;
3170 o = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3171 OMP_CLAUSE_ORDERED);
3172 if (o == NULL)
3174 error_at (gimple_location (stmt),
3175 "%<ordered%> region must be closely nested inside "
3176 "a loop region with an %<ordered%> clause");
3177 return false;
3179 if (OMP_CLAUSE_ORDERED_EXPR (o) != NULL_TREE
3180 && omp_find_clause (c, OMP_CLAUSE_DEPEND) == NULL_TREE)
3182 error_at (gimple_location (stmt),
3183 "%<ordered%> region without %<depend%> clause may "
3184 "not be closely nested inside a loop region with "
3185 "an %<ordered%> clause with a parameter");
3186 return false;
3188 return true;
3189 case GIMPLE_OMP_TARGET:
3190 if (gimple_omp_target_kind (ctx->stmt)
3191 != GF_OMP_TARGET_KIND_REGION)
3192 break;
3193 /* FALLTHRU */
3194 case GIMPLE_OMP_PARALLEL:
3195 case GIMPLE_OMP_TEAMS:
3196 error_at (gimple_location (stmt),
3197 "%<ordered%> region must be closely nested inside "
3198 "a loop region with an %<ordered%> clause");
3199 return false;
3200 default:
3201 break;
3203 break;
3204 case GIMPLE_OMP_CRITICAL:
3206 tree this_stmt_name
3207 = gimple_omp_critical_name (as_a <gomp_critical *> (stmt));
3208 for (; ctx != NULL; ctx = ctx->outer)
3209 if (gomp_critical *other_crit
3210 = dyn_cast <gomp_critical *> (ctx->stmt))
3211 if (this_stmt_name == gimple_omp_critical_name (other_crit))
3213 error_at (gimple_location (stmt),
3214 "%<critical%> region may not be nested inside "
3215 "a %<critical%> region with the same name");
3216 return false;
3219 break;
3220 case GIMPLE_OMP_TEAMS:
3221 if (ctx == NULL)
3222 break;
3223 else if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET
3224 || (gimple_omp_target_kind (ctx->stmt)
3225 != GF_OMP_TARGET_KIND_REGION))
3227 /* A teams construct can appear either strictly nested inside of a
3228 target construct with no intervening stmts, or can be encountered
3229 only by the initial task (so it must not appear inside any OpenMP
3230 construct). */
3231 error_at (gimple_location (stmt),
3232 "%<teams%> construct must be closely nested inside of "
3233 "%<target%> construct or not nested in any OpenMP "
3234 "construct");
3235 return false;
3237 break;
3238 case GIMPLE_OMP_TARGET:
3239 for (c = gimple_omp_target_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
3240 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
3241 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
3242 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
3244 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
3245 error_at (OMP_CLAUSE_LOCATION (c),
3246 "%<depend(%s)%> is only allowed in %<omp ordered%>",
3247 kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
3248 return false;
3250 if (is_gimple_omp_offloaded (stmt)
3251 && oacc_get_fn_attrib (cfun->decl) != NULL)
3253 error_at (gimple_location (stmt),
3254 "OpenACC region inside of OpenACC routine, nested "
3255 "parallelism not supported yet");
3256 return false;
3258 for (; ctx != NULL; ctx = ctx->outer)
3260 if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET)
3262 if (is_gimple_omp (stmt)
3263 && is_gimple_omp_oacc (stmt)
3264 && is_gimple_omp (ctx->stmt))
3266 error_at (gimple_location (stmt),
3267 "OpenACC construct inside of non-OpenACC region");
3268 return false;
3270 continue;
3273 const char *stmt_name, *ctx_stmt_name;
3274 switch (gimple_omp_target_kind (stmt))
3276 case GF_OMP_TARGET_KIND_REGION: stmt_name = "target"; break;
3277 case GF_OMP_TARGET_KIND_DATA: stmt_name = "target data"; break;
3278 case GF_OMP_TARGET_KIND_UPDATE: stmt_name = "target update"; break;
3279 case GF_OMP_TARGET_KIND_ENTER_DATA:
3280 stmt_name = "target enter data"; break;
3281 case GF_OMP_TARGET_KIND_EXIT_DATA:
3282 stmt_name = "target exit data"; break;
3283 case GF_OMP_TARGET_KIND_OACC_PARALLEL: stmt_name = "parallel"; break;
3284 case GF_OMP_TARGET_KIND_OACC_KERNELS: stmt_name = "kernels"; break;
3285 case GF_OMP_TARGET_KIND_OACC_DATA: stmt_name = "data"; break;
3286 case GF_OMP_TARGET_KIND_OACC_UPDATE: stmt_name = "update"; break;
3287 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
3288 stmt_name = "enter/exit data"; break;
3289 case GF_OMP_TARGET_KIND_OACC_DECLARE: stmt_name = "declare"; break;
3290 case GF_OMP_TARGET_KIND_OACC_HOST_DATA: stmt_name = "host_data";
3291 break;
3292 default: gcc_unreachable ();
3294 switch (gimple_omp_target_kind (ctx->stmt))
3296 case GF_OMP_TARGET_KIND_REGION: ctx_stmt_name = "target"; break;
3297 case GF_OMP_TARGET_KIND_DATA: ctx_stmt_name = "target data"; break;
3298 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
3299 ctx_stmt_name = "parallel"; break;
3300 case GF_OMP_TARGET_KIND_OACC_KERNELS:
3301 ctx_stmt_name = "kernels"; break;
3302 case GF_OMP_TARGET_KIND_OACC_DATA: ctx_stmt_name = "data"; break;
3303 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
3304 ctx_stmt_name = "host_data"; break;
3305 default: gcc_unreachable ();
3308 /* OpenACC/OpenMP mismatch? */
3309 if (is_gimple_omp_oacc (stmt)
3310 != is_gimple_omp_oacc (ctx->stmt))
3312 error_at (gimple_location (stmt),
3313 "%s %qs construct inside of %s %qs region",
3314 (is_gimple_omp_oacc (stmt)
3315 ? "OpenACC" : "OpenMP"), stmt_name,
3316 (is_gimple_omp_oacc (ctx->stmt)
3317 ? "OpenACC" : "OpenMP"), ctx_stmt_name);
3318 return false;
3320 if (is_gimple_omp_offloaded (ctx->stmt))
3322 /* No GIMPLE_OMP_TARGET inside offloaded OpenACC CTX. */
3323 if (is_gimple_omp_oacc (ctx->stmt))
3325 error_at (gimple_location (stmt),
3326 "%qs construct inside of %qs region",
3327 stmt_name, ctx_stmt_name);
3328 return false;
3330 else
3332 warning_at (gimple_location (stmt), 0,
3333 "%qs construct inside of %qs region",
3334 stmt_name, ctx_stmt_name);
3338 break;
3339 default:
3340 break;
3342 return true;
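/* A hedged example of a nesting violation rejected above:

     #pragma omp critical
     {
       #pragma omp barrier   // "barrier region may not be closely nested
     }                       //  inside of ... %<critical%> ... region"

   The barrier reaches this function through its GOMP_barrier builtin
   call rather than as an OMP statement. */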
3346 /* Helper function for scan_omp.
3348 Callback for walk_tree or operators in walk_gimple_stmt used to
3349 scan for OMP directives in TP. */
3351 static tree
3352 scan_omp_1_op (tree *tp, int *walk_subtrees, void *data)
3354 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
3355 omp_context *ctx = (omp_context *) wi->info;
3356 tree t = *tp;
3358 switch (TREE_CODE (t))
3360 case VAR_DECL:
3361 case PARM_DECL:
3362 case LABEL_DECL:
3363 case RESULT_DECL:
3364 if (ctx)
3366 tree repl = remap_decl (t, &ctx->cb);
3367 gcc_checking_assert (TREE_CODE (repl) != ERROR_MARK);
3368 *tp = repl;
3370 break;
3372 default:
3373 if (ctx && TYPE_P (t))
3374 *tp = remap_type (t, &ctx->cb);
3375 else if (!DECL_P (t))
3377 *walk_subtrees = 1;
3378 if (ctx)
3380 tree tem = remap_type (TREE_TYPE (t), &ctx->cb);
3381 if (tem != TREE_TYPE (t))
3383 if (TREE_CODE (t) == INTEGER_CST)
3384 *tp = wide_int_to_tree (tem, wi::to_wide (t));
3385 else
3386 TREE_TYPE (t) = tem;
3390 break;
3393 return NULL_TREE;
3396 /* Return true if FNDECL is a setjmp or a longjmp. */
3398 static bool
3399 setjmp_or_longjmp_p (const_tree fndecl)
3401 if (fndecl_built_in_p (fndecl, BUILT_IN_SETJMP)
3402 || fndecl_built_in_p (fndecl, BUILT_IN_LONGJMP))
3403 return true;
3405 tree declname = DECL_NAME (fndecl);
3406 if (!declname
3407 || (DECL_CONTEXT (fndecl) != NULL_TREE
3408 && TREE_CODE (DECL_CONTEXT (fndecl)) != TRANSLATION_UNIT_DECL)
3409 || !TREE_PUBLIC (fndecl))
3410 return false;
3412 const char *name = IDENTIFIER_POINTER (declname);
3413 return !strcmp (name, "setjmp") || !strcmp (name, "longjmp");
3416 /* Return true if FNDECL is an omp_* runtime API call. */
3418 static bool
3419 omp_runtime_api_call (const_tree fndecl)
3421 tree declname = DECL_NAME (fndecl);
3422 if (!declname
3423 || (DECL_CONTEXT (fndecl) != NULL_TREE
3424 && TREE_CODE (DECL_CONTEXT (fndecl)) != TRANSLATION_UNIT_DECL)
3425 || !TREE_PUBLIC (fndecl))
3426 return false;
3428 const char *name = IDENTIFIER_POINTER (declname);
3429 if (strncmp (name, "omp_", 4) != 0)
3430 return false;
3432 static const char *omp_runtime_apis[] =
3434 /* This array has 3 sections. First, omp_* calls that don't
3435 have any suffixes. */
3436 "target_alloc",
3437 "target_associate_ptr",
3438 "target_disassociate_ptr",
3439 "target_free",
3440 "target_is_present",
3441 "target_memcpy",
3442 "target_memcpy_rect",
3443 NULL,
3444 /* Now omp_* calls that are available as omp_* and omp_*_. */
3445 "capture_affinity",
3446 "destroy_lock",
3447 "destroy_nest_lock",
3448 "display_affinity",
3449 "get_active_level",
3450 "get_affinity_format",
3451 "get_cancellation",
3452 "get_default_device",
3453 "get_dynamic",
3454 "get_initial_device",
3455 "get_level",
3456 "get_max_active_levels",
3457 "get_max_task_priority",
3458 "get_max_threads",
3459 "get_nested",
3460 "get_num_devices",
3461 "get_num_places",
3462 "get_num_procs",
3463 "get_num_teams",
3464 "get_num_threads",
3465 "get_partition_num_places",
3466 "get_place_num",
3467 "get_proc_bind",
3468 "get_team_num",
3469 "get_thread_limit",
3470 "get_thread_num",
3471 "get_wtick",
3472 "get_wtime",
3473 "in_final",
3474 "in_parallel",
3475 "init_lock",
3476 "init_nest_lock",
3477 "is_initial_device",
3478 "pause_resource",
3479 "pause_resource_all",
3480 "set_affinity_format",
3481 "set_lock",
3482 "set_nest_lock",
3483 "test_lock",
3484 "test_nest_lock",
3485 "unset_lock",
3486 "unset_nest_lock",
3487 NULL,
3488 /* And finally calls available as omp_*, omp_*_ and omp_*_8_. */
3489 "get_ancestor_thread_num",
3490 "get_partition_place_nums",
3491 "get_place_num_procs",
3492 "get_place_proc_ids",
3493 "get_schedule",
3494 "get_team_size",
3495 "set_default_device",
3496 "set_dynamic",
3497 "set_max_active_levels",
3498 "set_nested",
3499 "set_num_threads",
3500 "set_schedule"
3503 int mode = 0;
3504 for (unsigned i = 0; i < ARRAY_SIZE (omp_runtime_apis); i++)
3506 if (omp_runtime_apis[i] == NULL)
3508 mode++;
3509 continue;
3511 size_t len = strlen (omp_runtime_apis[i]);
3512 if (strncmp (name + 4, omp_runtime_apis[i], len) == 0
3513 && (name[4 + len] == '\0'
3514 || (mode > 0
3515 && name[4 + len] == '_'
3516 && (name[4 + len + 1] == '\0'
3517 || (mode > 1
3518 && strcmp (name + 4 + len + 1, "8_") == 0)))))
3519 return true;
3521 return false;
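/* Matching examples for the loop above (illustrative): in the first
   section only the exact name matches, e.g. "omp_target_alloc"; in
   the second, "omp_get_num_threads" and the Fortran-mangled
   "omp_get_num_threads_" both match; in the third,
   "omp_get_team_size", "omp_get_team_size_" and "omp_get_team_size_8_"
   all match. */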
3524 /* Helper function for scan_omp.
3526 Callback for walk_gimple_stmt used to scan for OMP directives in
3527 the current statement in GSI. */
3529 static tree
3530 scan_omp_1_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
3531 struct walk_stmt_info *wi)
3533 gimple *stmt = gsi_stmt (*gsi);
3534 omp_context *ctx = (omp_context *) wi->info;
3536 if (gimple_has_location (stmt))
3537 input_location = gimple_location (stmt);
3539 /* Check the nesting restrictions. */
3540 bool remove = false;
3541 if (is_gimple_omp (stmt))
3542 remove = !check_omp_nesting_restrictions (stmt, ctx);
3543 else if (is_gimple_call (stmt))
3545 tree fndecl = gimple_call_fndecl (stmt);
3546 if (fndecl)
3548 if (ctx
3549 && gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
3550 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD
3551 && setjmp_or_longjmp_p (fndecl)
3552 && !ctx->loop_p)
3554 remove = true;
3555 error_at (gimple_location (stmt),
3556 "setjmp/longjmp inside %<simd%> construct");
3558 else if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3559 switch (DECL_FUNCTION_CODE (fndecl))
3561 case BUILT_IN_GOMP_BARRIER:
3562 case BUILT_IN_GOMP_CANCEL:
3563 case BUILT_IN_GOMP_CANCELLATION_POINT:
3564 case BUILT_IN_GOMP_TASKYIELD:
3565 case BUILT_IN_GOMP_TASKWAIT:
3566 case BUILT_IN_GOMP_TASKGROUP_START:
3567 case BUILT_IN_GOMP_TASKGROUP_END:
3568 remove = !check_omp_nesting_restrictions (stmt, ctx);
3569 break;
3570 default:
3571 break;
3573 else if (ctx)
3575 omp_context *octx = ctx;
3576 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SCAN && ctx->outer)
3577 octx = ctx->outer;
3578 if (octx->order_concurrent && omp_runtime_api_call (fndecl))
3580 remove = true;
3581 error_at (gimple_location (stmt),
3582 "OpenMP runtime API call %qD in a region with "
3583 "%<order(concurrent)%> clause", fndecl);
3588 if (remove)
3590 stmt = gimple_build_nop ();
3591 gsi_replace (gsi, stmt, false);
3594 *handled_ops_p = true;
3596 switch (gimple_code (stmt))
3598 case GIMPLE_OMP_PARALLEL:
3599 taskreg_nesting_level++;
3600 scan_omp_parallel (gsi, ctx);
3601 taskreg_nesting_level--;
3602 break;
3604 case GIMPLE_OMP_TASK:
3605 taskreg_nesting_level++;
3606 scan_omp_task (gsi, ctx);
3607 taskreg_nesting_level--;
3608 break;
3610 case GIMPLE_OMP_FOR:
3611 if ((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
3612 == GF_OMP_FOR_KIND_SIMD)
3613 && gimple_omp_for_combined_into_p (stmt)
3614 && gimple_code (ctx->stmt) != GIMPLE_OMP_SCAN)
3616 tree clauses = gimple_omp_for_clauses (as_a <gomp_for *> (stmt));
3617 tree c = omp_find_clause (clauses, OMP_CLAUSE_REDUCTION);
3618 if (c && OMP_CLAUSE_REDUCTION_INSCAN (c) && !seen_error ())
3620 scan_omp_simd_scan (gsi, as_a <gomp_for *> (stmt), ctx);
3621 break;
3624 if ((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
3625 == GF_OMP_FOR_KIND_SIMD)
3626 && omp_maybe_offloaded_ctx (ctx)
3627 && omp_max_simt_vf ())
3628 scan_omp_simd (gsi, as_a <gomp_for *> (stmt), ctx);
3629 else
3630 scan_omp_for (as_a <gomp_for *> (stmt), ctx);
3631 break;
3633 case GIMPLE_OMP_SECTIONS:
3634 scan_omp_sections (as_a <gomp_sections *> (stmt), ctx);
3635 break;
3637 case GIMPLE_OMP_SINGLE:
3638 scan_omp_single (as_a <gomp_single *> (stmt), ctx);
3639 break;
3641 case GIMPLE_OMP_SCAN:
3642 if (tree clauses = gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt)))
3644 if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_INCLUSIVE)
3645 ctx->scan_inclusive = true;
3646 else if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_EXCLUSIVE)
3647 ctx->scan_exclusive = true;
3649 /* FALLTHRU */
3650 case GIMPLE_OMP_SECTION:
3651 case GIMPLE_OMP_MASTER:
3652 case GIMPLE_OMP_ORDERED:
3653 case GIMPLE_OMP_CRITICAL:
3654 case GIMPLE_OMP_GRID_BODY:
3655 ctx = new_omp_context (stmt, ctx);
3656 scan_omp (gimple_omp_body_ptr (stmt), ctx);
3657 break;
3659 case GIMPLE_OMP_TASKGROUP:
3660 ctx = new_omp_context (stmt, ctx);
3661 scan_sharing_clauses (gimple_omp_taskgroup_clauses (stmt), ctx);
3662 scan_omp (gimple_omp_body_ptr (stmt), ctx);
3663 break;
3665 case GIMPLE_OMP_TARGET:
3666 scan_omp_target (as_a <gomp_target *> (stmt), ctx);
3667 break;
3669 case GIMPLE_OMP_TEAMS:
3670 if (gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
3672 taskreg_nesting_level++;
3673 scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
3674 taskreg_nesting_level--;
3676 else
3677 scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
3678 break;
3680 case GIMPLE_BIND:
3682 tree var;
3684 *handled_ops_p = false;
3685 if (ctx)
3686 for (var = gimple_bind_vars (as_a <gbind *> (stmt));
3687 var ;
3688 var = DECL_CHAIN (var))
3689 insert_decl_map (&ctx->cb, var, var);
3691 break;
3692 default:
3693 *handled_ops_p = false;
3694 break;
3697 return NULL_TREE;
3701 /* Scan all the statements starting at the current statement. CTX
3702 contains context information about the OMP directives and
3703 clauses found during the scan. */
3705 static void
3706 scan_omp (gimple_seq *body_p, omp_context *ctx)
3708 location_t saved_location;
3709 struct walk_stmt_info wi;
3711 memset (&wi, 0, sizeof (wi));
3712 wi.info = ctx;
3713 wi.want_locations = true;
3715 saved_location = input_location;
3716 walk_gimple_seq_mod (body_p, scan_omp_1_stmt, scan_omp_1_op, &wi);
3717 input_location = saved_location;
3720 /* Re-gimplification and code generation routines. */
3722 /* Remove omp_member_access_dummy_var variables from gimple_bind_vars
3723 of BIND if in a method. */
3725 static void
3726 maybe_remove_omp_member_access_dummy_vars (gbind *bind)
3728 if (DECL_ARGUMENTS (current_function_decl)
3729 && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
3730 && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
3731 == POINTER_TYPE))
3733 tree vars = gimple_bind_vars (bind);
3734 for (tree *pvar = &vars; *pvar; )
3735 if (omp_member_access_dummy_var (*pvar))
3736 *pvar = DECL_CHAIN (*pvar);
3737 else
3738 pvar = &DECL_CHAIN (*pvar);
3739 gimple_bind_set_vars (bind, vars);
3743 /* Remove omp_member_access_dummy_var variables from BLOCK_VARS of
3744 block and its subblocks. */
3746 static void
3747 remove_member_access_dummy_vars (tree block)
3749 for (tree *pvar = &BLOCK_VARS (block); *pvar; )
3750 if (omp_member_access_dummy_var (*pvar))
3751 *pvar = DECL_CHAIN (*pvar);
3752 else
3753 pvar = &DECL_CHAIN (*pvar);
3755 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
3756 remove_member_access_dummy_vars (block);
3759 /* If a context was created for STMT when it was scanned, return it. */
3761 static omp_context *
3762 maybe_lookup_ctx (gimple *stmt)
3764 splay_tree_node n;
3765 n = splay_tree_lookup (all_contexts, (splay_tree_key) stmt);
3766 return n ? (omp_context *) n->value : NULL;
3770 /* Find the mapping for DECL in CTX or the immediately enclosing
3771 context that has a mapping for DECL.
3773 If CTX is a nested parallel directive, we may have to use the decl
3774 mappings created in CTX's parent context. Suppose that we have the
3775 following parallel nesting (variable UIDs shown for clarity):
3777 iD.1562 = 0;
3778 #omp parallel shared(iD.1562) -> outer parallel
3779 iD.1562 = iD.1562 + 1;
3781 #omp parallel shared (iD.1562) -> inner parallel
3782 iD.1562 = iD.1562 - 1;
3784 Each parallel structure will create a distinct .omp_data_s structure
3785 for copying iD.1562 in/out of the directive:
3787 outer parallel .omp_data_s.1.i -> iD.1562
3788 inner parallel .omp_data_s.2.i -> iD.1562
3790 A shared variable mapping will produce a copy-out operation before
3791 the parallel directive and a copy-in operation after it. So, in
3792 this case we would have:
3794 iD.1562 = 0;
3795 .omp_data_o.1.i = iD.1562;
3796 #omp parallel shared(iD.1562) -> outer parallel
3797 .omp_data_i.1 = &.omp_data_o.1
3798 .omp_data_i.1->i = .omp_data_i.1->i + 1;
3800 .omp_data_o.2.i = iD.1562; -> **
3801 #omp parallel shared(iD.1562) -> inner parallel
3802 .omp_data_i.2 = &.omp_data_o.2
3803 .omp_data_i.2->i = .omp_data_i.2->i - 1;
3806 ** This is a problem. The symbol iD.1562 cannot be referenced
3807 inside the body of the outer parallel region. But since we are
3808 emitting this copy operation while expanding the inner parallel
3809 directive, we need to access the CTX structure of the outer
3810 parallel directive to get the correct mapping:
3812 .omp_data_o.2.i = .omp_data_i.1->i
3814 Since there may be other workshare or parallel directives enclosing
3815 the parallel directive, it may be necessary to walk up the context
3816 parent chain. This is not a problem in general because nested
3817 parallelism happens only rarely. */
3819 static tree
3820 lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
3822 tree t;
3823 omp_context *up;
3825 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
3826 t = maybe_lookup_decl (decl, up);
3828 gcc_assert (!ctx->is_nested || t || is_global_var (decl));
3830 return t ? t : decl;
3834 /* Similar to lookup_decl_in_outer_ctx, but return DECL if not found
3835 in outer contexts. */
3837 static tree
3838 maybe_lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
3840 tree t = NULL;
3841 omp_context *up;
3843 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
3844 t = maybe_lookup_decl (decl, up);
3846 return t ? t : decl;
3850 /* Construct the initialization value for reduction operation OP. */
3852 tree
3853 omp_reduction_init_op (location_t loc, enum tree_code op, tree type)
3855 switch (op)
3857 case PLUS_EXPR:
3858 case MINUS_EXPR:
3859 case BIT_IOR_EXPR:
3860 case BIT_XOR_EXPR:
3861 case TRUTH_OR_EXPR:
3862 case TRUTH_ORIF_EXPR:
3863 case TRUTH_XOR_EXPR:
3864 case NE_EXPR:
3865 return build_zero_cst (type);
3867 case MULT_EXPR:
3868 case TRUTH_AND_EXPR:
3869 case TRUTH_ANDIF_EXPR:
3870 case EQ_EXPR:
3871 return fold_convert_loc (loc, type, integer_one_node);
3873 case BIT_AND_EXPR:
3874 return fold_convert_loc (loc, type, integer_minus_one_node);
3876 case MAX_EXPR:
3877 if (SCALAR_FLOAT_TYPE_P (type))
3879 REAL_VALUE_TYPE max, min;
3880 if (HONOR_INFINITIES (type))
3882 real_inf (&max);
3883 real_arithmetic (&min, NEGATE_EXPR, &max, NULL);
3885 else
3886 real_maxval (&min, 1, TYPE_MODE (type));
3887 return build_real (type, min);
3889 else if (POINTER_TYPE_P (type))
3891 wide_int min
3892 = wi::min_value (TYPE_PRECISION (type), TYPE_SIGN (type));
3893 return wide_int_to_tree (type, min);
3895 else
3897 gcc_assert (INTEGRAL_TYPE_P (type));
3898 return TYPE_MIN_VALUE (type);
3901 case MIN_EXPR:
3902 if (SCALAR_FLOAT_TYPE_P (type))
3904 REAL_VALUE_TYPE max;
3905 if (HONOR_INFINITIES (type))
3906 real_inf (&max);
3907 else
3908 real_maxval (&max, 0, TYPE_MODE (type));
3909 return build_real (type, max);
3911 else if (POINTER_TYPE_P (type))
3913 wide_int max
3914 = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
3915 return wide_int_to_tree (type, max);
3917 else
3919 gcc_assert (INTEGRAL_TYPE_P (type));
3920 return TYPE_MAX_VALUE (type);
3923 default:
3924 gcc_unreachable ();
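/* Illustrative example (not part of the compiler): with the identities
   above, a loop such as

     int s = 0, m = INT_MIN;
     #pragma omp parallel for reduction (+:s) reduction (max:m)
     for (int i = 0; i < n; i++)
       {
         s += a[i];
         if (a[i] > m)
           m = a[i];
       }

   starts each thread's private S at 0 (PLUS_EXPR) and private M at
   TYPE_MIN_VALUE (MAX_EXPR), so combining the partial results with the
   reduction operators reproduces the serial answer. */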
3928 /* Construct the initialization value for reduction CLAUSE. */
3930 tree
3931 omp_reduction_init (tree clause, tree type)
3933 return omp_reduction_init_op (OMP_CLAUSE_LOCATION (clause),
3934 OMP_CLAUSE_REDUCTION_CODE (clause), type);
3937 /* Return the alignment to be assumed for the variable in CLAUSE,
3938 which should be OMP_CLAUSE_ALIGNED. */
3940 static tree
3941 omp_clause_aligned_alignment (tree clause)
3943 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
3944 return OMP_CLAUSE_ALIGNED_ALIGNMENT (clause);
3946 /* Otherwise return implementation defined alignment. */
3947 unsigned int al = 1;
3948 opt_scalar_mode mode_iter;
3949 auto_vector_sizes sizes;
3950 targetm.vectorize.autovectorize_vector_sizes (&sizes, true);
3951 poly_uint64 vs = 0;
3952 for (unsigned int i = 0; i < sizes.length (); ++i)
3953 vs = ordered_max (vs, sizes[i]);
3954 static enum mode_class classes[]
3955 = { MODE_INT, MODE_VECTOR_INT, MODE_FLOAT, MODE_VECTOR_FLOAT };
3956 for (int i = 0; i < 4; i += 2)
3957 /* The for loop above dictates that we only walk through scalar classes. */
3958 FOR_EACH_MODE_IN_CLASS (mode_iter, classes[i])
3960 scalar_mode mode = mode_iter.require ();
3961 machine_mode vmode = targetm.vectorize.preferred_simd_mode (mode);
3962 if (GET_MODE_CLASS (vmode) != classes[i + 1])
3963 continue;
3964 while (maybe_ne (vs, 0U)
3965 && known_lt (GET_MODE_SIZE (vmode), vs)
3966 && GET_MODE_2XWIDER_MODE (vmode).exists ())
3967 vmode = GET_MODE_2XWIDER_MODE (vmode).require ();
3969 tree type = lang_hooks.types.type_for_mode (mode, 1);
3970 if (type == NULL_TREE || TYPE_MODE (type) != mode)
3971 continue;
3972 poly_uint64 nelts = exact_div (GET_MODE_SIZE (vmode),
3973 GET_MODE_SIZE (mode));
3974 type = build_vector_type (type, nelts);
3975 if (TYPE_MODE (type) != vmode)
3976 continue;
3977 if (TYPE_ALIGN_UNIT (type) > al)
3978 al = TYPE_ALIGN_UNIT (type);
3980 return build_int_cst (integer_type_node, al);
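/* For illustration: given

     #pragma omp simd aligned (p : 32)

   the explicit OMP_CLAUSE_ALIGNED_ALIGNMENT of 32 is returned directly;
   with a bare "aligned (p)" the loops above pick the unit alignment of
   the widest preferred SIMD vector type, e.g. typically 32 bytes on an
   AVX2 target. */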
3984 /* This structure is part of the interface between lower_rec_simd_input_clauses
3985 and lower_rec_input_clauses. */
3987 class omplow_simd_context {
3988 public:
3989 omplow_simd_context () { memset (this, 0, sizeof (*this)); }
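/* IDX and LANE index the per-lane "omp simd array" copies (LANE for
   the current SIMD lane inside the loop body, IDX as the induction
   variable of the generated prologue/epilogue loops over all lanes),
   and LASTLANE records the lane whose value survives. SIMT_EARGS and
   SIMT_DLIST collect extra entry arguments and deferred statements for
   SIMT targets; MAX_VF is the maximum vectorization factor assumed
   when sizing the arrays. */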
3990 tree idx;
3991 tree lane;
3992 tree lastlane;
3993 vec<tree, va_heap> simt_eargs;
3994 gimple_seq simt_dlist;
3995 poly_uint64_pod max_vf;
3996 bool is_simt;
3999 /* Helper function of lower_rec_input_clauses, used for #pragma omp simd
4000 privatization. */
4002 static bool
4003 lower_rec_simd_input_clauses (tree new_var, omp_context *ctx,
4004 omplow_simd_context *sctx, tree &ivar,
4005 tree &lvar, tree *rvar = NULL,
4006 tree *rvar2 = NULL)
4008 if (known_eq (sctx->max_vf, 0U))
4010 sctx->max_vf = sctx->is_simt ? omp_max_simt_vf () : omp_max_vf ();
4011 if (maybe_gt (sctx->max_vf, 1U))
4013 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
4014 OMP_CLAUSE_SAFELEN);
4015 if (c)
4017 poly_uint64 safe_len;
4018 if (!poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
4019 || maybe_lt (safe_len, 1U))
4020 sctx->max_vf = 1;
4021 else
4022 sctx->max_vf = lower_bound (sctx->max_vf, safe_len);
4025 if (maybe_gt (sctx->max_vf, 1U))
4027 sctx->idx = create_tmp_var (unsigned_type_node);
4028 sctx->lane = create_tmp_var (unsigned_type_node);
4031 if (known_eq (sctx->max_vf, 1U))
4032 return false;
4034 if (sctx->is_simt)
4036 if (is_gimple_reg (new_var))
4038 ivar = lvar = new_var;
4039 return true;
4041 tree type = TREE_TYPE (new_var), ptype = build_pointer_type (type);
4042 ivar = lvar = create_tmp_var (type);
4043 TREE_ADDRESSABLE (ivar) = 1;
4044 DECL_ATTRIBUTES (ivar) = tree_cons (get_identifier ("omp simt private"),
4045 NULL, DECL_ATTRIBUTES (ivar));
4046 sctx->simt_eargs.safe_push (build1 (ADDR_EXPR, ptype, ivar));
4047 tree clobber = build_constructor (type, NULL);
4048 TREE_THIS_VOLATILE (clobber) = 1;
4049 gimple *g = gimple_build_assign (ivar, clobber);
4050 gimple_seq_add_stmt (&sctx->simt_dlist, g);
4052 else
4054 tree atype = build_array_type_nelts (TREE_TYPE (new_var), sctx->max_vf);
4055 tree avar = create_tmp_var_raw (atype);
4056 if (TREE_ADDRESSABLE (new_var))
4057 TREE_ADDRESSABLE (avar) = 1;
4058 DECL_ATTRIBUTES (avar)
4059 = tree_cons (get_identifier ("omp simd array"), NULL,
4060 DECL_ATTRIBUTES (avar));
4061 gimple_add_tmp_var (avar);
4062 tree iavar = avar;
4063 if (rvar && !ctx->for_simd_scan_phase)
4065 /* For inscan reductions, create another array temporary,
4066 which will hold the reduced value. */
4067 iavar = create_tmp_var_raw (atype);
4068 if (TREE_ADDRESSABLE (new_var))
4069 TREE_ADDRESSABLE (iavar) = 1;
4070 DECL_ATTRIBUTES (iavar)
4071 = tree_cons (get_identifier ("omp simd array"), NULL,
4072 tree_cons (get_identifier ("omp simd inscan"), NULL,
4073 DECL_ATTRIBUTES (iavar)));
4074 gimple_add_tmp_var (iavar);
4075 ctx->cb.decl_map->put (avar, iavar);
4076 if (sctx->lastlane == NULL_TREE)
4077 sctx->lastlane = create_tmp_var (unsigned_type_node);
4078 *rvar = build4 (ARRAY_REF, TREE_TYPE (new_var), iavar,
4079 sctx->lastlane, NULL_TREE, NULL_TREE);
4080 TREE_THIS_NOTRAP (*rvar) = 1;
4082 if (ctx->scan_exclusive)
4084 /* And for exclusive scan yet another one, which will
4085 hold the value during the scan phase. */
4086 tree savar = create_tmp_var_raw (atype);
4087 if (TREE_ADDRESSABLE (new_var))
4088 TREE_ADDRESSABLE (savar) = 1;
4089 DECL_ATTRIBUTES (savar)
4090 = tree_cons (get_identifier ("omp simd array"), NULL,
4091 tree_cons (get_identifier ("omp simd inscan "
4092 "exclusive"), NULL,
4093 DECL_ATTRIBUTES (savar)));
4094 gimple_add_tmp_var (savar);
4095 ctx->cb.decl_map->put (iavar, savar);
4096 *rvar2 = build4 (ARRAY_REF, TREE_TYPE (new_var), savar,
4097 sctx->idx, NULL_TREE, NULL_TREE);
4098 TREE_THIS_NOTRAP (*rvar2) = 1;
4101 ivar = build4 (ARRAY_REF, TREE_TYPE (new_var), iavar, sctx->idx,
4102 NULL_TREE, NULL_TREE);
4103 lvar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, sctx->lane,
4104 NULL_TREE, NULL_TREE);
4105 TREE_THIS_NOTRAP (ivar) = 1;
4106 TREE_THIS_NOTRAP (lvar) = 1;
4108 if (DECL_P (new_var))
4110 SET_DECL_VALUE_EXPR (new_var, lvar);
4111 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4113 return true;
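/* A sketch of the non-SIMT transformation above, assuming max_vf ended
   up as 4: a privatized scalar D becomes an array

     type D.arr[4];

   marked with the "omp simd array" attribute. IVAR and LVAR are
   D.arr[idx] and D.arr[lane], and for a DECL the DECL_VALUE_EXPR
   rewrite makes every use of D in the loop body read D.arr[lane],
   which the vectorizer can later replace with a single vector
   register. */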
4116 /* Helper function of lower_rec_input_clauses. For a reference in a
4117 simd reduction, add an underlying variable that it will reference. */
4119 static void
4120 handle_simd_reference (location_t loc, tree new_vard, gimple_seq *ilist)
4122 tree z = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_vard)));
4123 if (TREE_CONSTANT (z))
4125 z = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_vard)),
4126 get_name (new_vard));
4127 gimple_add_tmp_var (z);
4128 TREE_ADDRESSABLE (z) = 1;
4129 z = build_fold_addr_expr_loc (loc, z);
4130 gimplify_assign (new_vard, z, ilist);
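/* E.g. for a reference NEW_VARD of type T * with constant
   TYPE_SIZE_UNIT, the code above emits roughly

     T D.tmp;
     new_vard = &D.tmp;

   with D.tmp addressable, so later code can keep treating NEW_VARD as
   a pointer while the privatized object has real backing storage. */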
4134 /* Helper function for lower_rec_input_clauses. Emit into the ILIST
4135 sequence code that computes (type) (tskred_temp[idx]). */
4137 static tree
4138 task_reduction_read (gimple_seq *ilist, tree tskred_temp, tree type,
4139 unsigned idx)
4141 unsigned HOST_WIDE_INT sz
4142 = tree_to_uhwi (TYPE_SIZE_UNIT (pointer_sized_int_node));
4143 tree r = build2 (MEM_REF, pointer_sized_int_node,
4144 tskred_temp, build_int_cst (TREE_TYPE (tskred_temp),
4145 idx * sz));
4146 tree v = create_tmp_var (pointer_sized_int_node);
4147 gimple *g = gimple_build_assign (v, r);
4148 gimple_seq_add_stmt (ilist, g);
4149 if (!useless_type_conversion_p (type, pointer_sized_int_node))
4151 v = create_tmp_var (type);
4152 g = gimple_build_assign (v, NOP_EXPR, gimple_assign_lhs (g));
4153 gimple_seq_add_stmt (ilist, g);
4155 return v;
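/* E.g. task_reduction_read (ilist, tskred_temp, sizetype, 1) appends
   roughly

     D.1 = MEM[(uintptr_t *) tskred_temp + 1 * sizeof (uintptr_t)];
     D.2 = (sizetype) D.1;

   and returns D.2; the conversion is omitted when TYPE is already the
   pointer-sized integer type. */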
4158 /* Generate code to implement the input clauses, FIRSTPRIVATE and COPYIN,
4159 from the receiver (aka child) side and initializers for REFERENCE_TYPE
4160 private variables. Initialization statements go in ILIST, while calls
4161 to destructors go in DLIST. */
4163 static void
4164 lower_rec_input_clauses (tree clauses, gimple_seq *ilist, gimple_seq *dlist,
4165 omp_context *ctx, struct omp_for_data *fd)
4167 tree c, copyin_seq, x, ptr;
4168 bool copyin_by_ref = false;
4169 bool lastprivate_firstprivate = false;
4170 bool reduction_omp_orig_ref = false;
4171 int pass;
4172 bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
4173 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD);
4174 omplow_simd_context sctx = omplow_simd_context ();
4175 tree simt_lane = NULL_TREE, simtrec = NULL_TREE;
4176 tree ivar = NULL_TREE, lvar = NULL_TREE, uid = NULL_TREE;
4177 gimple_seq llist[4] = { };
4178 tree nonconst_simd_if = NULL_TREE;
4180 copyin_seq = NULL;
4181 sctx.is_simt = is_simd && omp_find_clause (clauses, OMP_CLAUSE__SIMT_);
4183 /* Set max_vf=1 (which will later enforce safelen=1) in simd loops
4184 with data sharing clauses referencing variable sized vars. That
4185 is unnecessarily hard to support and very unlikely to result in
4186 vectorized code anyway. */
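/* E.g. a VLA in a data sharing clause:

     int a[n];
     #pragma omp simd private (a)

   hits the is_variable_sized check below and forces max_vf = 1. */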
4187 if (is_simd)
4188 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
4189 switch (OMP_CLAUSE_CODE (c))
4191 case OMP_CLAUSE_LINEAR:
4192 if (OMP_CLAUSE_LINEAR_ARRAY (c))
4193 sctx.max_vf = 1;
4194 /* FALLTHRU */
4195 case OMP_CLAUSE_PRIVATE:
4196 case OMP_CLAUSE_FIRSTPRIVATE:
4197 case OMP_CLAUSE_LASTPRIVATE:
4198 if (is_variable_sized (OMP_CLAUSE_DECL (c)))
4199 sctx.max_vf = 1;
4200 else if (omp_is_reference (OMP_CLAUSE_DECL (c)))
4202 tree rtype = TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c)));
4203 if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype)))
4204 sctx.max_vf = 1;
4206 break;
4207 case OMP_CLAUSE_REDUCTION:
4208 case OMP_CLAUSE_IN_REDUCTION:
4209 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
4210 || is_variable_sized (OMP_CLAUSE_DECL (c)))
4211 sctx.max_vf = 1;
4212 else if (omp_is_reference (OMP_CLAUSE_DECL (c)))
4214 tree rtype = TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c)));
4215 if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype)))
4216 sctx.max_vf = 1;
4218 break;
4219 case OMP_CLAUSE_IF:
4220 if (integer_zerop (OMP_CLAUSE_IF_EXPR (c)))
4221 sctx.max_vf = 1;
4222 else if (TREE_CODE (OMP_CLAUSE_IF_EXPR (c)) != INTEGER_CST)
4223 nonconst_simd_if = OMP_CLAUSE_IF_EXPR (c);
4224 break;
4225 case OMP_CLAUSE_SIMDLEN:
4226 if (integer_onep (OMP_CLAUSE_SIMDLEN_EXPR (c)))
4227 sctx.max_vf = 1;
4228 break;
4229 case OMP_CLAUSE__CONDTEMP_:
4230 /* FIXME: lastprivate(conditional:) not handled for SIMT yet. */
4231 if (sctx.is_simt)
4232 sctx.max_vf = 1;
4233 break;
4234 default:
4235 continue;
4238 /* Add a placeholder for simduid. */
4239 if (sctx.is_simt && maybe_ne (sctx.max_vf, 1U))
4240 sctx.simt_eargs.safe_push (NULL_TREE);
4242 unsigned task_reduction_cnt = 0;
4243 unsigned task_reduction_cntorig = 0;
4244 unsigned task_reduction_cnt_full = 0;
4245 unsigned task_reduction_cntorig_full = 0;
4246 unsigned task_reduction_other_cnt = 0;
4247 tree tskred_atype = NULL_TREE, tskred_avar = NULL_TREE;
4248 tree tskred_base = NULL_TREE, tskred_temp = NULL_TREE;
4249 /* Do all the fixed sized types in the first pass, and the variable sized
4250 types in the second pass. This makes sure that the scalar arguments to
4251 the variable sized types are processed before we use them in the
4252 variable sized operations. For task reductions we use 4 passes: in the
4253 first two we ignore them, in the third we gather arguments for the
4254 GOMP_task_reduction_remap call, and in the last pass we actually handle
4255 the task reductions. */
4256 for (pass = 0; pass < ((task_reduction_cnt || task_reduction_other_cnt)
4257 ? 4 : 2); ++pass)
4259 if (pass == 2 && task_reduction_cnt)
4261 tskred_atype
4262 = build_array_type_nelts (ptr_type_node, task_reduction_cnt
4263 + task_reduction_cntorig);
4264 tskred_avar = create_tmp_var_raw (tskred_atype);
4265 gimple_add_tmp_var (tskred_avar);
4266 TREE_ADDRESSABLE (tskred_avar) = 1;
4267 task_reduction_cnt_full = task_reduction_cnt;
4268 task_reduction_cntorig_full = task_reduction_cntorig;
4270 else if (pass == 3 && task_reduction_cnt)
4272 x = builtin_decl_explicit (BUILT_IN_GOMP_TASK_REDUCTION_REMAP);
4273 gimple *g
4274 = gimple_build_call (x, 3, size_int (task_reduction_cnt),
4275 size_int (task_reduction_cntorig),
4276 build_fold_addr_expr (tskred_avar));
4277 gimple_seq_add_stmt (ilist, g);
4279 if (pass == 3 && task_reduction_other_cnt)
4281 /* For reduction clauses, build
4282 tskred_base = (void *) tskred_temp[2]
4283 + omp_get_thread_num () * tskred_temp[1]
4284 or if tskred_temp[1] is known to be constant, that constant
4285 directly. This is the start of the private reduction copy block
4286 for the current thread. */
4287 tree v = create_tmp_var (integer_type_node);
4288 x = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
4289 gimple *g = gimple_build_call (x, 0);
4290 gimple_call_set_lhs (g, v);
4291 gimple_seq_add_stmt (ilist, g);
4292 c = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
4293 tskred_temp = OMP_CLAUSE_DECL (c);
4294 if (is_taskreg_ctx (ctx))
4295 tskred_temp = lookup_decl (tskred_temp, ctx);
4296 tree v2 = create_tmp_var (sizetype);
4297 g = gimple_build_assign (v2, NOP_EXPR, v);
4298 gimple_seq_add_stmt (ilist, g);
4299 if (ctx->task_reductions[0])
4300 v = fold_convert (sizetype, ctx->task_reductions[0]);
4301 else
4302 v = task_reduction_read (ilist, tskred_temp, sizetype, 1);
4303 tree v3 = create_tmp_var (sizetype);
4304 g = gimple_build_assign (v3, MULT_EXPR, v2, v);
4305 gimple_seq_add_stmt (ilist, g);
4306 v = task_reduction_read (ilist, tskred_temp, ptr_type_node, 2);
4307 tskred_base = create_tmp_var (ptr_type_node);
4308 g = gimple_build_assign (tskred_base, POINTER_PLUS_EXPR, v, v3);
4309 gimple_seq_add_stmt (ilist, g);
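/* At this point tskred_base points at the start of the current
   thread's private copy block; the per-variable offsets within it are
   read further below via task_reduction_read with index
   7 + 3 * idx + 1. */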
4311 task_reduction_cnt = 0;
4312 task_reduction_cntorig = 0;
4313 task_reduction_other_cnt = 0;
4314 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
4316 enum omp_clause_code c_kind = OMP_CLAUSE_CODE (c);
4317 tree var, new_var;
4318 bool by_ref;
4319 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
4320 bool task_reduction_p = false;
4321 bool task_reduction_needs_orig_p = false;
4322 tree cond = NULL_TREE;
4324 switch (c_kind)
4326 case OMP_CLAUSE_PRIVATE:
4327 if (OMP_CLAUSE_PRIVATE_DEBUG (c))
4328 continue;
4329 break;
4330 case OMP_CLAUSE_SHARED:
4331 /* Ignore shared directives in teams construct inside
4332 of target construct. */
4333 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
4334 && !is_host_teams_ctx (ctx))
4335 continue;
4336 if (maybe_lookup_decl (OMP_CLAUSE_DECL (c), ctx) == NULL)
4338 gcc_assert (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c)
4339 || is_global_var (OMP_CLAUSE_DECL (c)));
4340 continue;
4342 case OMP_CLAUSE_FIRSTPRIVATE:
4343 case OMP_CLAUSE_COPYIN:
4344 break;
4345 case OMP_CLAUSE_LINEAR:
4346 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
4347 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
4348 lastprivate_firstprivate = true;
4349 break;
4350 case OMP_CLAUSE_REDUCTION:
4351 case OMP_CLAUSE_IN_REDUCTION:
4352 if (is_task_ctx (ctx) || OMP_CLAUSE_REDUCTION_TASK (c))
4354 task_reduction_p = true;
4355 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
4357 task_reduction_other_cnt++;
4358 if (pass == 2)
4359 continue;
4361 else
4362 task_reduction_cnt++;
4363 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
4365 var = OMP_CLAUSE_DECL (c);
4366 /* If var is a global variable that isn't privatized
4367 in outer contexts, we don't need to look up the
4368 original address, it is always the address of the
4369 global variable itself. */
4370 if (!DECL_P (var)
4371 || omp_is_reference (var)
4372 || !is_global_var
4373 (maybe_lookup_decl_in_outer_ctx (var, ctx)))
4375 task_reduction_needs_orig_p = true;
4376 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
4377 task_reduction_cntorig++;
4381 else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
4382 reduction_omp_orig_ref = true;
4383 break;
4384 case OMP_CLAUSE__REDUCTEMP_:
4385 if (!is_taskreg_ctx (ctx))
4386 continue;
4387 /* FALLTHRU */
4388 case OMP_CLAUSE__LOOPTEMP_:
4389 /* Handle _looptemp_/_reductemp_ clauses only on
4390 parallel/task. */
4391 if (fd)
4392 continue;
4393 break;
4394 case OMP_CLAUSE_LASTPRIVATE:
4395 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
4397 lastprivate_firstprivate = true;
4398 if (pass != 0 || is_taskloop_ctx (ctx))
4399 continue;
4401 /* Even without a corresponding firstprivate, if the
4402 decl is Fortran allocatable, it needs an outer var
4403 reference. */
4404 else if (pass == 0
4405 && lang_hooks.decls.omp_private_outer_ref
4406 (OMP_CLAUSE_DECL (c)))
4407 lastprivate_firstprivate = true;
4408 break;
4409 case OMP_CLAUSE_ALIGNED:
4410 if (pass != 1)
4411 continue;
4412 var = OMP_CLAUSE_DECL (c);
4413 if (TREE_CODE (TREE_TYPE (var)) == POINTER_TYPE
4414 && !is_global_var (var))
4416 new_var = maybe_lookup_decl (var, ctx);
4417 if (new_var == NULL_TREE)
4418 new_var = maybe_lookup_decl_in_outer_ctx (var, ctx);
4419 x = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
4420 tree alarg = omp_clause_aligned_alignment (c);
4421 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
4422 x = build_call_expr_loc (clause_loc, x, 2, new_var, alarg);
4423 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
4424 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
4425 gimplify_and_add (x, ilist);
4427 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
4428 && is_global_var (var))
4430 tree ptype = build_pointer_type (TREE_TYPE (var)), t, t2;
4431 new_var = lookup_decl (var, ctx);
4432 t = maybe_lookup_decl_in_outer_ctx (var, ctx);
4433 t = build_fold_addr_expr_loc (clause_loc, t);
4434 t2 = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
4435 tree alarg = omp_clause_aligned_alignment (c);
4436 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
4437 t = build_call_expr_loc (clause_loc, t2, 2, t, alarg);
4438 t = fold_convert_loc (clause_loc, ptype, t);
4439 x = create_tmp_var (ptype);
4440 t = build2 (MODIFY_EXPR, ptype, x, t);
4441 gimplify_and_add (t, ilist);
4442 t = build_simple_mem_ref_loc (clause_loc, x);
4443 SET_DECL_VALUE_EXPR (new_var, t);
4444 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4446 continue;
4447 case OMP_CLAUSE__CONDTEMP_:
4448 if (is_parallel_ctx (ctx)
4449 || (is_simd && !OMP_CLAUSE__CONDTEMP__ITER (c)))
4450 break;
4451 continue;
4452 default:
4453 continue;
4456 if (task_reduction_p != (pass >= 2))
4457 continue;
4459 new_var = var = OMP_CLAUSE_DECL (c);
4460 if ((c_kind == OMP_CLAUSE_REDUCTION
4461 || c_kind == OMP_CLAUSE_IN_REDUCTION)
4462 && TREE_CODE (var) == MEM_REF)
4464 var = TREE_OPERAND (var, 0);
4465 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
4466 var = TREE_OPERAND (var, 0);
4467 if (TREE_CODE (var) == INDIRECT_REF
4468 || TREE_CODE (var) == ADDR_EXPR)
4469 var = TREE_OPERAND (var, 0);
4470 if (is_variable_sized (var))
4472 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
4473 var = DECL_VALUE_EXPR (var);
4474 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
4475 var = TREE_OPERAND (var, 0);
4476 gcc_assert (DECL_P (var));
4478 new_var = var;
4480 if (c_kind != OMP_CLAUSE_COPYIN)
4481 new_var = lookup_decl (var, ctx);
4483 if (c_kind == OMP_CLAUSE_SHARED || c_kind == OMP_CLAUSE_COPYIN)
4485 if (pass != 0)
4486 continue;
4488 /* C/C++ array section reductions. */
4489 else if ((c_kind == OMP_CLAUSE_REDUCTION
4490 || c_kind == OMP_CLAUSE_IN_REDUCTION)
4491 && var != OMP_CLAUSE_DECL (c))
4493 if (pass == 0)
4494 continue;
4496 tree bias = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
4497 tree orig_var = TREE_OPERAND (OMP_CLAUSE_DECL (c), 0);
4499 if (TREE_CODE (orig_var) == POINTER_PLUS_EXPR)
4501 tree b = TREE_OPERAND (orig_var, 1);
4502 b = maybe_lookup_decl (b, ctx);
4503 if (b == NULL)
4505 b = TREE_OPERAND (orig_var, 1);
4506 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
4508 if (integer_zerop (bias))
4509 bias = b;
4510 else
4512 bias = fold_convert_loc (clause_loc,
4513 TREE_TYPE (b), bias);
4514 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
4515 TREE_TYPE (b), b, bias);
4517 orig_var = TREE_OPERAND (orig_var, 0);
4519 if (pass == 2)
4521 tree out = maybe_lookup_decl_in_outer_ctx (var, ctx);
4522 if (is_global_var (out)
4523 && TREE_CODE (TREE_TYPE (out)) != POINTER_TYPE
4524 && (TREE_CODE (TREE_TYPE (out)) != REFERENCE_TYPE
4525 || (TREE_CODE (TREE_TYPE (TREE_TYPE (out)))
4526 != POINTER_TYPE)))
4527 x = var;
4528 else
4530 bool by_ref = use_pointer_for_field (var, NULL);
4531 x = build_receiver_ref (var, by_ref, ctx);
4532 if (TREE_CODE (TREE_TYPE (var)) == REFERENCE_TYPE
4533 && (TREE_CODE (TREE_TYPE (TREE_TYPE (var)))
4534 == POINTER_TYPE))
4535 x = build_fold_addr_expr (x);
4537 if (TREE_CODE (orig_var) == INDIRECT_REF)
4538 x = build_simple_mem_ref (x);
4539 else if (TREE_CODE (orig_var) == ADDR_EXPR)
4541 if (var == TREE_OPERAND (orig_var, 0))
4542 x = build_fold_addr_expr (x);
4544 bias = fold_convert (sizetype, bias);
4545 x = fold_convert (ptr_type_node, x);
4546 x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
4547 TREE_TYPE (x), x, bias);
4548 unsigned cnt = task_reduction_cnt - 1;
4549 if (!task_reduction_needs_orig_p)
4550 cnt += (task_reduction_cntorig_full
4551 - task_reduction_cntorig);
4552 else
4553 cnt = task_reduction_cntorig - 1;
4554 tree r = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
4555 size_int (cnt), NULL_TREE, NULL_TREE);
4556 gimplify_assign (r, x, ilist);
4557 continue;
4560 if (TREE_CODE (orig_var) == INDIRECT_REF
4561 || TREE_CODE (orig_var) == ADDR_EXPR)
4562 orig_var = TREE_OPERAND (orig_var, 0);
4563 tree d = OMP_CLAUSE_DECL (c);
4564 tree type = TREE_TYPE (d);
4565 gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
4566 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
4567 const char *name = get_name (orig_var);
4568 if (pass == 3)
4570 tree xv = create_tmp_var (ptr_type_node);
4571 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
4573 unsigned cnt = task_reduction_cnt - 1;
4574 if (!task_reduction_needs_orig_p)
4575 cnt += (task_reduction_cntorig_full
4576 - task_reduction_cntorig);
4577 else
4578 cnt = task_reduction_cntorig - 1;
4579 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
4580 size_int (cnt), NULL_TREE, NULL_TREE);
4582 gimple *g = gimple_build_assign (xv, x);
4583 gimple_seq_add_stmt (ilist, g);
4585 else
4587 unsigned int idx = *ctx->task_reduction_map->get (c);
4588 tree off;
4589 if (ctx->task_reductions[1 + idx])
4590 off = fold_convert (sizetype,
4591 ctx->task_reductions[1 + idx]);
4592 else
4593 off = task_reduction_read (ilist, tskred_temp, sizetype,
4594 7 + 3 * idx + 1);
4595 gimple *g = gimple_build_assign (xv, POINTER_PLUS_EXPR,
4596 tskred_base, off);
4597 gimple_seq_add_stmt (ilist, g);
4599 x = fold_convert (build_pointer_type (boolean_type_node),
4600 xv);
4601 if (TREE_CONSTANT (v))
4602 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (x), x,
4603 TYPE_SIZE_UNIT (type));
4604 else
4606 tree t = maybe_lookup_decl (v, ctx);
4607 if (t)
4608 v = t;
4609 else
4610 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
4611 gimplify_expr (&v, ilist, NULL, is_gimple_val,
4612 fb_rvalue);
4613 t = fold_build2_loc (clause_loc, PLUS_EXPR,
4614 TREE_TYPE (v), v,
4615 build_int_cst (TREE_TYPE (v), 1));
4616 t = fold_build2_loc (clause_loc, MULT_EXPR,
4617 TREE_TYPE (v), t,
4618 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4619 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (x), x, t);
4621 cond = create_tmp_var (TREE_TYPE (x));
4622 gimplify_assign (cond, x, ilist);
4623 x = xv;
4625 else if (TREE_CONSTANT (v))
4627 x = create_tmp_var_raw (type, name);
4628 gimple_add_tmp_var (x);
4629 TREE_ADDRESSABLE (x) = 1;
4630 x = build_fold_addr_expr_loc (clause_loc, x);
4632 else
4634 tree atmp
4635 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
4636 tree t = maybe_lookup_decl (v, ctx);
4637 if (t)
4638 v = t;
4639 else
4640 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
4641 gimplify_expr (&v, ilist, NULL, is_gimple_val, fb_rvalue);
4642 t = fold_build2_loc (clause_loc, PLUS_EXPR,
4643 TREE_TYPE (v), v,
4644 build_int_cst (TREE_TYPE (v), 1));
4645 t = fold_build2_loc (clause_loc, MULT_EXPR,
4646 TREE_TYPE (v), t,
4647 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4648 tree al = size_int (TYPE_ALIGN (TREE_TYPE (type)));
4649 x = build_call_expr_loc (clause_loc, atmp, 2, t, al);
4652 tree ptype = build_pointer_type (TREE_TYPE (type));
4653 x = fold_convert_loc (clause_loc, ptype, x);
4654 tree y = create_tmp_var (ptype, name);
4655 gimplify_assign (y, x, ilist);
4656 x = y;
4657 tree yb = y;
4659 if (!integer_zerop (bias))
4661 bias = fold_convert_loc (clause_loc, pointer_sized_int_node,
4662 bias);
4663 yb = fold_convert_loc (clause_loc, pointer_sized_int_node,
4665 yb = fold_build2_loc (clause_loc, MINUS_EXPR,
4666 pointer_sized_int_node, yb, bias);
4667 x = fold_convert_loc (clause_loc, TREE_TYPE (x), yb);
4668 yb = create_tmp_var (ptype, name);
4669 gimplify_assign (yb, x, ilist);
4670 x = yb;
4673 d = TREE_OPERAND (d, 0);
4674 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
4675 d = TREE_OPERAND (d, 0);
4676 if (TREE_CODE (d) == ADDR_EXPR)
4678 if (orig_var != var)
4680 gcc_assert (is_variable_sized (orig_var));
4681 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var),
4683 gimplify_assign (new_var, x, ilist);
4684 tree new_orig_var = lookup_decl (orig_var, ctx);
4685 tree t = build_fold_indirect_ref (new_var);
4686 DECL_IGNORED_P (new_var) = 0;
4687 TREE_THIS_NOTRAP (t) = 1;
4688 SET_DECL_VALUE_EXPR (new_orig_var, t);
4689 DECL_HAS_VALUE_EXPR_P (new_orig_var) = 1;
4691 else
4693 x = build2 (MEM_REF, TREE_TYPE (new_var), x,
4694 build_int_cst (ptype, 0));
4695 SET_DECL_VALUE_EXPR (new_var, x);
4696 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4699 else
4701 gcc_assert (orig_var == var);
4702 if (TREE_CODE (d) == INDIRECT_REF)
4704 x = create_tmp_var (ptype, name);
4705 TREE_ADDRESSABLE (x) = 1;
4706 gimplify_assign (x, yb, ilist);
4707 x = build_fold_addr_expr_loc (clause_loc, x);
4709 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
4710 gimplify_assign (new_var, x, ilist);
4712 /* GOMP_taskgroup_reduction_register memsets the whole
4713 array to zero. If the initializer is zero, we don't
4714 need to initialize it again, just mark it as ever
4715 used unconditionally, i.e. cond = true. */
4716 if (cond
4717 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) == NULL_TREE
4718 && initializer_zerop (omp_reduction_init (c,
4719 TREE_TYPE (type))))
4721 gimple *g = gimple_build_assign (build_simple_mem_ref (cond),
4722 boolean_true_node);
4723 gimple_seq_add_stmt (ilist, g);
4724 continue;
4726 tree end = create_artificial_label (UNKNOWN_LOCATION);
4727 if (cond)
4729 gimple *g;
4730 if (!is_parallel_ctx (ctx))
4732 tree condv = create_tmp_var (boolean_type_node);
4733 g = gimple_build_assign (condv,
4734 build_simple_mem_ref (cond));
4735 gimple_seq_add_stmt (ilist, g);
4736 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
4737 g = gimple_build_cond (NE_EXPR, condv,
4738 boolean_false_node, end, lab1);
4739 gimple_seq_add_stmt (ilist, g);
4740 gimple_seq_add_stmt (ilist, gimple_build_label (lab1));
4742 g = gimple_build_assign (build_simple_mem_ref (cond),
4743 boolean_true_node);
4744 gimple_seq_add_stmt (ilist, g);
4747 tree y1 = create_tmp_var (ptype);
4748 gimplify_assign (y1, y, ilist);
4749 tree i2 = NULL_TREE, y2 = NULL_TREE;
4750 tree body2 = NULL_TREE, end2 = NULL_TREE;
4751 tree y3 = NULL_TREE, y4 = NULL_TREE;
4752 if (task_reduction_needs_orig_p)
4754 y3 = create_tmp_var (ptype);
4755 tree ref;
4756 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
4757 ref = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
4758 size_int (task_reduction_cnt_full
4759 + task_reduction_cntorig - 1),
4760 NULL_TREE, NULL_TREE);
4761 else
4763 unsigned int idx = *ctx->task_reduction_map->get (c);
4764 ref = task_reduction_read (ilist, tskred_temp, ptype,
4765 7 + 3 * idx);
4767 gimplify_assign (y3, ref, ilist);
4769 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) || is_simd)
4771 if (pass != 3)
4773 y2 = create_tmp_var (ptype);
4774 gimplify_assign (y2, y, ilist);
4776 if (is_simd || OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
4778 tree ref = build_outer_var_ref (var, ctx);
4779 /* For references, build_outer_var_ref already performs this. */
4780 if (TREE_CODE (d) == INDIRECT_REF)
4781 gcc_assert (omp_is_reference (var));
4782 else if (TREE_CODE (d) == ADDR_EXPR)
4783 ref = build_fold_addr_expr (ref);
4784 else if (omp_is_reference (var))
4785 ref = build_fold_addr_expr (ref);
4786 ref = fold_convert_loc (clause_loc, ptype, ref);
4787 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
4788 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
4790 y3 = create_tmp_var (ptype);
4791 gimplify_assign (y3, unshare_expr (ref), ilist);
4793 if (is_simd)
4795 y4 = create_tmp_var (ptype);
4796 gimplify_assign (y4, ref, dlist);
4800 tree i = create_tmp_var (TREE_TYPE (v));
4801 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), ilist);
4802 tree body = create_artificial_label (UNKNOWN_LOCATION);
4803 gimple_seq_add_stmt (ilist, gimple_build_label (body));
4804 if (y2)
4806 i2 = create_tmp_var (TREE_TYPE (v));
4807 gimplify_assign (i2, build_int_cst (TREE_TYPE (v), 0), dlist);
4808 body2 = create_artificial_label (UNKNOWN_LOCATION);
4809 end2 = create_artificial_label (UNKNOWN_LOCATION);
4810 gimple_seq_add_stmt (dlist, gimple_build_label (body2));
4812 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4814 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
4815 tree decl_placeholder
4816 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
4817 SET_DECL_VALUE_EXPR (decl_placeholder,
4818 build_simple_mem_ref (y1));
4819 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
4820 SET_DECL_VALUE_EXPR (placeholder,
4821 y3 ? build_simple_mem_ref (y3)
4822 : error_mark_node);
4823 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
4824 x = lang_hooks.decls.omp_clause_default_ctor
4825 (c, build_simple_mem_ref (y1),
4826 y3 ? build_simple_mem_ref (y3) : NULL_TREE);
4827 if (x)
4828 gimplify_and_add (x, ilist);
4829 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
4831 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
4832 lower_omp (&tseq, ctx);
4833 gimple_seq_add_seq (ilist, tseq);
4835 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
4836 if (is_simd)
4838 SET_DECL_VALUE_EXPR (decl_placeholder,
4839 build_simple_mem_ref (y2));
4840 SET_DECL_VALUE_EXPR (placeholder,
4841 build_simple_mem_ref (y4));
4842 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
4843 lower_omp (&tseq, ctx);
4844 gimple_seq_add_seq (dlist, tseq);
4845 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
4847 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
4848 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 0;
4849 if (y2)
4851 x = lang_hooks.decls.omp_clause_dtor
4852 (c, build_simple_mem_ref (y2));
4853 if (x)
4854 gimplify_and_add (x, dlist);
4857 else
4859 x = omp_reduction_init (c, TREE_TYPE (type));
4860 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
4862 /* reduction(-:var) sums up the partial results, so it
4863 acts identically to reduction(+:var). */
4864 if (code == MINUS_EXPR)
4865 code = PLUS_EXPR;
4867 gimplify_assign (build_simple_mem_ref (y1), x, ilist);
4868 if (is_simd)
4870 x = build2 (code, TREE_TYPE (type),
4871 build_simple_mem_ref (y4),
4872 build_simple_mem_ref (y2));
4873 gimplify_assign (build_simple_mem_ref (y4), x, dlist);
4876 gimple *g
4877 = gimple_build_assign (y1, POINTER_PLUS_EXPR, y1,
4878 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4879 gimple_seq_add_stmt (ilist, g);
4880 if (y3)
4882 g = gimple_build_assign (y3, POINTER_PLUS_EXPR, y3,
4883 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4884 gimple_seq_add_stmt (ilist, g);
4886 g = gimple_build_assign (i, PLUS_EXPR, i,
4887 build_int_cst (TREE_TYPE (i), 1));
4888 gimple_seq_add_stmt (ilist, g);
4889 g = gimple_build_cond (LE_EXPR, i, v, body, end);
4890 gimple_seq_add_stmt (ilist, g);
4891 gimple_seq_add_stmt (ilist, gimple_build_label (end));
4892 if (y2)
4894 g = gimple_build_assign (y2, POINTER_PLUS_EXPR, y2,
4895 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4896 gimple_seq_add_stmt (dlist, g);
4897 if (y4)
4899 g = gimple_build_assign
4900 (y4, POINTER_PLUS_EXPR, y4,
4901 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4902 gimple_seq_add_stmt (dlist, g);
4904 g = gimple_build_assign (i2, PLUS_EXPR, i2,
4905 build_int_cst (TREE_TYPE (i2), 1));
4906 gimple_seq_add_stmt (dlist, g);
4907 g = gimple_build_cond (LE_EXPR, i2, v, body2, end2);
4908 gimple_seq_add_stmt (dlist, g);
4909 gimple_seq_add_stmt (dlist, gimple_build_label (end2));
4911 continue;
4913 else if (pass == 2)
4915 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
4916 x = var;
4917 else
4919 bool by_ref = use_pointer_for_field (var, ctx);
4920 x = build_receiver_ref (var, by_ref, ctx);
4922 if (!omp_is_reference (var))
4923 x = build_fold_addr_expr (x);
4924 x = fold_convert (ptr_type_node, x);
4925 unsigned cnt = task_reduction_cnt - 1;
4926 if (!task_reduction_needs_orig_p)
4927 cnt += task_reduction_cntorig_full - task_reduction_cntorig;
4928 else
4929 cnt = task_reduction_cntorig - 1;
4930 tree r = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
4931 size_int (cnt), NULL_TREE, NULL_TREE);
4932 gimplify_assign (r, x, ilist);
4933 continue;
4935 else if (pass == 3)
4937 tree type = TREE_TYPE (new_var);
4938 if (!omp_is_reference (var))
4939 type = build_pointer_type (type);
4940 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
4942 unsigned cnt = task_reduction_cnt - 1;
4943 if (!task_reduction_needs_orig_p)
4944 cnt += (task_reduction_cntorig_full
4945 - task_reduction_cntorig);
4946 else
4947 cnt = task_reduction_cntorig - 1;
4948 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
4949 size_int (cnt), NULL_TREE, NULL_TREE);
4951 else
4953 unsigned int idx = *ctx->task_reduction_map->get (c);
4954 tree off;
4955 if (ctx->task_reductions[1 + idx])
4956 off = fold_convert (sizetype,
4957 ctx->task_reductions[1 + idx]);
4958 else
4959 off = task_reduction_read (ilist, tskred_temp, sizetype,
4960 7 + 3 * idx + 1);
4961 x = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node,
4962 tskred_base, off);
4964 x = fold_convert (type, x);
4965 tree t;
4966 if (omp_is_reference (var))
4968 gimplify_assign (new_var, x, ilist);
4969 t = new_var;
4970 new_var = build_simple_mem_ref (new_var);
4972 else
4974 t = create_tmp_var (type);
4975 gimplify_assign (t, x, ilist);
4976 SET_DECL_VALUE_EXPR (new_var, build_simple_mem_ref (t));
4977 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4979 t = fold_convert (build_pointer_type (boolean_type_node), t);
4980 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t,
4981 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4982 cond = create_tmp_var (TREE_TYPE (t));
4983 gimplify_assign (cond, t, ilist);
4985 else if (is_variable_sized (var))
4987 /* For variable sized types, we need to allocate the
4988 actual storage here. Call alloca and store the
4989 result in the pointer decl that we created elsewhere. */
4990 if (pass == 0)
4991 continue;
4993 if (c_kind != OMP_CLAUSE_FIRSTPRIVATE || !is_task_ctx (ctx))
4995 gcall *stmt;
4996 tree tmp, atmp;
4998 ptr = DECL_VALUE_EXPR (new_var);
4999 gcc_assert (TREE_CODE (ptr) == INDIRECT_REF);
5000 ptr = TREE_OPERAND (ptr, 0);
5001 gcc_assert (DECL_P (ptr));
5002 x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
5004 /* void *tmp = __builtin_alloca_with_align */
5005 atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
5006 stmt = gimple_build_call (atmp, 2, x,
5007 size_int (DECL_ALIGN (var)));
5008 tmp = create_tmp_var_raw (ptr_type_node);
5009 gimple_add_tmp_var (tmp);
5010 gimple_call_set_lhs (stmt, tmp);
5012 gimple_seq_add_stmt (ilist, stmt);
5014 x = fold_convert_loc (clause_loc, TREE_TYPE (ptr), tmp);
5015 gimplify_assign (ptr, x, ilist);
5018 else if (omp_is_reference (var)
5019 && (c_kind != OMP_CLAUSE_FIRSTPRIVATE
5020 || !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c)))
5022 /* For references that are being privatized for Fortran,
5023 allocate new backing storage for the new pointer
5024 variable. This allows us to avoid changing all the
5025 code that expects a pointer to something that expects
5026 a direct variable. */
5027 if (pass == 0)
5028 continue;
5030 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
5031 if (c_kind == OMP_CLAUSE_FIRSTPRIVATE && is_task_ctx (ctx))
5033 x = build_receiver_ref (var, false, ctx);
5034 x = build_fold_addr_expr_loc (clause_loc, x);
5036 else if (TREE_CONSTANT (x))
5038 /* For a reduction in a SIMD loop, defer adding the
5039 initialization of the reference, because if we decide
5040 to use a SIMD array for it, the initialization could cause
5041 an expansion ICE. Ditto for other privatization clauses. */
5042 if (is_simd)
5043 x = NULL_TREE;
5044 else
5046 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
5047 get_name (var));
5048 gimple_add_tmp_var (x);
5049 TREE_ADDRESSABLE (x) = 1;
5050 x = build_fold_addr_expr_loc (clause_loc, x);
5053 else
5055 tree atmp
5056 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
5057 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
5058 tree al = size_int (TYPE_ALIGN (rtype));
5059 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
5062 if (x)
5064 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
5065 gimplify_assign (new_var, x, ilist);
5068 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
5070 else if ((c_kind == OMP_CLAUSE_REDUCTION
5071 || c_kind == OMP_CLAUSE_IN_REDUCTION)
5072 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5074 if (pass == 0)
5075 continue;
5077 else if (pass != 0)
5078 continue;
5080 switch (OMP_CLAUSE_CODE (c))
5082 case OMP_CLAUSE_SHARED:
5083 /* Ignore shared directives in teams construct inside
5084 target construct. */
5085 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
5086 && !is_host_teams_ctx (ctx))
5087 continue;
5088 /* Shared global vars are just accessed directly. */
5089 if (is_global_var (new_var))
5090 break;
5091 /* For taskloop firstprivate/lastprivate, represented
5092 as firstprivate and shared clause on the task, new_var
5093 is the firstprivate var. */
5094 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
5095 break;
5096 /* Set up the DECL_VALUE_EXPR for shared variables now. This
5097 needs to be delayed until after fixup_child_record_type so
5098 that we get the correct type during the dereference. */
5099 by_ref = use_pointer_for_field (var, ctx);
5100 x = build_receiver_ref (var, by_ref, ctx);
5101 SET_DECL_VALUE_EXPR (new_var, x);
5102 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5104 /* ??? If VAR is not passed by reference, and the variable
5105 hasn't been initialized yet, then we'll get a warning for
5106 the store into the omp_data_s structure. Ideally, we'd be
5107 able to notice this and not store anything at all, but
5108 we're generating code too early. Suppress the warning. */
5109 if (!by_ref)
5110 TREE_NO_WARNING (var) = 1;
5111 break;
5113 case OMP_CLAUSE__CONDTEMP_:
5114 if (is_parallel_ctx (ctx))
5116 x = build_receiver_ref (var, false, ctx);
5117 SET_DECL_VALUE_EXPR (new_var, x);
5118 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5120 else if (is_simd && !OMP_CLAUSE__CONDTEMP__ITER (c))
5122 x = build_zero_cst (TREE_TYPE (var));
5123 goto do_private;
5125 break;
5127 case OMP_CLAUSE_LASTPRIVATE:
5128 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
5129 break;
5130 /* FALLTHRU */
5132 case OMP_CLAUSE_PRIVATE:
5133 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE)
5134 x = build_outer_var_ref (var, ctx);
5135 else if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
5137 if (is_task_ctx (ctx))
5138 x = build_receiver_ref (var, false, ctx);
5139 else
5140 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_PRIVATE);
5142 else
5143 x = NULL;
5144 do_private:
5145 tree nx;
5146 bool copy_ctor;
5147 copy_ctor = false;
5148 nx = unshare_expr (new_var);
5149 if (is_simd
5150 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5151 && OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c))
5152 copy_ctor = true;
5153 if (copy_ctor)
5154 nx = lang_hooks.decls.omp_clause_copy_ctor (c, nx, x);
5155 else
5156 nx = lang_hooks.decls.omp_clause_default_ctor (c, nx, x);
5157 if (is_simd)
5159 tree y = lang_hooks.decls.omp_clause_dtor (c, new_var);
5160 if ((TREE_ADDRESSABLE (new_var) || nx || y
5161 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5162 && (gimple_omp_for_collapse (ctx->stmt) != 1
5163 || (gimple_omp_for_index (ctx->stmt, 0)
5164 != new_var)))
5165 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE__CONDTEMP_
5166 || omp_is_reference (var))
5167 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
5168 ivar, lvar))
5170 if (omp_is_reference (var))
5172 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5173 tree new_vard = TREE_OPERAND (new_var, 0);
5174 gcc_assert (DECL_P (new_vard));
5175 SET_DECL_VALUE_EXPR (new_vard,
5176 build_fold_addr_expr (lvar));
5177 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
5180 if (nx)
5182 tree iv = unshare_expr (ivar);
5183 if (copy_ctor)
5184 x = lang_hooks.decls.omp_clause_copy_ctor (c, iv,
5186 else
5187 x = lang_hooks.decls.omp_clause_default_ctor (c,
5191 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE__CONDTEMP_)
5193 x = build2 (MODIFY_EXPR, TREE_TYPE (ivar),
5194 unshare_expr (ivar), x);
5195 nx = x;
5197 if (nx && x)
5198 gimplify_and_add (x, &llist[0]);
5199 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5200 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
5202 tree v = new_var;
5203 if (!DECL_P (v))
5205 gcc_assert (TREE_CODE (v) == MEM_REF);
5206 v = TREE_OPERAND (v, 0);
5207 gcc_assert (DECL_P (v));
5209 v = *ctx->lastprivate_conditional_map->get (v);
5210 tree t = create_tmp_var (TREE_TYPE (v));
5211 tree z = build_zero_cst (TREE_TYPE (v));
5212 tree orig_v
5213 = build_outer_var_ref (var, ctx,
5214 OMP_CLAUSE_LASTPRIVATE);
5215 gimple_seq_add_stmt (dlist,
5216 gimple_build_assign (t, z));
5217 gcc_assert (DECL_HAS_VALUE_EXPR_P (v));
5218 tree civar = DECL_VALUE_EXPR (v);
5219 gcc_assert (TREE_CODE (civar) == ARRAY_REF);
5220 civar = unshare_expr (civar);
5221 TREE_OPERAND (civar, 1) = sctx.idx;
5222 x = build2 (MODIFY_EXPR, TREE_TYPE (t), t,
5223 unshare_expr (civar));
5224 x = build2 (COMPOUND_EXPR, TREE_TYPE (orig_v), x,
5225 build2 (MODIFY_EXPR, TREE_TYPE (orig_v),
5226 orig_v, unshare_expr (ivar)));
5227 tree cond = build2 (LT_EXPR, boolean_type_node, t,
5228 civar);
5229 x = build3 (COND_EXPR, void_type_node, cond, x,
5230 void_node);
5231 gimple_seq tseq = NULL;
5232 gimplify_and_add (x, &tseq);
5233 if (ctx->outer)
5234 lower_omp (&tseq, ctx->outer);
5235 gimple_seq_add_seq (&llist[1], tseq);
5237 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5238 && ctx->for_simd_scan_phase)
5240 x = unshare_expr (ivar);
5241 tree orig_v
5242 = build_outer_var_ref (var, ctx,
5243 OMP_CLAUSE_LASTPRIVATE);
5244 x = lang_hooks.decls.omp_clause_assign_op (c, x,
5245 orig_v);
5246 gimplify_and_add (x, &llist[0]);
5248 if (y)
5250 y = lang_hooks.decls.omp_clause_dtor (c, ivar);
5251 if (y)
5252 gimplify_and_add (y, &llist[1]);
5254 break;
5256 if (omp_is_reference (var))
5258 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5259 tree new_vard = TREE_OPERAND (new_var, 0);
5260 gcc_assert (DECL_P (new_vard));
5261 tree type = TREE_TYPE (TREE_TYPE (new_vard));
5262 x = TYPE_SIZE_UNIT (type);
5263 if (TREE_CONSTANT (x))
5265 x = create_tmp_var_raw (type, get_name (var));
5266 gimple_add_tmp_var (x);
5267 TREE_ADDRESSABLE (x) = 1;
5268 x = build_fold_addr_expr_loc (clause_loc, x);
5269 x = fold_convert_loc (clause_loc,
5270 TREE_TYPE (new_vard), x);
5271 gimplify_assign (new_vard, x, ilist);
5275 if (nx)
5276 gimplify_and_add (nx, ilist);
5277 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5278 && is_simd
5279 && ctx->for_simd_scan_phase)
5281 tree orig_v = build_outer_var_ref (var, ctx,
5282 OMP_CLAUSE_LASTPRIVATE);
5283 x = lang_hooks.decls.omp_clause_assign_op (c, new_var,
5284 orig_v);
5285 gimplify_and_add (x, ilist);
5287 /* FALLTHRU */
5289 do_dtor:
5290 x = lang_hooks.decls.omp_clause_dtor (c, new_var);
5291 if (x)
5292 gimplify_and_add (x, dlist);
5293 break;
5295 case OMP_CLAUSE_LINEAR:
5296 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
5297 goto do_firstprivate;
5298 if (OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
5299 x = NULL;
5300 else
5301 x = build_outer_var_ref (var, ctx);
5302 goto do_private;
5304 case OMP_CLAUSE_FIRSTPRIVATE:
5305 if (is_task_ctx (ctx))
5307 if ((omp_is_reference (var)
5308 && !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c))
5309 || is_variable_sized (var))
5310 goto do_dtor;
5311 else if (is_global_var (maybe_lookup_decl_in_outer_ctx (var,
5312 ctx))
5313 || use_pointer_for_field (var, NULL))
5315 x = build_receiver_ref (var, false, ctx);
5316 SET_DECL_VALUE_EXPR (new_var, x);
5317 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5318 goto do_dtor;
5321 if (OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c)
5322 && omp_is_reference (var))
5324 x = build_outer_var_ref (var, ctx);
5325 gcc_assert (TREE_CODE (x) == MEM_REF
5326 && integer_zerop (TREE_OPERAND (x, 1)));
5327 x = TREE_OPERAND (x, 0);
5328 x = lang_hooks.decls.omp_clause_copy_ctor
5329 (c, unshare_expr (new_var), x);
5330 gimplify_and_add (x, ilist);
5331 goto do_dtor;
5333 do_firstprivate:
5334 x = build_outer_var_ref (var, ctx);
5335 if (is_simd)
5337 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
5338 && gimple_omp_for_combined_into_p (ctx->stmt))
5340 tree t = OMP_CLAUSE_LINEAR_STEP (c);
5341 tree stept = TREE_TYPE (t);
5342 tree ct = omp_find_clause (clauses,
5343 OMP_CLAUSE__LOOPTEMP_);
5344 gcc_assert (ct);
5345 tree l = OMP_CLAUSE_DECL (ct);
5346 tree n1 = fd->loop.n1;
5347 tree step = fd->loop.step;
5348 tree itype = TREE_TYPE (l);
5349 if (POINTER_TYPE_P (itype))
5350 itype = signed_type_for (itype);
5351 l = fold_build2 (MINUS_EXPR, itype, l, n1);
5352 if (TYPE_UNSIGNED (itype)
5353 && fd->loop.cond_code == GT_EXPR)
5354 l = fold_build2 (TRUNC_DIV_EXPR, itype,
5355 fold_build1 (NEGATE_EXPR, itype, l),
5356 fold_build1 (NEGATE_EXPR,
5357 itype, step));
5358 else
5359 l = fold_build2 (TRUNC_DIV_EXPR, itype, l, step);
5360 t = fold_build2 (MULT_EXPR, stept,
5361 fold_convert (stept, l), t);
5363 if (OMP_CLAUSE_LINEAR_ARRAY (c))
5365 if (omp_is_reference (var))
5367 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5368 tree new_vard = TREE_OPERAND (new_var, 0);
5369 gcc_assert (DECL_P (new_vard));
5370 tree type = TREE_TYPE (TREE_TYPE (new_vard));
5371 nx = TYPE_SIZE_UNIT (type);
5372 if (TREE_CONSTANT (nx))
5374 nx = create_tmp_var_raw (type,
5375 get_name (var));
5376 gimple_add_tmp_var (nx);
5377 TREE_ADDRESSABLE (nx) = 1;
5378 nx = build_fold_addr_expr_loc (clause_loc,
5379 nx);
5380 nx = fold_convert_loc (clause_loc,
5381 TREE_TYPE (new_vard),
5382 nx);
5383 gimplify_assign (new_vard, nx, ilist);
5387 x = lang_hooks.decls.omp_clause_linear_ctor
5388 (c, new_var, x, t);
5389 gimplify_and_add (x, ilist);
5390 goto do_dtor;
5393 if (POINTER_TYPE_P (TREE_TYPE (x)))
5394 x = fold_build2 (POINTER_PLUS_EXPR,
5395 TREE_TYPE (x), x, t);
5396 else
5397 x = fold_build2 (PLUS_EXPR, TREE_TYPE (x), x, t);
5400 if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LINEAR
5401 || TREE_ADDRESSABLE (new_var)
5402 || omp_is_reference (var))
5403 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
5404 ivar, lvar))
5406 if (omp_is_reference (var))
5408 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5409 tree new_vard = TREE_OPERAND (new_var, 0);
5410 gcc_assert (DECL_P (new_vard));
5411 SET_DECL_VALUE_EXPR (new_vard,
5412 build_fold_addr_expr (lvar));
5413 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
5415 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR)
5417 tree iv = create_tmp_var (TREE_TYPE (new_var));
5418 x = lang_hooks.decls.omp_clause_copy_ctor (c, iv, x);
5419 gimplify_and_add (x, ilist);
5420 gimple_stmt_iterator gsi
5421 = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
5422 gassign *g
5423 = gimple_build_assign (unshare_expr (lvar), iv);
5424 gsi_insert_before_without_update (&gsi, g,
5425 GSI_SAME_STMT);
5426 tree t = OMP_CLAUSE_LINEAR_STEP (c);
5427 enum tree_code code = PLUS_EXPR;
5428 if (POINTER_TYPE_P (TREE_TYPE (new_var)))
5429 code = POINTER_PLUS_EXPR;
5430 g = gimple_build_assign (iv, code, iv, t);
5431 gsi_insert_before_without_update (&gsi, g,
5432 GSI_SAME_STMT);
5433 break;
5435 x = lang_hooks.decls.omp_clause_copy_ctor
5436 (c, unshare_expr (ivar), x);
5437 gimplify_and_add (x, &llist[0]);
5438 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
5439 if (x)
5440 gimplify_and_add (x, &llist[1]);
5441 break;
5443 if (omp_is_reference (var))
5445 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5446 tree new_vard = TREE_OPERAND (new_var, 0);
5447 gcc_assert (DECL_P (new_vard));
5448 tree type = TREE_TYPE (TREE_TYPE (new_vard));
5449 nx = TYPE_SIZE_UNIT (type);
5450 if (TREE_CONSTANT (nx))
5452 nx = create_tmp_var_raw (type, get_name (var));
5453 gimple_add_tmp_var (nx);
5454 TREE_ADDRESSABLE (nx) = 1;
5455 nx = build_fold_addr_expr_loc (clause_loc, nx);
5456 nx = fold_convert_loc (clause_loc,
5457 TREE_TYPE (new_vard), nx);
5458 gimplify_assign (new_vard, nx, ilist);
5462 x = lang_hooks.decls.omp_clause_copy_ctor
5463 (c, unshare_expr (new_var), x);
5464 gimplify_and_add (x, ilist);
5465 goto do_dtor;
5467 case OMP_CLAUSE__LOOPTEMP_:
5468 case OMP_CLAUSE__REDUCTEMP_:
5469 gcc_assert (is_taskreg_ctx (ctx));
5470 x = build_outer_var_ref (var, ctx);
5471 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
5472 gimplify_and_add (x, ilist);
5473 break;
5475 case OMP_CLAUSE_COPYIN:
5476 by_ref = use_pointer_for_field (var, NULL);
5477 x = build_receiver_ref (var, by_ref, ctx);
5478 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, x);
5479 append_to_statement_list (x, &copyin_seq);
5480 copyin_by_ref |= by_ref;
5481 break;
5483 case OMP_CLAUSE_REDUCTION:
5484 case OMP_CLAUSE_IN_REDUCTION:
5485 /* OpenACC reductions are initialized using the
5486 GOACC_REDUCTION internal function. */
5487 if (is_gimple_omp_oacc (ctx->stmt))
5488 break;
5489 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5491 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
5492 gimple *tseq;
5493 tree ptype = TREE_TYPE (placeholder);
5494 if (cond)
5496 x = error_mark_node;
5497 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c)
5498 && !task_reduction_needs_orig_p)
5499 x = var;
5500 else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
5502 tree pptype = build_pointer_type (ptype);
5503 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5504 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5505 size_int (task_reduction_cnt_full
5506 + task_reduction_cntorig - 1),
5507 NULL_TREE, NULL_TREE);
5508 else
5510 unsigned int idx
5511 = *ctx->task_reduction_map->get (c);
5512 x = task_reduction_read (ilist, tskred_temp,
5513 pptype, 7 + 3 * idx);
5515 x = fold_convert (pptype, x);
5516 x = build_simple_mem_ref (x);
5519 else
5521 x = build_outer_var_ref (var, ctx);
5523 if (omp_is_reference (var)
5524 && !useless_type_conversion_p (ptype, TREE_TYPE (x)))
5525 x = build_fold_addr_expr_loc (clause_loc, x);
5527 SET_DECL_VALUE_EXPR (placeholder, x);
5528 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
5529 tree new_vard = new_var;
5530 if (omp_is_reference (var))
5532 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5533 new_vard = TREE_OPERAND (new_var, 0);
5534 gcc_assert (DECL_P (new_vard));
5536 tree rvar = NULL_TREE, *rvarp = NULL, rvar2 = NULL_TREE;
5537 if (is_simd
5538 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
5539 && OMP_CLAUSE_REDUCTION_INSCAN (c))
5540 rvarp = &rvar;
5541 if (is_simd
5542 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
5543 ivar, lvar, rvarp,
5544 &rvar2))
5546 if (new_vard == new_var)
5548 gcc_assert (DECL_VALUE_EXPR (new_var) == lvar);
5549 SET_DECL_VALUE_EXPR (new_var, ivar);
5551 else
5553 SET_DECL_VALUE_EXPR (new_vard,
5554 build_fold_addr_expr (ivar));
5555 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
5557 x = lang_hooks.decls.omp_clause_default_ctor
5558 (c, unshare_expr (ivar),
5559 build_outer_var_ref (var, ctx));
5560 if (rvarp && ctx->for_simd_scan_phase)
5562 if (x)
5563 gimplify_and_add (x, &llist[0]);
5564 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
5565 if (x)
5566 gimplify_and_add (x, &llist[1]);
5567 break;
5569 else if (rvarp)
5571 if (x)
5573 gimplify_and_add (x, &llist[0]);
5575 tree ivar2 = unshare_expr (lvar);
5576 TREE_OPERAND (ivar2, 1) = sctx.idx;
5577 x = lang_hooks.decls.omp_clause_default_ctor
5578 (c, ivar2, build_outer_var_ref (var, ctx));
5579 gimplify_and_add (x, &llist[0]);
5581 if (rvar2)
5583 x = lang_hooks.decls.omp_clause_default_ctor
5584 (c, unshare_expr (rvar2),
5585 build_outer_var_ref (var, ctx));
5586 gimplify_and_add (x, &llist[0]);
5589 /* For types that need construction, add another
5590 private var which will be default constructed
5591 and optionally initialized with
5592 OMP_CLAUSE_REDUCTION_GIMPLE_INIT; in the
5593 loop we then want to assign this value instead
5594 of constructing and destructing it in each
5595 iteration. */
5596 tree nv = create_tmp_var_raw (TREE_TYPE (ivar));
5597 gimple_add_tmp_var (nv);
5598 ctx->cb.decl_map->put (TREE_OPERAND (rvar2
5599 ? rvar2
5600 : ivar, 0),
5601 nv);
5602 x = lang_hooks.decls.omp_clause_default_ctor
5603 (c, nv, build_outer_var_ref (var, ctx));
5604 gimplify_and_add (x, ilist);
5606 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
5608 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
5609 x = DECL_VALUE_EXPR (new_vard);
5610 tree vexpr = nv;
5611 if (new_vard != new_var)
5612 vexpr = build_fold_addr_expr (nv);
5613 SET_DECL_VALUE_EXPR (new_vard, vexpr);
5614 lower_omp (&tseq, ctx);
5615 SET_DECL_VALUE_EXPR (new_vard, x);
5616 gimple_seq_add_seq (ilist, tseq);
5617 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
5620 x = lang_hooks.decls.omp_clause_dtor (c, nv);
5621 if (x)
5622 gimplify_and_add (x, dlist);
5625 tree ref = build_outer_var_ref (var, ctx);
5626 x = unshare_expr (ivar);
5627 x = lang_hooks.decls.omp_clause_assign_op (c, x,
5628 ref);
5629 gimplify_and_add (x, &llist[0]);
5631 ref = build_outer_var_ref (var, ctx);
5632 x = lang_hooks.decls.omp_clause_assign_op (c, ref,
5633 rvar);
5634 gimplify_and_add (x, &llist[3]);
5636 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
5637 if (new_vard == new_var)
5638 SET_DECL_VALUE_EXPR (new_var, lvar);
5639 else
5640 SET_DECL_VALUE_EXPR (new_vard,
5641 build_fold_addr_expr (lvar));
5643 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
5644 if (x)
5645 gimplify_and_add (x, &llist[1]);
5647 tree ivar2 = unshare_expr (lvar);
5648 TREE_OPERAND (ivar2, 1) = sctx.idx;
5649 x = lang_hooks.decls.omp_clause_dtor (c, ivar2);
5650 if (x)
5651 gimplify_and_add (x, &llist[1]);
5653 if (rvar2)
5655 x = lang_hooks.decls.omp_clause_dtor (c, rvar2);
5656 if (x)
5657 gimplify_and_add (x, &llist[1]);
5659 break;
5661 if (x)
5662 gimplify_and_add (x, &llist[0]);
5663 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
5665 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
5666 lower_omp (&tseq, ctx);
5667 gimple_seq_add_seq (&llist[0], tseq);
5669 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
5670 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
5671 lower_omp (&tseq, ctx);
5672 gimple_seq_add_seq (&llist[1], tseq);
5673 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5674 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
5675 if (new_vard == new_var)
5676 SET_DECL_VALUE_EXPR (new_var, lvar);
5677 else
5678 SET_DECL_VALUE_EXPR (new_vard,
5679 build_fold_addr_expr (lvar));
5680 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
5681 if (x)
5682 gimplify_and_add (x, &llist[1]);
5683 break;
5685 /* If this is a reference to a constant-size reduction var
5686 with a placeholder, we haven't emitted the initializer
5687 for it because that is undesirable if SIMD arrays are used.
5688 But if they aren't used, we need to emit the deferred
5689 initialization now. */
5690 else if (omp_is_reference (var) && is_simd)
5691 handle_simd_reference (clause_loc, new_vard, ilist);
5693 tree lab2 = NULL_TREE;
5694 if (cond)
5696 gimple *g;
5697 if (!is_parallel_ctx (ctx))
5699 tree condv = create_tmp_var (boolean_type_node);
5700 tree m = build_simple_mem_ref (cond);
5701 g = gimple_build_assign (condv, m);
5702 gimple_seq_add_stmt (ilist, g);
5703 tree lab1
5704 = create_artificial_label (UNKNOWN_LOCATION);
5705 lab2 = create_artificial_label (UNKNOWN_LOCATION);
5706 g = gimple_build_cond (NE_EXPR, condv,
5707 boolean_false_node,
5708 lab2, lab1);
5709 gimple_seq_add_stmt (ilist, g);
5710 gimple_seq_add_stmt (ilist,
5711 gimple_build_label (lab1));
5713 g = gimple_build_assign (build_simple_mem_ref (cond),
5714 boolean_true_node);
5715 gimple_seq_add_stmt (ilist, g);
5717 x = lang_hooks.decls.omp_clause_default_ctor
5718 (c, unshare_expr (new_var),
5719 cond ? NULL_TREE
5720 : build_outer_var_ref (var, ctx));
5721 if (x)
5722 gimplify_and_add (x, ilist);
5724 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
5725 && OMP_CLAUSE_REDUCTION_INSCAN (c))
5727 if (ctx->for_simd_scan_phase)
5728 goto do_dtor;
5729 if (x || (!is_simd
5730 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c)))
5732 tree nv = create_tmp_var_raw (TREE_TYPE (new_var));
5733 gimple_add_tmp_var (nv);
5734 ctx->cb.decl_map->put (new_vard, nv);
5735 x = lang_hooks.decls.omp_clause_default_ctor
5736 (c, nv, build_outer_var_ref (var, ctx));
5737 if (x)
5738 gimplify_and_add (x, ilist);
5739 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
5741 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
5742 tree vexpr = nv;
5743 if (new_vard != new_var)
5744 vexpr = build_fold_addr_expr (nv);
5745 SET_DECL_VALUE_EXPR (new_vard, vexpr);
5746 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
5747 lower_omp (&tseq, ctx);
5748 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
5749 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
5750 gimple_seq_add_seq (ilist, tseq);
5752 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
5753 if (is_simd && ctx->scan_exclusive)
5755 tree nv2
5756 = create_tmp_var_raw (TREE_TYPE (new_var));
5757 gimple_add_tmp_var (nv2);
5758 ctx->cb.decl_map->put (nv, nv2);
5759 x = lang_hooks.decls.omp_clause_default_ctor
5760 (c, nv2, build_outer_var_ref (var, ctx));
5761 gimplify_and_add (x, ilist);
5762 x = lang_hooks.decls.omp_clause_dtor (c, nv2);
5763 if (x)
5764 gimplify_and_add (x, dlist);
5766 x = lang_hooks.decls.omp_clause_dtor (c, nv);
5767 if (x)
5768 gimplify_and_add (x, dlist);
5770 else if (is_simd
5771 && ctx->scan_exclusive
5772 && TREE_ADDRESSABLE (TREE_TYPE (new_var)))
5774 tree nv2 = create_tmp_var_raw (TREE_TYPE (new_var));
5775 gimple_add_tmp_var (nv2);
5776 ctx->cb.decl_map->put (new_vard, nv2);
5777 x = lang_hooks.decls.omp_clause_dtor (c, nv2);
5778 if (x)
5779 gimplify_and_add (x, dlist);
5781 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
5782 goto do_dtor;
5785 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
5787 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
5788 lower_omp (&tseq, ctx);
5789 gimple_seq_add_seq (ilist, tseq);
5791 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
5792 if (is_simd)
5794 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
5795 lower_omp (&tseq, ctx);
5796 gimple_seq_add_seq (dlist, tseq);
5797 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5799 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
5800 if (cond)
5802 if (lab2)
5803 gimple_seq_add_stmt (ilist, gimple_build_label (lab2));
5804 break;
5806 goto do_dtor;
5808 else
5810 x = omp_reduction_init (c, TREE_TYPE (new_var));
5811 gcc_assert (TREE_CODE (TREE_TYPE (new_var)) != ARRAY_TYPE);
5812 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
5814 if (cond)
5816 gimple *g;
5817 tree lab2 = NULL_TREE;
5818 /* GOMP_taskgroup_reduction_register memsets the whole
5819 array to zero. If the initializer is zero, we don't
5820 need to initialize it again; just mark it as ever
5821 used unconditionally, i.e. cond = true. */
5822 if (initializer_zerop (x))
5824 g = gimple_build_assign (build_simple_mem_ref (cond),
5825 boolean_true_node);
5826 gimple_seq_add_stmt (ilist, g);
5827 break;
5830 /* Otherwise, emit
5831 if (!cond) { cond = true; new_var = x; } */
5832 if (!is_parallel_ctx (ctx))
5834 tree condv = create_tmp_var (boolean_type_node);
5835 tree m = build_simple_mem_ref (cond);
5836 g = gimple_build_assign (condv, m);
5837 gimple_seq_add_stmt (ilist, g);
5838 tree lab1
5839 = create_artificial_label (UNKNOWN_LOCATION);
5840 lab2 = create_artificial_label (UNKNOWN_LOCATION);
5841 g = gimple_build_cond (NE_EXPR, condv,
5842 boolean_false_node,
5843 lab2, lab1);
5844 gimple_seq_add_stmt (ilist, g);
5845 gimple_seq_add_stmt (ilist,
5846 gimple_build_label (lab1));
5848 g = gimple_build_assign (build_simple_mem_ref (cond),
5849 boolean_true_node);
5850 gimple_seq_add_stmt (ilist, g);
5851 gimplify_assign (new_var, x, ilist);
5852 if (lab2)
5853 gimple_seq_add_stmt (ilist, gimple_build_label (lab2));
5854 break;
5857 /* reduction(-:var) sums up the partial results, so it
5858 acts identically to reduction(+:var). */
5859 if (code == MINUS_EXPR)
5860 code = PLUS_EXPR;
5862 tree new_vard = new_var;
5863 if (is_simd && omp_is_reference (var))
5865 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5866 new_vard = TREE_OPERAND (new_var, 0);
5867 gcc_assert (DECL_P (new_vard));
5869 tree rvar = NULL_TREE, *rvarp = NULL, rvar2 = NULL_TREE;
5870 if (is_simd
5871 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
5872 && OMP_CLAUSE_REDUCTION_INSCAN (c))
5873 rvarp = &rvar;
5874 if (is_simd
5875 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
5876 ivar, lvar, rvarp,
5877 &rvar2))
5879 if (new_vard != new_var)
5881 SET_DECL_VALUE_EXPR (new_vard,
5882 build_fold_addr_expr (lvar));
5883 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
5886 tree ref = build_outer_var_ref (var, ctx);
5888 if (rvarp)
5890 if (ctx->for_simd_scan_phase)
5891 break;
5892 gimplify_assign (ivar, ref, &llist[0]);
5893 ref = build_outer_var_ref (var, ctx);
5894 gimplify_assign (ref, rvar, &llist[3]);
5895 break;
5898 gimplify_assign (unshare_expr (ivar), x, &llist[0]);
5900 if (sctx.is_simt)
5902 if (!simt_lane)
5903 simt_lane = create_tmp_var (unsigned_type_node);
5904 x = build_call_expr_internal_loc
5905 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_BFLY,
5906 TREE_TYPE (ivar), 2, ivar, simt_lane);
5907 x = build2 (code, TREE_TYPE (ivar), ivar, x);
5908 gimplify_assign (ivar, x, &llist[2]);
5910 x = build2 (code, TREE_TYPE (ref), ref, ivar);
5911 ref = build_outer_var_ref (var, ctx);
5912 gimplify_assign (ref, x, &llist[1]);
5915 else
5917 if (omp_is_reference (var) && is_simd)
5918 handle_simd_reference (clause_loc, new_vard, ilist);
5919 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
5920 && OMP_CLAUSE_REDUCTION_INSCAN (c))
5921 break;
5922 gimplify_assign (new_var, x, ilist);
5923 if (is_simd)
5925 tree ref = build_outer_var_ref (var, ctx);
5927 x = build2 (code, TREE_TYPE (ref), ref, new_var);
5928 ref = build_outer_var_ref (var, ctx);
5929 gimplify_assign (ref, x, dlist);
5933 break;
5935 default:
5936 gcc_unreachable ();
5940 if (tskred_avar)
5942 tree clobber = build_constructor (TREE_TYPE (tskred_avar), NULL);
5943 TREE_THIS_VOLATILE (clobber) = 1;
5944 gimple_seq_add_stmt (ilist, gimple_build_assign (tskred_avar, clobber));
5947 if (known_eq (sctx.max_vf, 1U))
5949 sctx.is_simt = false;
5950 if (ctx->lastprivate_conditional_map)
5952 if (gimple_omp_for_combined_into_p (ctx->stmt))
5954 /* Signal to lower_omp_1 that it should use parent context. */
5955 ctx->combined_into_simd_safelen1 = true;
5956 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
5957 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5958 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
5960 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
5961 omp_context *outer = ctx->outer;
5962 if (gimple_code (outer->stmt) == GIMPLE_OMP_SCAN)
5963 outer = outer->outer;
5964 tree *v = ctx->lastprivate_conditional_map->get (o);
5965 tree po = lookup_decl (OMP_CLAUSE_DECL (c), outer);
5966 tree *pv = outer->lastprivate_conditional_map->get (po);
5967 *v = *pv;
5970 else
5972 /* When not vectorized, treat lastprivate(conditional:) like
5973 normal lastprivate, as there will be just one simd lane
5974 writing the privatized variable. */
5975 delete ctx->lastprivate_conditional_map;
5976 ctx->lastprivate_conditional_map = NULL;
5981 if (nonconst_simd_if)
5983 if (sctx.lane == NULL_TREE)
5985 sctx.idx = create_tmp_var (unsigned_type_node);
5986 sctx.lane = create_tmp_var (unsigned_type_node);
5988 /* FIXME: For now. */
5989 sctx.is_simt = false;
5992 if (sctx.lane || sctx.is_simt)
5994 uid = create_tmp_var (ptr_type_node, "simduid");
5995 /* We don't want uninit warnings on simduid; it is always uninitialized,
5996 as we use it not for its value, but for its DECL_UID only. */
5997 TREE_NO_WARNING (uid) = 1;
5998 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SIMDUID_);
5999 OMP_CLAUSE__SIMDUID__DECL (c) = uid;
6000 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
6001 gimple_omp_for_set_clauses (ctx->stmt, c);
6003 /* Emit calls denoting privatized variables and initializing a pointer to
6004 the structure that holds private variables as fields, after the ompdevlow pass. */
6005 if (sctx.is_simt)
6007 sctx.simt_eargs[0] = uid;
6008 gimple *g
6009 = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, sctx.simt_eargs);
6010 gimple_call_set_lhs (g, uid);
6011 gimple_seq_add_stmt (ilist, g);
6012 sctx.simt_eargs.release ();
6014 simtrec = create_tmp_var (ptr_type_node, ".omp_simt");
6015 g = gimple_build_call_internal (IFN_GOMP_SIMT_ENTER_ALLOC, 1, uid);
6016 gimple_call_set_lhs (g, simtrec);
6017 gimple_seq_add_stmt (ilist, g);
6019 if (sctx.lane)
6021 gimple *g = gimple_build_call_internal (IFN_GOMP_SIMD_LANE,
6022 2 + (nonconst_simd_if != NULL),
6023 uid, integer_zero_node,
6024 nonconst_simd_if);
6025 gimple_call_set_lhs (g, sctx.lane);
6026 gimple_stmt_iterator gsi = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
6027 gsi_insert_before_without_update (&gsi, g, GSI_SAME_STMT);
6028 g = gimple_build_assign (sctx.lane, INTEGER_CST,
6029 build_int_cst (unsigned_type_node, 0));
6030 gimple_seq_add_stmt (ilist, g);
6031 if (sctx.lastlane)
6033 g = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
6034 2, uid, sctx.lane);
6035 gimple_call_set_lhs (g, sctx.lastlane);
6036 gimple_seq_add_stmt (dlist, g);
6037 gimple_seq_add_seq (dlist, llist[3]);
6039 /* Emit reductions across SIMT lanes in log_2(simt_vf) steps. */
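/* A rough sketch of the shape of the loop built below (not the exact
   emitted GIMPLE; the names follow the locals used here):

	simt_lane = 1;
	while (simt_lane < simt_vf)
	  {
	    ... llist[2]: ivar = ivar OP GOMP_SIMT_XCHG_BFLY (ivar, simt_lane) ...
	    simt_lane <<= 1;
	  }

   Each lane thus combines with a partner simt_lane apart, doubling the
   stride every step.  */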
6040 if (llist[2])
6042 tree simt_vf = create_tmp_var (unsigned_type_node);
6043 g = gimple_build_call_internal (IFN_GOMP_SIMT_VF, 0);
6044 gimple_call_set_lhs (g, simt_vf);
6045 gimple_seq_add_stmt (dlist, g);
6047 tree t = build_int_cst (unsigned_type_node, 1);
6048 g = gimple_build_assign (simt_lane, INTEGER_CST, t);
6049 gimple_seq_add_stmt (dlist, g);
6051 t = build_int_cst (unsigned_type_node, 0);
6052 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
6053 gimple_seq_add_stmt (dlist, g);
6055 tree body = create_artificial_label (UNKNOWN_LOCATION);
6056 tree header = create_artificial_label (UNKNOWN_LOCATION);
6057 tree end = create_artificial_label (UNKNOWN_LOCATION);
6058 gimple_seq_add_stmt (dlist, gimple_build_goto (header));
6059 gimple_seq_add_stmt (dlist, gimple_build_label (body));
6061 gimple_seq_add_seq (dlist, llist[2]);
6063 g = gimple_build_assign (simt_lane, LSHIFT_EXPR, simt_lane, integer_one_node);
6064 gimple_seq_add_stmt (dlist, g);
6066 gimple_seq_add_stmt (dlist, gimple_build_label (header));
6067 g = gimple_build_cond (LT_EXPR, simt_lane, simt_vf, body, end);
6068 gimple_seq_add_stmt (dlist, g);
6070 gimple_seq_add_stmt (dlist, gimple_build_label (end));
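/* A sketch of the scalar loop generated by the code below for each of
   llist[0] (constructors, into ILIST) and llist[1] (destructors, into
   DLIST); this is an approximation, not the literal GIMPLE:

	vf = GOMP_SIMD_VF (simduid);
	for (sctx.idx = 0; sctx.idx < vf; sctx.idx++)
	  ... llist[i], operating on the sctx.idx-th SIMD array element ...
*/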
6072 for (int i = 0; i < 2; i++)
6073 if (llist[i])
6075 tree vf = create_tmp_var (unsigned_type_node);
6076 g = gimple_build_call_internal (IFN_GOMP_SIMD_VF, 1, uid);
6077 gimple_call_set_lhs (g, vf);
6078 gimple_seq *seq = i == 0 ? ilist : dlist;
6079 gimple_seq_add_stmt (seq, g);
6080 tree t = build_int_cst (unsigned_type_node, 0);
6081 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
6082 gimple_seq_add_stmt (seq, g);
6083 tree body = create_artificial_label (UNKNOWN_LOCATION);
6084 tree header = create_artificial_label (UNKNOWN_LOCATION);
6085 tree end = create_artificial_label (UNKNOWN_LOCATION);
6086 gimple_seq_add_stmt (seq, gimple_build_goto (header));
6087 gimple_seq_add_stmt (seq, gimple_build_label (body));
6088 gimple_seq_add_seq (seq, llist[i]);
6089 t = build_int_cst (unsigned_type_node, 1);
6090 g = gimple_build_assign (sctx.idx, PLUS_EXPR, sctx.idx, t);
6091 gimple_seq_add_stmt (seq, g);
6092 gimple_seq_add_stmt (seq, gimple_build_label (header));
6093 g = gimple_build_cond (LT_EXPR, sctx.idx, vf, body, end);
6094 gimple_seq_add_stmt (seq, g);
6095 gimple_seq_add_stmt (seq, gimple_build_label (end));
6098 if (sctx.is_simt)
6100 gimple_seq_add_seq (dlist, sctx.simt_dlist);
6101 gimple *g
6102 = gimple_build_call_internal (IFN_GOMP_SIMT_EXIT, 1, simtrec);
6103 gimple_seq_add_stmt (dlist, g);
6106 /* The copyin sequence is not to be executed by the main thread, since
6107 that would result in self-copies. Perhaps not visible to scalars,
6108 but it certainly is to C++ operator=. */
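/* I.e., emit (schematically)

	if (__builtin_omp_get_thread_num () != 0)
	  { copyin_seq }

   so that only the non-master threads perform the copy.  */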
6109 if (copyin_seq)
6111 x = build_call_expr (builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM),
6112 0);
6113 x = build2 (NE_EXPR, boolean_type_node, x,
6114 build_int_cst (TREE_TYPE (x), 0));
6115 x = build3 (COND_EXPR, void_type_node, x, copyin_seq, NULL);
6116 gimplify_and_add (x, ilist);
6119 /* If any copyin variable is passed by reference, we must ensure the
6120 master thread doesn't modify it before it is copied over in all
6121 threads. Similarly, for variables in both firstprivate and
6122 lastprivate clauses, we need to ensure that the lastprivate copying
6123 happens after the firstprivate copying in all threads. And similarly
6124 for UDRs if the initializer expression refers to omp_orig. */
6125 if (copyin_by_ref || lastprivate_firstprivate
6126 || (reduction_omp_orig_ref
6127 && !ctx->scan_inclusive
6128 && !ctx->scan_exclusive))
6130 /* Don't add any barrier for #pragma omp simd or
6131 #pragma omp distribute. */
6132 if (!is_task_ctx (ctx)
6133 && (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
6134 || gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_FOR))
6135 gimple_seq_add_stmt (ilist, omp_build_barrier (NULL_TREE));
6138 /* If max_vf is non-zero, then we can use only a vectorization factor
6139 up to the max_vf we chose. So stick it into the safelen clause. */
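/* As an illustrative example: if the user wrote safelen(64) but the
   privatization code above settled on max_vf == 16, a new safelen(16)
   clause is prepended so the vectorizer never assumes more than 16
   concurrent lanes.  */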
6140 if (maybe_ne (sctx.max_vf, 0U))
6142 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
6143 OMP_CLAUSE_SAFELEN);
6144 poly_uint64 safe_len;
6145 if (c == NULL_TREE
6146 || (poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
6147 && maybe_gt (safe_len, sctx.max_vf)))
6149 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN);
6150 OMP_CLAUSE_SAFELEN_EXPR (c) = build_int_cst (integer_type_node,
6151 sctx.max_vf);
6152 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
6153 gimple_omp_for_set_clauses (ctx->stmt, c);
6158 /* Create temporary variables for lastprivate(conditional:) implementation
6159 in context CTX with CLAUSES. */
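/* A rough illustration of the scheme: for

	#pragma omp for lastprivate (conditional: x)

   each thread gets an iteration-typed temporary recording the last
   iteration in which it assigned X; the _condtemp_ clauses created
   here carry those temporaries, and lower_lastprivate_clauses later
   compares them so that the thread with the highest recorded
   iteration writes its private value back to the original X.  */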
6161 static void
6162 lower_lastprivate_conditional_clauses (tree *clauses, omp_context *ctx)
6164 tree iter_type = NULL_TREE;
6165 tree cond_ptr = NULL_TREE;
6166 tree iter_var = NULL_TREE;
6167 bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
6168 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD);
6169 tree next = *clauses;
6170 for (tree c = *clauses; c; c = OMP_CLAUSE_CHAIN (c))
6171 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6172 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
6174 if (is_simd)
6176 tree cc = omp_find_clause (next, OMP_CLAUSE__CONDTEMP_);
6177 gcc_assert (cc);
6178 if (iter_type == NULL_TREE)
6180 iter_type = TREE_TYPE (OMP_CLAUSE_DECL (cc));
6181 iter_var = create_tmp_var_raw (iter_type);
6182 DECL_CONTEXT (iter_var) = current_function_decl;
6183 DECL_SEEN_IN_BIND_EXPR_P (iter_var) = 1;
6184 DECL_CHAIN (iter_var) = ctx->block_vars;
6185 ctx->block_vars = iter_var;
6186 tree c3
6187 = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
6188 OMP_CLAUSE__CONDTEMP__ITER (c3) = 1;
6189 OMP_CLAUSE_DECL (c3) = iter_var;
6190 OMP_CLAUSE_CHAIN (c3) = *clauses;
6191 *clauses = c3;
6192 ctx->lastprivate_conditional_map = new hash_map<tree, tree>;
6194 next = OMP_CLAUSE_CHAIN (cc);
6195 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
6196 tree v = lookup_decl (OMP_CLAUSE_DECL (cc), ctx);
6197 ctx->lastprivate_conditional_map->put (o, v);
6198 continue;
6200 if (iter_type == NULL)
6202 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR)
6204 struct omp_for_data fd;
6205 omp_extract_for_data (as_a <gomp_for *> (ctx->stmt), &fd,
6206 NULL);
6207 iter_type = unsigned_type_for (fd.iter_type);
6209 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
6210 iter_type = unsigned_type_node;
6211 tree c2 = omp_find_clause (*clauses, OMP_CLAUSE__CONDTEMP_);
6212 if (c2)
6214 cond_ptr
6215 = lookup_decl_in_outer_ctx (OMP_CLAUSE_DECL (c2), ctx);
6216 OMP_CLAUSE_DECL (c2) = cond_ptr;
6218 else
6220 cond_ptr = create_tmp_var_raw (build_pointer_type (iter_type));
6221 DECL_CONTEXT (cond_ptr) = current_function_decl;
6222 DECL_SEEN_IN_BIND_EXPR_P (cond_ptr) = 1;
6223 DECL_CHAIN (cond_ptr) = ctx->block_vars;
6224 ctx->block_vars = cond_ptr;
6225 c2 = build_omp_clause (UNKNOWN_LOCATION,
6226 OMP_CLAUSE__CONDTEMP_);
6227 OMP_CLAUSE_DECL (c2) = cond_ptr;
6228 OMP_CLAUSE_CHAIN (c2) = *clauses;
6229 *clauses = c2;
6231 iter_var = create_tmp_var_raw (iter_type);
6232 DECL_CONTEXT (iter_var) = current_function_decl;
6233 DECL_SEEN_IN_BIND_EXPR_P (iter_var) = 1;
6234 DECL_CHAIN (iter_var) = ctx->block_vars;
6235 ctx->block_vars = iter_var;
6236 tree c3
6237 = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
6238 OMP_CLAUSE__CONDTEMP__ITER (c3) = 1;
6239 OMP_CLAUSE_DECL (c3) = iter_var;
6240 OMP_CLAUSE_CHAIN (c3) = OMP_CLAUSE_CHAIN (c2);
6241 OMP_CLAUSE_CHAIN (c2) = c3;
6242 ctx->lastprivate_conditional_map = new hash_map<tree, tree>;
6244 tree v = create_tmp_var_raw (iter_type);
6245 DECL_CONTEXT (v) = current_function_decl;
6246 DECL_SEEN_IN_BIND_EXPR_P (v) = 1;
6247 DECL_CHAIN (v) = ctx->block_vars;
6248 ctx->block_vars = v;
6249 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
6250 ctx->lastprivate_conditional_map->put (o, v);
6255 /* Generate code to implement the LASTPRIVATE clauses. This is used for
6256 both parallel and workshare constructs. PREDICATE may be NULL if it's
6257 always true. BODY_P is the sequence into which to insert early
6258 initialization if needed, STMT_LIST is where the non-conditional
6259 lastprivate handling goes, and CSTMT_LIST is a sequence that needs to
6260 be run in a critical section. */
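/* Schematically, for "#pragma omp for lastprivate (x)" the code
   appended to STMT_LIST has the shape

	if (<PREDICATE: this thread ran the sequentially last iteration>)
	  x = x_private;

   (this is a sketch; the actual sequence is built statement by
   statement below).  */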
6262 static void
6263 lower_lastprivate_clauses (tree clauses, tree predicate, gimple_seq *body_p,
6264 gimple_seq *stmt_list, gimple_seq *cstmt_list,
6265 omp_context *ctx)
6267 tree x, c, label = NULL, orig_clauses = clauses;
6268 bool par_clauses = false;
6269 tree simduid = NULL, lastlane = NULL, simtcond = NULL, simtlast = NULL;
6270 unsigned HOST_WIDE_INT conditional_off = 0;
6271 gimple_seq post_stmt_list = NULL;
6273 /* Early exit if there are no lastprivate or linear clauses. */
6274 for (; clauses ; clauses = OMP_CLAUSE_CHAIN (clauses))
6275 if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LASTPRIVATE
6276 || (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LINEAR
6277 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (clauses)))
6278 break;
6279 if (clauses == NULL)
6281 /* If this was a workshare clause, see if it had been combined
6282 with its parallel. In that case, look for the clauses on the
6283 parallel statement itself. */
6284 if (is_parallel_ctx (ctx))
6285 return;
6287 ctx = ctx->outer;
6288 if (ctx == NULL || !is_parallel_ctx (ctx))
6289 return;
6291 clauses = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
6292 OMP_CLAUSE_LASTPRIVATE);
6293 if (clauses == NULL)
6294 return;
6295 par_clauses = true;
6298 bool maybe_simt = false;
6299 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
6300 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
6302 maybe_simt = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMT_);
6303 simduid = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMDUID_);
6304 if (simduid)
6305 simduid = OMP_CLAUSE__SIMDUID__DECL (simduid);
6308 if (predicate)
6310 gcond *stmt;
6311 tree label_true, arm1, arm2;
6312 enum tree_code pred_code = TREE_CODE (predicate);
6314 label = create_artificial_label (UNKNOWN_LOCATION);
6315 label_true = create_artificial_label (UNKNOWN_LOCATION);
6316 if (TREE_CODE_CLASS (pred_code) == tcc_comparison)
6318 arm1 = TREE_OPERAND (predicate, 0);
6319 arm2 = TREE_OPERAND (predicate, 1);
6320 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
6321 gimplify_expr (&arm2, stmt_list, NULL, is_gimple_val, fb_rvalue);
6323 else
6325 arm1 = predicate;
6326 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
6327 arm2 = boolean_false_node;
6328 pred_code = NE_EXPR;
6330 if (maybe_simt)
6332 c = build2 (pred_code, boolean_type_node, arm1, arm2);
6333 c = fold_convert (integer_type_node, c);
6334 simtcond = create_tmp_var (integer_type_node);
6335 gimplify_assign (simtcond, c, stmt_list);
6336 gcall *g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY,
6337 1, simtcond);
6338 c = create_tmp_var (integer_type_node);
6339 gimple_call_set_lhs (g, c);
6340 gimple_seq_add_stmt (stmt_list, g);
6341 stmt = gimple_build_cond (NE_EXPR, c, integer_zero_node,
6342 label_true, label);
6344 else
6345 stmt = gimple_build_cond (pred_code, arm1, arm2, label_true, label);
6346 gimple_seq_add_stmt (stmt_list, stmt);
6347 gimple_seq_add_stmt (stmt_list, gimple_build_label (label_true));
6350 tree cond_ptr = NULL_TREE;
6351 for (c = clauses; c ;)
6353 tree var, new_var;
6354 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
6355 gimple_seq *this_stmt_list = stmt_list;
6356 tree lab2 = NULL_TREE;
6358 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6359 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
6360 && ctx->lastprivate_conditional_map
6361 && !ctx->combined_into_simd_safelen1)
6363 gcc_assert (body_p);
6364 if (simduid)
6365 goto next;
6366 if (cond_ptr == NULL_TREE)
6368 cond_ptr = omp_find_clause (orig_clauses, OMP_CLAUSE__CONDTEMP_);
6369 cond_ptr = OMP_CLAUSE_DECL (cond_ptr);
6371 tree type = TREE_TYPE (TREE_TYPE (cond_ptr));
6372 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
6373 tree v = *ctx->lastprivate_conditional_map->get (o);
6374 gimplify_assign (v, build_zero_cst (type), body_p);
6375 this_stmt_list = cstmt_list;
6376 tree mem;
6377 if (POINTER_TYPE_P (TREE_TYPE (cond_ptr)))
6379 mem = build2 (MEM_REF, type, cond_ptr,
6380 build_int_cst (TREE_TYPE (cond_ptr),
6381 conditional_off));
6382 conditional_off += tree_to_uhwi (TYPE_SIZE_UNIT (type));
6384 else
6385 mem = build4 (ARRAY_REF, type, cond_ptr,
6386 size_int (conditional_off++), NULL_TREE, NULL_TREE);
6387 tree mem2 = copy_node (mem);
6388 gimple_seq seq = NULL;
6389 mem = force_gimple_operand (mem, &seq, true, NULL_TREE);
6390 gimple_seq_add_seq (this_stmt_list, seq);
6391 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
6392 lab2 = create_artificial_label (UNKNOWN_LOCATION);
6393 gimple *g = gimple_build_cond (GT_EXPR, v, mem, lab1, lab2);
6394 gimple_seq_add_stmt (this_stmt_list, g);
6395 gimple_seq_add_stmt (this_stmt_list, gimple_build_label (lab1));
6396 gimplify_assign (mem2, v, this_stmt_list);
6398 else if (predicate
6399 && ctx->combined_into_simd_safelen1
6400 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6401 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
6402 && ctx->lastprivate_conditional_map)
6403 this_stmt_list = &post_stmt_list;
6405 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6406 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
6407 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
6409 var = OMP_CLAUSE_DECL (c);
6410 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6411 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
6412 && is_taskloop_ctx (ctx))
6414 gcc_checking_assert (ctx->outer && is_task_ctx (ctx->outer));
6415 new_var = lookup_decl (var, ctx->outer);
6417 else
6419 new_var = lookup_decl (var, ctx);
6420 /* Avoid uninitialized warnings for lastprivate and
6421 for linear iterators. */
6422 if (predicate
6423 && (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6424 || OMP_CLAUSE_LINEAR_NO_COPYIN (c)))
6425 TREE_NO_WARNING (new_var) = 1;
6428 if (!maybe_simt && simduid && DECL_HAS_VALUE_EXPR_P (new_var))
6430 tree val = DECL_VALUE_EXPR (new_var);
6431 if (TREE_CODE (val) == ARRAY_REF
6432 && VAR_P (TREE_OPERAND (val, 0))
6433 && lookup_attribute ("omp simd array",
6434 DECL_ATTRIBUTES (TREE_OPERAND (val,
6435 0))))
6437 if (lastlane == NULL)
6439 lastlane = create_tmp_var (unsigned_type_node);
6440 gcall *g
6441 = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
6442 2, simduid,
6443 TREE_OPERAND (val, 1));
6444 gimple_call_set_lhs (g, lastlane);
6445 gimple_seq_add_stmt (this_stmt_list, g);
6447 new_var = build4 (ARRAY_REF, TREE_TYPE (val),
6448 TREE_OPERAND (val, 0), lastlane,
6449 NULL_TREE, NULL_TREE);
6450 TREE_THIS_NOTRAP (new_var) = 1;
6453 else if (maybe_simt)
6455 tree val = (DECL_HAS_VALUE_EXPR_P (new_var)
6456 ? DECL_VALUE_EXPR (new_var)
6457 : new_var);
6458 if (simtlast == NULL)
6460 simtlast = create_tmp_var (unsigned_type_node);
6461 gcall *g = gimple_build_call_internal
6462 (IFN_GOMP_SIMT_LAST_LANE, 1, simtcond);
6463 gimple_call_set_lhs (g, simtlast);
6464 gimple_seq_add_stmt (this_stmt_list, g);
6466 x = build_call_expr_internal_loc
6467 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_IDX,
6468 TREE_TYPE (val), 2, val, simtlast);
6469 new_var = unshare_expr (new_var);
6470 gimplify_assign (new_var, x, this_stmt_list);
6471 new_var = unshare_expr (new_var);
6474 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6475 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
6477 lower_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
6478 gimple_seq_add_seq (this_stmt_list,
6479 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
6480 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) = NULL;
6482 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
6483 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
6485 lower_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
6486 gimple_seq_add_seq (this_stmt_list,
6487 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
6488 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) = NULL;
6491 x = NULL_TREE;
6492 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6493 && OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c)
6494 && is_taskloop_ctx (ctx))
6496 tree ovar = maybe_lookup_decl_in_outer_ctx (var,
6497 ctx->outer->outer);
6498 if (is_global_var (ovar))
6499 x = ovar;
6501 if (!x)
6502 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_LASTPRIVATE);
6503 if (omp_is_reference (var))
6504 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
6505 x = lang_hooks.decls.omp_clause_assign_op (c, x, new_var);
6506 gimplify_and_add (x, this_stmt_list);
6508 if (lab2)
6509 gimple_seq_add_stmt (this_stmt_list, gimple_build_label (lab2));
6512 next:
6513 c = OMP_CLAUSE_CHAIN (c);
6514 if (c == NULL && !par_clauses)
6516 /* If this was a workshare clause, see if it had been combined
6517 with its parallel. In that case, continue looking for the
6518 clauses also on the parallel statement itself. */
6519 if (is_parallel_ctx (ctx))
6520 break;
6522 ctx = ctx->outer;
6523 if (ctx == NULL || !is_parallel_ctx (ctx))
6524 break;
6526 c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
6527 OMP_CLAUSE_LASTPRIVATE);
6528 par_clauses = true;
6532 if (label)
6533 gimple_seq_add_stmt (stmt_list, gimple_build_label (label));
6534 gimple_seq_add_seq (stmt_list, post_stmt_list);
6537 /* Lower the OpenACC reductions of CLAUSES for compute axis LEVEL
6538 (which might be a placeholder). INNER is true if this is an inner
6539 axis of a multi-axis loop. FORK and JOIN are (optional) fork and
6540 join markers. Generate the before-loop forking sequence in
6541 FORK_SEQ and the after-loop joining sequence in JOIN_SEQ. The
6542 general form of these sequences is
6544 GOACC_REDUCTION_SETUP
6545 GOACC_FORK
6546 GOACC_REDUCTION_INIT
6548 GOACC_REDUCTION_FINI
6549 GOACC_JOIN
6550 GOACC_REDUCTION_TEARDOWN. */
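/* As a sketch, a single "reduction (+:sum)" clause expands to roughly
   the following (v1/v2/v3 are the temporaries created below):

	v1 = GOACC_REDUCTION (SETUP, ref_to_res, sum, level, +, offset);
	GOACC_FORK
	v2 = GOACC_REDUCTION (INIT, ref_to_res, v1, level, +, offset);
	... loop body ...
	v3 = GOACC_REDUCTION (FINI, ref_to_res, v2, level, +, offset);
	GOACC_JOIN
	sum = GOACC_REDUCTION (TEARDOWN, ref_to_res, v3, level, +, offset);  */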
6552 static void
6553 lower_oacc_reductions (location_t loc, tree clauses, tree level, bool inner,
6554 gcall *fork, gcall *join, gimple_seq *fork_seq,
6555 gimple_seq *join_seq, omp_context *ctx)
6557 gimple_seq before_fork = NULL;
6558 gimple_seq after_fork = NULL;
6559 gimple_seq before_join = NULL;
6560 gimple_seq after_join = NULL;
6561 tree init_code = NULL_TREE, fini_code = NULL_TREE,
6562 setup_code = NULL_TREE, teardown_code = NULL_TREE;
6563 unsigned offset = 0;
6565 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
6566 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
6568 tree orig = OMP_CLAUSE_DECL (c);
6569 tree var = maybe_lookup_decl (orig, ctx);
6570 tree ref_to_res = NULL_TREE;
6571 tree incoming, outgoing, v1, v2, v3;
6572 bool is_private = false;
6574 enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
6575 if (rcode == MINUS_EXPR)
6576 rcode = PLUS_EXPR;
6577 else if (rcode == TRUTH_ANDIF_EXPR)
6578 rcode = BIT_AND_EXPR;
6579 else if (rcode == TRUTH_ORIF_EXPR)
6580 rcode = BIT_IOR_EXPR;
6581 tree op = build_int_cst (unsigned_type_node, rcode);
6583 if (!var)
6584 var = orig;
6586 incoming = outgoing = var;
6588 if (!inner)
6590 /* See if an outer construct also reduces this variable. */
6591 omp_context *outer = ctx;
6593 while (omp_context *probe = outer->outer)
6595 enum gimple_code type = gimple_code (probe->stmt);
6596 tree cls;
6598 switch (type)
6600 case GIMPLE_OMP_FOR:
6601 cls = gimple_omp_for_clauses (probe->stmt);
6602 break;
6604 case GIMPLE_OMP_TARGET:
6605 if (gimple_omp_target_kind (probe->stmt)
6606 != GF_OMP_TARGET_KIND_OACC_PARALLEL)
6607 goto do_lookup;
6609 cls = gimple_omp_target_clauses (probe->stmt);
6610 break;
6612 default:
6613 goto do_lookup;
6616 outer = probe;
6617 for (; cls; cls = OMP_CLAUSE_CHAIN (cls))
6618 if (OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_REDUCTION
6619 && orig == OMP_CLAUSE_DECL (cls))
6621 incoming = outgoing = lookup_decl (orig, probe);
6622 goto has_outer_reduction;
6624 else if ((OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_FIRSTPRIVATE
6625 || OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_PRIVATE)
6626 && orig == OMP_CLAUSE_DECL (cls))
6628 is_private = true;
6629 goto do_lookup;
6633 do_lookup:
6634 /* This is the outermost construct with this reduction,
6635 see if there's a mapping for it. */
6636 if (gimple_code (outer->stmt) == GIMPLE_OMP_TARGET
6637 && maybe_lookup_field (orig, outer) && !is_private)
6639 ref_to_res = build_receiver_ref (orig, false, outer);
6640 if (omp_is_reference (orig))
6641 ref_to_res = build_simple_mem_ref (ref_to_res);
6643 tree type = TREE_TYPE (var);
6644 if (POINTER_TYPE_P (type))
6645 type = TREE_TYPE (type);
6647 outgoing = var;
6648 incoming = omp_reduction_init_op (loc, rcode, type);
6650 else
6652 /* Try to look at enclosing contexts for the reduction var;
6653 use the original if no mapping is found. */
6654 tree t = NULL_TREE;
6655 omp_context *c = ctx->outer;
6656 while (c && !t)
6658 t = maybe_lookup_decl (orig, c);
6659 c = c->outer;
6661 incoming = outgoing = (t ? t : orig);
6664 has_outer_reduction:;
6667 if (!ref_to_res)
6668 ref_to_res = integer_zero_node;
6670 if (omp_is_reference (orig))
6672 tree type = TREE_TYPE (var);
6673 const char *id = IDENTIFIER_POINTER (DECL_NAME (var));
6675 if (!inner)
6677 tree x = create_tmp_var (TREE_TYPE (type), id);
6678 gimplify_assign (var, build_fold_addr_expr (x), fork_seq);
6681 v1 = create_tmp_var (type, id);
6682 v2 = create_tmp_var (type, id);
6683 v3 = create_tmp_var (type, id);
6685 gimplify_assign (v1, var, fork_seq);
6686 gimplify_assign (v2, var, fork_seq);
6687 gimplify_assign (v3, var, fork_seq);
6689 var = build_simple_mem_ref (var);
6690 v1 = build_simple_mem_ref (v1);
6691 v2 = build_simple_mem_ref (v2);
6692 v3 = build_simple_mem_ref (v3);
6693 outgoing = build_simple_mem_ref (outgoing);
6695 if (!TREE_CONSTANT (incoming))
6696 incoming = build_simple_mem_ref (incoming);
6698 else
6699 v1 = v2 = v3 = var;
6701 /* Determine the position in the reduction buffer, which may be used
6702 by the target. The parser has ensured that this is not a
6703 variable-sized type. */
6704 fixed_size_mode mode
6705 = as_a <fixed_size_mode> (TYPE_MODE (TREE_TYPE (var)));
6706 unsigned align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
6707 offset = (offset + align - 1) & ~(align - 1);
6708 tree off = build_int_cst (sizetype, offset);
6709 offset += GET_MODE_SIZE (mode);
6711 if (!init_code)
6713 init_code = build_int_cst (integer_type_node,
6714 IFN_GOACC_REDUCTION_INIT);
6715 fini_code = build_int_cst (integer_type_node,
6716 IFN_GOACC_REDUCTION_FINI);
6717 setup_code = build_int_cst (integer_type_node,
6718 IFN_GOACC_REDUCTION_SETUP);
6719 teardown_code = build_int_cst (integer_type_node,
6720 IFN_GOACC_REDUCTION_TEARDOWN);
6723 tree setup_call
6724 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
6725 TREE_TYPE (var), 6, setup_code,
6726 unshare_expr (ref_to_res),
6727 incoming, level, op, off);
6728 tree init_call
6729 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
6730 TREE_TYPE (var), 6, init_code,
6731 unshare_expr (ref_to_res),
6732 v1, level, op, off);
6733 tree fini_call
6734 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
6735 TREE_TYPE (var), 6, fini_code,
6736 unshare_expr (ref_to_res),
6737 v2, level, op, off);
6738 tree teardown_call
6739 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
6740 TREE_TYPE (var), 6, teardown_code,
6741 ref_to_res, v3, level, op, off);
6743 gimplify_assign (v1, setup_call, &before_fork);
6744 gimplify_assign (v2, init_call, &after_fork);
6745 gimplify_assign (v3, fini_call, &before_join);
6746 gimplify_assign (outgoing, teardown_call, &after_join);
6749 /* Now stitch things together. */
6750 gimple_seq_add_seq (fork_seq, before_fork);
6751 if (fork)
6752 gimple_seq_add_stmt (fork_seq, fork);
6753 gimple_seq_add_seq (fork_seq, after_fork);
6755 gimple_seq_add_seq (join_seq, before_join);
6756 if (join)
6757 gimple_seq_add_stmt (join_seq, join);
6758 gimple_seq_add_seq (join_seq, after_join);
6761 /* Generate code to implement the REDUCTION clauses and append it
6762 to STMT_SEQP. CLIST, if non-NULL, is a pointer to a sequence
6763 that should also be emitted inside of the critical section;
6764 in that case clear *CLIST afterwards, otherwise leave it as is
6765 and let the caller emit it itself. */
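/* For instance (a rough sketch): a single scalar "reduction (+:s)"
   clause becomes an atomic update,

	#pragma omp atomic update
	s_orig = s_orig + s_private;

   whereas multiple clauses, array sections and UDRs fall back to a
   region protected by GOMP_atomic_start/GOMP_atomic_end.  */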
6767 static void
6768 lower_reduction_clauses (tree clauses, gimple_seq *stmt_seqp,
6769 gimple_seq *clist, omp_context *ctx)
6771 gimple_seq sub_seq = NULL;
6772 gimple *stmt;
6773 tree x, c;
6774 int count = 0;
6776 /* OpenACC loop reductions are handled elsewhere. */
6777 if (is_gimple_omp_oacc (ctx->stmt))
6778 return;
6780 /* SIMD reductions are handled in lower_rec_input_clauses. */
6781 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
6782 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
6783 return;
6785 /* inscan reductions are handled elsewhere. */
6786 if (ctx->scan_inclusive || ctx->scan_exclusive)
6787 return;
6789 /* First see if there is exactly one reduction clause. Use an OMP_ATOMIC
6790 update in that case; otherwise use a lock. */
6791 for (c = clauses; c && count < 2; c = OMP_CLAUSE_CHAIN (c))
6792 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6793 && !OMP_CLAUSE_REDUCTION_TASK (c))
6795 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
6796 || TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
6798 /* Never use OMP_ATOMIC for array reductions or UDRs. */
6799 count = -1;
6800 break;
6802 count++;
6805 if (count == 0)
6806 return;
6808 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
6810 tree var, ref, new_var, orig_var;
6811 enum tree_code code;
6812 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
6814 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
6815 || OMP_CLAUSE_REDUCTION_TASK (c))
6816 continue;
6818 enum omp_clause_code ccode = OMP_CLAUSE_REDUCTION;
6819 orig_var = var = OMP_CLAUSE_DECL (c);
6820 if (TREE_CODE (var) == MEM_REF)
6822 var = TREE_OPERAND (var, 0);
6823 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
6824 var = TREE_OPERAND (var, 0);
6825 if (TREE_CODE (var) == ADDR_EXPR)
6826 var = TREE_OPERAND (var, 0);
6827 else
6829 /* If this is a pointer- or reference-based array
6830 section, the var could be private in the outer
6831 context, e.g. on an orphaned loop construct. Pretend this
6832 is the private variable's outer reference. */
6833 ccode = OMP_CLAUSE_PRIVATE;
6834 if (TREE_CODE (var) == INDIRECT_REF)
6835 var = TREE_OPERAND (var, 0);
6837 orig_var = var;
6838 if (is_variable_sized (var))
6840 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
6841 var = DECL_VALUE_EXPR (var);
6842 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
6843 var = TREE_OPERAND (var, 0);
6844 gcc_assert (DECL_P (var));
6847 new_var = lookup_decl (var, ctx);
6848 if (var == OMP_CLAUSE_DECL (c) && omp_is_reference (var))
6849 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
6850 ref = build_outer_var_ref (var, ctx, ccode);
6851 code = OMP_CLAUSE_REDUCTION_CODE (c);
6853 /* reduction(-:var) sums up the partial results, so it acts
6854 identically to reduction(+:var). */
6855 if (code == MINUS_EXPR)
6856 code = PLUS_EXPR;
6858 if (count == 1)
6860 tree addr = build_fold_addr_expr_loc (clause_loc, ref);
6862 addr = save_expr (addr);
6863 ref = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (addr)), addr);
6864 x = fold_build2_loc (clause_loc, code, TREE_TYPE (ref), ref, new_var);
6865 x = build2 (OMP_ATOMIC, void_type_node, addr, x);
6866 OMP_ATOMIC_MEMORY_ORDER (x) = OMP_MEMORY_ORDER_RELAXED;
6867 gimplify_and_add (x, stmt_seqp);
6868 return;
6870 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
6872 tree d = OMP_CLAUSE_DECL (c);
6873 tree type = TREE_TYPE (d);
6874 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
6875 tree i = create_tmp_var (TREE_TYPE (v));
6876 tree ptype = build_pointer_type (TREE_TYPE (type));
6877 tree bias = TREE_OPERAND (d, 1);
6878 d = TREE_OPERAND (d, 0);
6879 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
6881 tree b = TREE_OPERAND (d, 1);
6882 b = maybe_lookup_decl (b, ctx);
6883 if (b == NULL)
6885 b = TREE_OPERAND (d, 1);
6886 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
6888 if (integer_zerop (bias))
6889 bias = b;
6890 else
6892 bias = fold_convert_loc (clause_loc, TREE_TYPE (b), bias);
6893 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
6894 TREE_TYPE (b), b, bias);
6896 d = TREE_OPERAND (d, 0);
6898 /* For ref, build_outer_var_ref already performs this, so
6899 only new_var needs a dereference. */
6900 if (TREE_CODE (d) == INDIRECT_REF)
6902 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
6903 gcc_assert (omp_is_reference (var) && var == orig_var);
6905 else if (TREE_CODE (d) == ADDR_EXPR)
6907 if (orig_var == var)
6909 new_var = build_fold_addr_expr (new_var);
6910 ref = build_fold_addr_expr (ref);
6913 else
6915 gcc_assert (orig_var == var);
6916 if (omp_is_reference (var))
6917 ref = build_fold_addr_expr (ref);
6919 if (DECL_P (v))
6921 tree t = maybe_lookup_decl (v, ctx);
6922 if (t)
6923 v = t;
6924 else
6925 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
6926 gimplify_expr (&v, stmt_seqp, NULL, is_gimple_val, fb_rvalue);
6928 if (!integer_zerop (bias))
6930 bias = fold_convert_loc (clause_loc, sizetype, bias);
6931 new_var = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
6932 TREE_TYPE (new_var), new_var,
6933 unshare_expr (bias));
6934 ref = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
6935 TREE_TYPE (ref), ref, bias);
6937 new_var = fold_convert_loc (clause_loc, ptype, new_var);
6938 ref = fold_convert_loc (clause_loc, ptype, ref);
6939 tree m = create_tmp_var (ptype);
6940 gimplify_assign (m, new_var, stmt_seqp);
6941 new_var = m;
6942 m = create_tmp_var (ptype);
6943 gimplify_assign (m, ref, stmt_seqp);
6944 ref = m;
6945 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), stmt_seqp);
6946 tree body = create_artificial_label (UNKNOWN_LOCATION);
6947 tree end = create_artificial_label (UNKNOWN_LOCATION);
6948 gimple_seq_add_stmt (&sub_seq, gimple_build_label (body));
6949 tree priv = build_simple_mem_ref_loc (clause_loc, new_var);
6950 tree out = build_simple_mem_ref_loc (clause_loc, ref);
6951 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
6953 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
6954 tree decl_placeholder
6955 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
6956 SET_DECL_VALUE_EXPR (placeholder, out);
6957 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
6958 SET_DECL_VALUE_EXPR (decl_placeholder, priv);
6959 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
6960 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
6961 gimple_seq_add_seq (&sub_seq,
6962 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
6963 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
6964 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
6965 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
6967 else
6969 x = build2 (code, TREE_TYPE (out), out, priv);
6970 out = unshare_expr (out);
6971 gimplify_assign (out, x, &sub_seq);
6973 gimple *g = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
6974 TYPE_SIZE_UNIT (TREE_TYPE (type)));
6975 gimple_seq_add_stmt (&sub_seq, g);
6976 g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
6977 TYPE_SIZE_UNIT (TREE_TYPE (type)));
6978 gimple_seq_add_stmt (&sub_seq, g);
6979 g = gimple_build_assign (i, PLUS_EXPR, i,
6980 build_int_cst (TREE_TYPE (i), 1));
6981 gimple_seq_add_stmt (&sub_seq, g);
6982 g = gimple_build_cond (LE_EXPR, i, v, body, end);
6983 gimple_seq_add_stmt (&sub_seq, g);
6984 gimple_seq_add_stmt (&sub_seq, gimple_build_label (end));
6986 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
6988 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
6990 if (omp_is_reference (var)
6991 && !useless_type_conversion_p (TREE_TYPE (placeholder),
6992 TREE_TYPE (ref)))
6993 ref = build_fold_addr_expr_loc (clause_loc, ref);
6994 SET_DECL_VALUE_EXPR (placeholder, ref);
6995 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
6996 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
6997 gimple_seq_add_seq (&sub_seq, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
6998 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
6999 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
7001 else
7003 x = build2 (code, TREE_TYPE (ref), ref, new_var);
7004 ref = build_outer_var_ref (var, ctx);
7005 gimplify_assign (ref, x, &sub_seq);
7009 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START),
7010 0);
7011 gimple_seq_add_stmt (stmt_seqp, stmt);
7013 gimple_seq_add_seq (stmt_seqp, sub_seq);
7015 if (clist)
7017 gimple_seq_add_seq (stmt_seqp, *clist);
7018 *clist = NULL;
7021 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END),
7022 0);
7023 gimple_seq_add_stmt (stmt_seqp, stmt);
7027 /* Generate code to implement the COPYPRIVATE clauses. */
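/* Roughly: the one thread that executed a "#pragma omp single
   copyprivate (x)" body stores X (or its address, when passed by
   reference) into the broadcast structure via SLIST, and every other
   thread copies it back out of that structure via RLIST.  */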
7029 static void
7030 lower_copyprivate_clauses (tree clauses, gimple_seq *slist, gimple_seq *rlist,
7031 omp_context *ctx)
7033 tree c;
7035 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7037 tree var, new_var, ref, x;
7038 bool by_ref;
7039 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
7041 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYPRIVATE)
7042 continue;
7044 var = OMP_CLAUSE_DECL (c);
7045 by_ref = use_pointer_for_field (var, NULL);
7047 ref = build_sender_ref (var, ctx);
7048 x = new_var = lookup_decl_in_outer_ctx (var, ctx);
7049 if (by_ref)
7051 x = build_fold_addr_expr_loc (clause_loc, new_var);
7052 x = fold_convert_loc (clause_loc, TREE_TYPE (ref), x);
7054 gimplify_assign (ref, x, slist);
7056 ref = build_receiver_ref (var, false, ctx);
7057 if (by_ref)
7059 ref = fold_convert_loc (clause_loc,
7060 build_pointer_type (TREE_TYPE (new_var)),
7061 ref);
7062 ref = build_fold_indirect_ref_loc (clause_loc, ref);
7064 if (omp_is_reference (var))
7066 ref = fold_convert_loc (clause_loc, TREE_TYPE (new_var), ref);
7067 ref = build_simple_mem_ref_loc (clause_loc, ref);
7068 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
7070 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, ref);
7071 gimplify_and_add (x, rlist);
7076 /* Generate code to implement the clauses, FIRSTPRIVATE, COPYIN, LASTPRIVATE,
7077 and REDUCTION from the sender (aka parent) side. */
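/* As an illustration (not the literal output): for
   "#pragma omp task firstprivate (x)" the parent stores the value
   into the marshalling structure before the task may run,

	.omp_data_o.x = x;	   <- appended to ILIST

   while copy-out back from the structure (e.g. for lastprivate) is
   appended to OLIST and runs after the region.  */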
7079 static void
7080 lower_send_clauses (tree clauses, gimple_seq *ilist, gimple_seq *olist,
7081 omp_context *ctx)
7083 tree c, t;
7084 int ignored_looptemp = 0;
7085 bool is_taskloop = false;
7087 /* For taskloop, ignore the first two _looptemp_ clauses; those are
7088 initialized by GOMP_taskloop. */
7089 if (is_task_ctx (ctx) && gimple_omp_task_taskloop_p (ctx->stmt))
7091 ignored_looptemp = 2;
7092 is_taskloop = true;
7095 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7097 tree val, ref, x, var;
7098 bool by_ref, do_in = false, do_out = false;
7099 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
7101 switch (OMP_CLAUSE_CODE (c))
7103 case OMP_CLAUSE_PRIVATE:
7104 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
7105 break;
7106 continue;
7107 case OMP_CLAUSE_FIRSTPRIVATE:
7108 case OMP_CLAUSE_COPYIN:
7109 case OMP_CLAUSE_LASTPRIVATE:
7110 case OMP_CLAUSE_IN_REDUCTION:
7111 case OMP_CLAUSE__REDUCTEMP_:
7112 break;
7113 case OMP_CLAUSE_REDUCTION:
7114 if (is_task_ctx (ctx) || OMP_CLAUSE_REDUCTION_TASK (c))
7115 continue;
7116 break;
7117 case OMP_CLAUSE_SHARED:
7118 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
7119 break;
7120 continue;
7121 case OMP_CLAUSE__LOOPTEMP_:
7122 if (ignored_looptemp)
7124 ignored_looptemp--;
7125 continue;
7127 break;
7128 default:
7129 continue;
7132 val = OMP_CLAUSE_DECL (c);
7133 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
7134 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION)
7135 && TREE_CODE (val) == MEM_REF)
7137 val = TREE_OPERAND (val, 0);
7138 if (TREE_CODE (val) == POINTER_PLUS_EXPR)
7139 val = TREE_OPERAND (val, 0);
7140 if (TREE_CODE (val) == INDIRECT_REF
7141 || TREE_CODE (val) == ADDR_EXPR)
7142 val = TREE_OPERAND (val, 0);
7143 if (is_variable_sized (val))
7144 continue;
7147 /* For OMP_CLAUSE_SHARED_FIRSTPRIVATE, look beyond the
7148 outer taskloop region. */
7149 omp_context *ctx_for_o = ctx;
7150 if (is_taskloop
7151 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
7152 && OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
7153 ctx_for_o = ctx->outer;
7155 var = lookup_decl_in_outer_ctx (val, ctx_for_o);
7157 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYIN
7158 && is_global_var (var)
7159 && (val == OMP_CLAUSE_DECL (c)
7160 || !is_task_ctx (ctx)
7161 || (TREE_CODE (TREE_TYPE (val)) != POINTER_TYPE
7162 && (TREE_CODE (TREE_TYPE (val)) != REFERENCE_TYPE
7163 || (TREE_CODE (TREE_TYPE (TREE_TYPE (val)))
7164 != POINTER_TYPE)))))
7165 continue;
7167 t = omp_member_access_dummy_var (var);
7168 if (t)
7170 var = DECL_VALUE_EXPR (var);
7171 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx_for_o);
7172 if (o != t)
7173 var = unshare_and_remap (var, t, o);
7174 else
7175 var = unshare_expr (var);
7178 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
7180 /* Handle taskloop firstprivate/lastprivate, where the
7181 lastprivate on GIMPLE_OMP_TASK is represented as
7182 OMP_CLAUSE_SHARED_FIRSTPRIVATE. */
7183 tree f = lookup_sfield ((splay_tree_key) &DECL_UID (val), ctx);
7184 x = omp_build_component_ref (ctx->sender_decl, f);
7185 if (use_pointer_for_field (val, ctx))
7186 var = build_fold_addr_expr (var);
7187 gimplify_assign (x, var, ilist);
7188 DECL_ABSTRACT_ORIGIN (f) = NULL;
7189 continue;
7192 if (((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
7193 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IN_REDUCTION)
7194 || val == OMP_CLAUSE_DECL (c))
7195 && is_variable_sized (val))
7196 continue;
7197 by_ref = use_pointer_for_field (val, NULL);
7199 switch (OMP_CLAUSE_CODE (c))
7201 case OMP_CLAUSE_FIRSTPRIVATE:
7202 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c)
7203 && !by_ref
7204 && is_task_ctx (ctx))
7205 TREE_NO_WARNING (var) = 1;
7206 do_in = true;
7207 break;
7209 case OMP_CLAUSE_PRIVATE:
7210 case OMP_CLAUSE_COPYIN:
7211 case OMP_CLAUSE__LOOPTEMP_:
7212 case OMP_CLAUSE__REDUCTEMP_:
7213 do_in = true;
7214 break;
7216 case OMP_CLAUSE_LASTPRIVATE:
7217 if (by_ref || omp_is_reference (val))
7219 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
7220 continue;
7221 do_in = true;
7223 else
7225 do_out = true;
7226 if (lang_hooks.decls.omp_private_outer_ref (val))
7227 do_in = true;
7229 break;
7231 case OMP_CLAUSE_REDUCTION:
7232 case OMP_CLAUSE_IN_REDUCTION:
7233 do_in = true;
7234 if (val == OMP_CLAUSE_DECL (c))
7236 if (is_task_ctx (ctx))
7237 by_ref = use_pointer_for_field (val, ctx);
7238 else
7239 do_out = !(by_ref || omp_is_reference (val));
7241 else
7242 by_ref = TREE_CODE (TREE_TYPE (val)) == ARRAY_TYPE;
7243 break;
7245 default:
7246 gcc_unreachable ();
7249 if (do_in)
7251 ref = build_sender_ref (val, ctx);
7252 x = by_ref ? build_fold_addr_expr_loc (clause_loc, var) : var;
7253 gimplify_assign (ref, x, ilist);
7254 if (is_task_ctx (ctx))
7255 DECL_ABSTRACT_ORIGIN (TREE_OPERAND (ref, 1)) = NULL;
7258 if (do_out)
7260 ref = build_sender_ref (val, ctx);
7261 gimplify_assign (var, ref, olist);
7266 /* Generate code to implement SHARED from the sender (aka parent)
7267 side. This is trickier, since GIMPLE_OMP_PARALLEL_CLAUSES doesn't
7268 list things that got automatically shared. */
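/* Instead of walking clauses, this walks the fields of the record
   type built during scanning.  Schematically, a variable passed by
   reference yields ".omp_data_o.x = &x" in ILIST, while one passed by
   value yields ".omp_data_o.x = x" in ILIST plus, unless X is
   read-only, "x = .omp_data_o.x" in OLIST to copy changes back.  */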
7270 static void
7271 lower_send_shared_vars (gimple_seq *ilist, gimple_seq *olist, omp_context *ctx)
7273 tree var, ovar, nvar, t, f, x, record_type;
7275 if (ctx->record_type == NULL)
7276 return;
7278 record_type = ctx->srecord_type ? ctx->srecord_type : ctx->record_type;
7279 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
7281 ovar = DECL_ABSTRACT_ORIGIN (f);
7282 if (!ovar || TREE_CODE (ovar) == FIELD_DECL)
7283 continue;
7285 nvar = maybe_lookup_decl (ovar, ctx);
7286 if (!nvar || !DECL_HAS_VALUE_EXPR_P (nvar))
7287 continue;
7289 /* If CTX is a nested parallel directive, find the immediately
7290 enclosing parallel or workshare construct that contains a
7291 mapping for OVAR. */
7292 var = lookup_decl_in_outer_ctx (ovar, ctx);
7294 t = omp_member_access_dummy_var (var);
7295 if (t)
7297 var = DECL_VALUE_EXPR (var);
7298 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
7299 if (o != t)
7300 var = unshare_and_remap (var, t, o);
7301 else
7302 var = unshare_expr (var);
7305 if (use_pointer_for_field (ovar, ctx))
7307 x = build_sender_ref (ovar, ctx);
7308 if (TREE_CODE (TREE_TYPE (f)) == ARRAY_TYPE
7309 && TREE_TYPE (f) == TREE_TYPE (ovar))
7311 gcc_assert (is_parallel_ctx (ctx)
7312 && DECL_ARTIFICIAL (ovar));
7313 /* _condtemp_ clause. */
7314 var = build_constructor (TREE_TYPE (x), NULL);
7316 else
7317 var = build_fold_addr_expr (var);
7318 gimplify_assign (x, var, ilist);
7320 else
7322 x = build_sender_ref (ovar, ctx);
7323 gimplify_assign (x, var, ilist);
7325 if (!TREE_READONLY (var)
7326 /* We don't need to receive a new reference to a result
7327 or parm decl. In fact we may not store to it, as that would
7328 invalidate any pending RSO and generate wrong gimple
7329 during inlining. */
7330 && !((TREE_CODE (var) == RESULT_DECL
7331 || TREE_CODE (var) == PARM_DECL)
7332 && DECL_BY_REFERENCE (var)))
7334 x = build_sender_ref (ovar, ctx);
7335 gimplify_assign (var, x, olist);
7341 /* Emit an OpenACC head marker call, encapsulating the partitioning and
7342 other information that must be processed by the target compiler.
7343 Return the maximum number of dimensions the associated loop might
7344 be partitioned over. */
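/* The marker has the rough form (argument order as pushed below)

	ddvar = IFN_UNIQUE (OACC_HEAD_MARK, ddvar, levels, tag [, gang_static]);

   where TAG packs the OLF_* partitioning flags computed below.  */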
7346 static unsigned
7347 lower_oacc_head_mark (location_t loc, tree ddvar, tree clauses,
7348 gimple_seq *seq, omp_context *ctx)
7350 unsigned levels = 0;
7351 unsigned tag = 0;
7352 tree gang_static = NULL_TREE;
7353 auto_vec<tree, 5> args;
7355 args.quick_push (build_int_cst
7356 (integer_type_node, IFN_UNIQUE_OACC_HEAD_MARK));
7357 args.quick_push (ddvar);
7358 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
7360 switch (OMP_CLAUSE_CODE (c))
7362 case OMP_CLAUSE_GANG:
7363 tag |= OLF_DIM_GANG;
7364 gang_static = OMP_CLAUSE_GANG_STATIC_EXPR (c);
7365 /* static:* is represented by -1, and we can ignore it, as
7366 scheduling is always static. */
7367 if (gang_static && integer_minus_onep (gang_static))
7368 gang_static = NULL_TREE;
7369 levels++;
7370 break;
7372 case OMP_CLAUSE_WORKER:
7373 tag |= OLF_DIM_WORKER;
7374 levels++;
7375 break;
7377 case OMP_CLAUSE_VECTOR:
7378 tag |= OLF_DIM_VECTOR;
7379 levels++;
7380 break;
7382 case OMP_CLAUSE_SEQ:
7383 tag |= OLF_SEQ;
7384 break;
7386 case OMP_CLAUSE_AUTO:
7387 tag |= OLF_AUTO;
7388 break;
7390 case OMP_CLAUSE_INDEPENDENT:
7391 tag |= OLF_INDEPENDENT;
7392 break;
7394 case OMP_CLAUSE_TILE:
7395 tag |= OLF_TILE;
7396 break;
7398 default:
7399 continue;
7403 if (gang_static)
7405 if (DECL_P (gang_static))
7406 gang_static = build_outer_var_ref (gang_static, ctx);
7407 tag |= OLF_GANG_STATIC;
7410 /* In a parallel region, loops are implicitly INDEPENDENT. */
7411 omp_context *tgt = enclosing_target_ctx (ctx);
7412 if (!tgt || is_oacc_parallel (tgt))
7413 tag |= OLF_INDEPENDENT;
7415 if (tag & OLF_TILE)
7416 /* Tiling could use all 3 levels. */
7417 levels = 3;
7418 else
7420 /* A loop lacking SEQ, GANG, WORKER and/or VECTOR could be AUTO.
7421 Ensure at least one level, or 2 for possible auto
7422 partitioning. */
7423 bool maybe_auto = !(tag & (((GOMP_DIM_MASK (GOMP_DIM_MAX) - 1)
7424 << OLF_DIM_BASE) | OLF_SEQ));
7426 if (levels < 1u + maybe_auto)
7427 levels = 1u + maybe_auto;
7430 args.quick_push (build_int_cst (integer_type_node, levels));
7431 args.quick_push (build_int_cst (integer_type_node, tag));
7432 if (gang_static)
7433 args.quick_push (gang_static);
7435 gcall *call = gimple_build_call_internal_vec (IFN_UNIQUE, args);
7436 gimple_set_location (call, loc);
7437 gimple_set_lhs (call, ddvar);
7438 gimple_seq_add_stmt (seq, call);
7440 return levels;
7443 /* Emit an OpenACC loop head or tail marker to SEQ. LEVEL is the
7444 partitioning level of the enclosed region. */
7446 static void
7447 lower_oacc_loop_marker (location_t loc, tree ddvar, bool head,
7448 tree tofollow, gimple_seq *seq)
7450 int marker_kind = (head ? IFN_UNIQUE_OACC_HEAD_MARK
7451 : IFN_UNIQUE_OACC_TAIL_MARK);
7452 tree marker = build_int_cst (integer_type_node, marker_kind);
7453 int nargs = 2 + (tofollow != NULL_TREE);
7454 gcall *call = gimple_build_call_internal (IFN_UNIQUE, nargs,
7455 marker, ddvar, tofollow);
7456 gimple_set_location (call, loc);
7457 gimple_set_lhs (call, ddvar);
7458 gimple_seq_add_stmt (seq, call);
7461 /* Generate the before and after OpenACC loop sequences. CLAUSES are
7462 the loop clauses, from which we extract reductions. Initialize
7463 HEAD and TAIL. */
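/* For a loop partitioned over two levels this produces, roughly,

	HEAD:  head-mark  fork(level 1)  fork(level 2)
	TAIL:  join(level 2)  join(level 1)  tail-mark

   with each level's reduction SETUP/INIT wrapped around its fork and
   FINI/TEARDOWN wrapped around its join (see lower_oacc_reductions).  */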
7465 static void
7466 lower_oacc_head_tail (location_t loc, tree clauses,
7467 gimple_seq *head, gimple_seq *tail, omp_context *ctx)
7469 bool inner = false;
7470 tree ddvar = create_tmp_var (integer_type_node, ".data_dep");
7471 gimple_seq_add_stmt (head, gimple_build_assign (ddvar, integer_zero_node));
7473 unsigned count = lower_oacc_head_mark (loc, ddvar, clauses, head, ctx);
7474 tree fork_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_FORK);
7475 tree join_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_JOIN);
7477 gcc_assert (count);
7478 for (unsigned done = 1; count; count--, done++)
7480 gimple_seq fork_seq = NULL;
7481 gimple_seq join_seq = NULL;
7483 tree place = build_int_cst (integer_type_node, -1);
7484 gcall *fork = gimple_build_call_internal (IFN_UNIQUE, 3,
7485 fork_kind, ddvar, place);
7486 gimple_set_location (fork, loc);
7487 gimple_set_lhs (fork, ddvar);
7489 gcall *join = gimple_build_call_internal (IFN_UNIQUE, 3,
7490 join_kind, ddvar, place);
7491 gimple_set_location (join, loc);
7492 gimple_set_lhs (join, ddvar);
7494 /* Mark the beginning of this level sequence. */
7495 if (inner)
7496 lower_oacc_loop_marker (loc, ddvar, true,
7497 build_int_cst (integer_type_node, count),
7498 &fork_seq);
7499 lower_oacc_loop_marker (loc, ddvar, false,
7500 build_int_cst (integer_type_node, done),
7501 &join_seq);
7503 lower_oacc_reductions (loc, clauses, place, inner,
7504 fork, join, &fork_seq, &join_seq, ctx);
7506 /* Append this level to head. */
7507 gimple_seq_add_seq (head, fork_seq);
7508 /* Prepend it to tail. */
7509 gimple_seq_add_seq (&join_seq, *tail);
7510 *tail = join_seq;
7512 inner = true;
7515 /* Mark the end of the sequence. */
7516 lower_oacc_loop_marker (loc, ddvar, true, NULL_TREE, head);
7517 lower_oacc_loop_marker (loc, ddvar, false, NULL_TREE, tail);
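/* Illustrative sketch (not itself part of the lowering): for a doubly
   partitioned OpenACC loop such as

	#pragma acc parallel loop gang vector
	for (i = 0; i < n; i++)
	  a[i] += 1;

   lower_oacc_head_tail emits one fork/join pair per partitioning level,
   so HEAD is roughly

	UNIQUE (OACC_HEAD_MARK, levels=2, tag=GANG|VECTOR)
	UNIQUE (OACC_FORK)                 <- outer (gang) level
	UNIQUE (OACC_HEAD_MARK, 1)
	UNIQUE (OACC_FORK)                 <- inner (vector) level
	UNIQUE (OACC_HEAD_MARK)            <- closes the head sequence

   while TAIL mirrors it with OACC_TAIL_MARK/OACC_JOIN in reverse order,
   with any reduction setup and teardown from lower_oacc_reductions
   interleaved at each level.  */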
7520 /* If exceptions are enabled, wrap the statements in BODY in a MUST_NOT_THROW
7521 catch handler and return it. This prevents programs from violating the
7522 structured block semantics with throws. */
7524 static gimple_seq
7525 maybe_catch_exception (gimple_seq body)
7527 gimple *g;
7528 tree decl;
7530 if (!flag_exceptions)
7531 return body;
7533 if (lang_hooks.eh_protect_cleanup_actions != NULL)
7534 decl = lang_hooks.eh_protect_cleanup_actions ();
7535 else
7536 decl = builtin_decl_explicit (BUILT_IN_TRAP);
7538 g = gimple_build_eh_must_not_throw (decl);
7539 g = gimple_build_try (body, gimple_seq_alloc_with_stmt (g),
7540 GIMPLE_TRY_CATCH);
7542 return gimple_seq_alloc_with_stmt (g);
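/* For instance (user code shown only as illustration), with -fexceptions

	#pragma omp parallel
	{
	  may_throw ();
	}

   the body is wrapped as a GIMPLE_TRY whose catch is the MUST_NOT_THROW
   region built above, so an exception escaping the structured block
   reaches the language's EH-protect action (std::terminate for C++) or
   __builtin_trap instead of unwinding into the thread library.  */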
7546 /* Routines to lower OMP directives into OMP-GIMPLE. */
7548 /* If CTX is a worksharing context inside a cancellable parallel
7549 region and it isn't nowait, add an lhs to its GIMPLE_OMP_RETURN
7550 and a conditional branch to the parallel's cancel_label to handle
7551 cancellation in the implicit barrier. */
7553 static void
7554 maybe_add_implicit_barrier_cancel (omp_context *ctx, gimple *omp_return,
7555 gimple_seq *body)
7557 gcc_assert (gimple_code (omp_return) == GIMPLE_OMP_RETURN);
7558 if (gimple_omp_return_nowait_p (omp_return))
7559 return;
7560 for (omp_context *outer = ctx->outer; outer; outer = outer->outer)
7561 if (gimple_code (outer->stmt) == GIMPLE_OMP_PARALLEL
7562 && outer->cancellable)
7564 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_CANCEL);
7565 tree c_bool_type = TREE_TYPE (TREE_TYPE (fndecl));
7566 tree lhs = create_tmp_var (c_bool_type);
7567 gimple_omp_return_set_lhs (omp_return, lhs);
7568 tree fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
7569 gimple *g = gimple_build_cond (NE_EXPR, lhs,
7570 fold_convert (c_bool_type,
7571 boolean_false_node),
7572 outer->cancel_label, fallthru_label);
7573 gimple_seq_add_stmt (body, g);
7574 gimple_seq_add_stmt (body, gimple_build_label (fallthru_label));
7576 else if (gimple_code (outer->stmt) != GIMPLE_OMP_TASKGROUP)
7577 return;
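/* As an illustration (hypothetical user code):

	#pragma omp parallel
	{
	  if (setup_failed ())
	    {
	      #pragma omp cancel parallel
	    }
	  #pragma omp for
	  for (i = 0; i < n; i++)
	    work (i);
	}

   The implicit barrier ending the "for" region must notice a pending
   "cancel parallel"; the boolean lhs added to the GIMPLE_OMP_RETURN
   above carries the barrier's answer and feeds the branch to the
   parallel's cancel_label.  */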
7580 /* Find the first task_reduction or reduction clause or return NULL
7581 if there are none. */
7583 static inline tree
7584 omp_task_reductions_find_first (tree clauses, enum tree_code code,
7585 enum omp_clause_code ccode)
7587 while (1)
7589 clauses = omp_find_clause (clauses, ccode);
7590 if (clauses == NULL_TREE)
7591 return NULL_TREE;
7592 if (ccode != OMP_CLAUSE_REDUCTION
7593 || code == OMP_TASKLOOP
7594 || OMP_CLAUSE_REDUCTION_TASK (clauses))
7595 return clauses;
7596 clauses = OMP_CLAUSE_CHAIN (clauses);
7600 static void lower_omp_task_reductions (omp_context *, enum tree_code, tree,
7601 gimple_seq *, gimple_seq *);
7603 /* Lower the OpenMP sections directive in the current statement in GSI_P.
7604 CTX is the enclosing OMP context for the current statement. */
7606 static void
7607 lower_omp_sections (gimple_stmt_iterator *gsi_p, omp_context *ctx)
7609 tree block, control;
7610 gimple_stmt_iterator tgsi;
7611 gomp_sections *stmt;
7612 gimple *t;
7613 gbind *new_stmt, *bind;
7614 gimple_seq ilist, dlist, olist, tred_dlist = NULL, clist = NULL, new_body;
7616 stmt = as_a <gomp_sections *> (gsi_stmt (*gsi_p));
7618 push_gimplify_context ();
7620 dlist = NULL;
7621 ilist = NULL;
7623 tree rclauses
7624 = omp_task_reductions_find_first (gimple_omp_sections_clauses (stmt),
7625 OMP_SECTIONS, OMP_CLAUSE_REDUCTION);
7626 tree rtmp = NULL_TREE;
7627 if (rclauses)
7629 tree type = build_pointer_type (pointer_sized_int_node);
7630 tree temp = create_tmp_var (type);
7631 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
7632 OMP_CLAUSE_DECL (c) = temp;
7633 OMP_CLAUSE_CHAIN (c) = gimple_omp_sections_clauses (stmt);
7634 gimple_omp_sections_set_clauses (stmt, c);
7635 lower_omp_task_reductions (ctx, OMP_SECTIONS,
7636 gimple_omp_sections_clauses (stmt),
7637 &ilist, &tred_dlist);
7638 rclauses = c;
7639 rtmp = make_ssa_name (type);
7640 gimple_seq_add_stmt (&ilist, gimple_build_assign (rtmp, temp));
7643 tree *clauses_ptr = gimple_omp_sections_clauses_ptr (stmt);
7644 lower_lastprivate_conditional_clauses (clauses_ptr, ctx);
7646 lower_rec_input_clauses (gimple_omp_sections_clauses (stmt),
7647 &ilist, &dlist, ctx, NULL);
7649 control = create_tmp_var (unsigned_type_node, ".section");
7650 gimple_omp_sections_set_control (stmt, control);
7652 new_body = gimple_omp_body (stmt);
7653 gimple_omp_set_body (stmt, NULL);
7654 tgsi = gsi_start (new_body);
7655 for (; !gsi_end_p (tgsi); gsi_next (&tgsi))
7657 omp_context *sctx;
7658 gimple *sec_start;
7660 sec_start = gsi_stmt (tgsi);
7661 sctx = maybe_lookup_ctx (sec_start);
7662 gcc_assert (sctx);
7664 lower_omp (gimple_omp_body_ptr (sec_start), sctx);
7665 gsi_insert_seq_after (&tgsi, gimple_omp_body (sec_start),
7666 GSI_CONTINUE_LINKING);
7667 gimple_omp_set_body (sec_start, NULL);
7669 if (gsi_one_before_end_p (tgsi))
7671 gimple_seq l = NULL;
7672 lower_lastprivate_clauses (gimple_omp_sections_clauses (stmt), NULL,
7673 &ilist, &l, &clist, ctx);
7674 gsi_insert_seq_after (&tgsi, l, GSI_CONTINUE_LINKING);
7675 gimple_omp_section_set_last (sec_start);
7678 gsi_insert_after (&tgsi, gimple_build_omp_return (false),
7679 GSI_CONTINUE_LINKING);
7682 block = make_node (BLOCK);
7683 bind = gimple_build_bind (NULL, new_body, block);
7685 olist = NULL;
7686 lower_reduction_clauses (gimple_omp_sections_clauses (stmt), &olist,
7687 &clist, ctx);
7688 if (clist)
7690 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START);
7691 gcall *g = gimple_build_call (fndecl, 0);
7692 gimple_seq_add_stmt (&olist, g);
7693 gimple_seq_add_seq (&olist, clist);
7694 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END);
7695 g = gimple_build_call (fndecl, 0);
7696 gimple_seq_add_stmt (&olist, g);
7699 block = make_node (BLOCK);
7700 new_stmt = gimple_build_bind (NULL, NULL, block);
7701 gsi_replace (gsi_p, new_stmt, true);
7703 pop_gimplify_context (new_stmt);
7704 gimple_bind_append_vars (new_stmt, ctx->block_vars);
7705 BLOCK_VARS (block) = gimple_bind_vars (bind);
7706 if (BLOCK_VARS (block))
7707 TREE_USED (block) = 1;
7709 new_body = NULL;
7710 gimple_seq_add_seq (&new_body, ilist);
7711 gimple_seq_add_stmt (&new_body, stmt);
7712 gimple_seq_add_stmt (&new_body, gimple_build_omp_sections_switch ());
7713 gimple_seq_add_stmt (&new_body, bind);
7715 t = gimple_build_omp_continue (control, control);
7716 gimple_seq_add_stmt (&new_body, t);
7718 gimple_seq_add_seq (&new_body, olist);
7719 if (ctx->cancellable)
7720 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
7721 gimple_seq_add_seq (&new_body, dlist);
7723 new_body = maybe_catch_exception (new_body);
7725 bool nowait = omp_find_clause (gimple_omp_sections_clauses (stmt),
7726 OMP_CLAUSE_NOWAIT) != NULL_TREE;
7727 t = gimple_build_omp_return (nowait);
7728 gimple_seq_add_stmt (&new_body, t);
7729 gimple_seq_add_seq (&new_body, tred_dlist);
7730 maybe_add_implicit_barrier_cancel (ctx, t, &new_body);
7732 if (rclauses)
7733 OMP_CLAUSE_DECL (rclauses) = rtmp;
7735 gimple_bind_set_body (new_stmt, new_body);
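/* A sketch of input this lowering handles (illustrative only):

	#pragma omp parallel sections reduction (task, +: s)
	{
	  #pragma omp section
	  s += f ();
	  #pragma omp section
	  s += g ();
	}

   The task-modified reduction is what triggers the _REDUCTEMP_ path
   above, and the ".section" control variable drives the
   GIMPLE_OMP_SECTIONS_SWITCH that selects the next section body a
   thread executes.  */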
7739 /* A subroutine of lower_omp_single. Expand the simple form of
7740 a GIMPLE_OMP_SINGLE, without a copyprivate clause:
7742 if (GOMP_single_start ())
7743 BODY;
7744 [ GOMP_barrier (); ] -> unless 'nowait' is present.
7746 FIXME. It may be better to delay expanding the logic of this until
7747 pass_expand_omp. The expanded logic may make the job of a
7748 synchronization analysis pass more difficult. */
7750 static void
7751 lower_omp_single_simple (gomp_single *single_stmt, gimple_seq *pre_p)
7753 location_t loc = gimple_location (single_stmt);
7754 tree tlabel = create_artificial_label (loc);
7755 tree flabel = create_artificial_label (loc);
7756 gimple *call, *cond;
7757 tree lhs, decl;
7759 decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_START);
7760 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (decl)));
7761 call = gimple_build_call (decl, 0);
7762 gimple_call_set_lhs (call, lhs);
7763 gimple_seq_add_stmt (pre_p, call);
7765 cond = gimple_build_cond (EQ_EXPR, lhs,
7766 fold_convert_loc (loc, TREE_TYPE (lhs),
7767 boolean_true_node),
7768 tlabel, flabel);
7769 gimple_seq_add_stmt (pre_p, cond);
7770 gimple_seq_add_stmt (pre_p, gimple_build_label (tlabel));
7771 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
7772 gimple_seq_add_stmt (pre_p, gimple_build_label (flabel));
7776 /* A subroutine of lower_omp_single. Expand the simple form of
7777 a GIMPLE_OMP_SINGLE, with a copyprivate clause:
7779 #pragma omp single copyprivate (a, b, c)
7781 Create a new structure to hold copies of 'a', 'b' and 'c' and emit:
7784 if ((copyout_p = GOMP_single_copy_start ()) == NULL)
7786 BODY;
7787 copyout.a = a;
7788 copyout.b = b;
7789 copyout.c = c;
7790 GOMP_single_copy_end (&copyout);
7792 else
7794 a = copyout_p->a;
7795 b = copyout_p->b;
7796 c = copyout_p->c;
7798 GOMP_barrier ();
7801 FIXME. It may be better to delay expanding the logic of this until
7802 pass_expand_omp. The expanded logic may make the job of a
7803 synchronization analysis pass more difficult. */
7805 static void
7806 lower_omp_single_copy (gomp_single *single_stmt, gimple_seq *pre_p,
7807 omp_context *ctx)
7809 tree ptr_type, t, l0, l1, l2, bfn_decl;
7810 gimple_seq copyin_seq;
7811 location_t loc = gimple_location (single_stmt);
7813 ctx->sender_decl = create_tmp_var (ctx->record_type, ".omp_copy_o");
7815 ptr_type = build_pointer_type (ctx->record_type);
7816 ctx->receiver_decl = create_tmp_var (ptr_type, ".omp_copy_i");
7818 l0 = create_artificial_label (loc);
7819 l1 = create_artificial_label (loc);
7820 l2 = create_artificial_label (loc);
7822 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_START);
7823 t = build_call_expr_loc (loc, bfn_decl, 0);
7824 t = fold_convert_loc (loc, ptr_type, t);
7825 gimplify_assign (ctx->receiver_decl, t, pre_p);
7827 t = build2 (EQ_EXPR, boolean_type_node, ctx->receiver_decl,
7828 build_int_cst (ptr_type, 0));
7829 t = build3 (COND_EXPR, void_type_node, t,
7830 build_and_jump (&l0), build_and_jump (&l1));
7831 gimplify_and_add (t, pre_p);
7833 gimple_seq_add_stmt (pre_p, gimple_build_label (l0));
7835 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
7837 copyin_seq = NULL;
7838 lower_copyprivate_clauses (gimple_omp_single_clauses (single_stmt), pre_p,
7839 &copyin_seq, ctx);
7841 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
7842 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_END);
7843 t = build_call_expr_loc (loc, bfn_decl, 1, t);
7844 gimplify_and_add (t, pre_p);
7846 t = build_and_jump (&l2);
7847 gimplify_and_add (t, pre_p);
7849 gimple_seq_add_stmt (pre_p, gimple_build_label (l1));
7851 gimple_seq_add_seq (pre_p, copyin_seq);
7853 gimple_seq_add_stmt (pre_p, gimple_build_label (l2));
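/* For example (illustrative), inside a parallel region where VAL is
   private:

	int val;
	#pragma omp single copyprivate (val)
	val = read_input ();

   the thread that wins GOMP_single_copy_start runs the body and
   publishes its copyout structure via GOMP_single_copy_end; the other
   threads copy the fields back out in copyin_seq and then meet at the
   implied barrier.  */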
7857 /* Expand code for an OpenMP single directive. */
7859 static void
7860 lower_omp_single (gimple_stmt_iterator *gsi_p, omp_context *ctx)
7862 tree block;
7863 gomp_single *single_stmt = as_a <gomp_single *> (gsi_stmt (*gsi_p));
7864 gbind *bind;
7865 gimple_seq bind_body, bind_body_tail = NULL, dlist;
7867 push_gimplify_context ();
7869 block = make_node (BLOCK);
7870 bind = gimple_build_bind (NULL, NULL, block);
7871 gsi_replace (gsi_p, bind, true);
7872 bind_body = NULL;
7873 dlist = NULL;
7874 lower_rec_input_clauses (gimple_omp_single_clauses (single_stmt),
7875 &bind_body, &dlist, ctx, NULL);
7876 lower_omp (gimple_omp_body_ptr (single_stmt), ctx);
7878 gimple_seq_add_stmt (&bind_body, single_stmt);
7880 if (ctx->record_type)
7881 lower_omp_single_copy (single_stmt, &bind_body, ctx);
7882 else
7883 lower_omp_single_simple (single_stmt, &bind_body);
7885 gimple_omp_set_body (single_stmt, NULL);
7887 gimple_seq_add_seq (&bind_body, dlist);
7889 bind_body = maybe_catch_exception (bind_body);
7891 bool nowait = omp_find_clause (gimple_omp_single_clauses (single_stmt),
7892 OMP_CLAUSE_NOWAIT) != NULL_TREE;
7893 gimple *g = gimple_build_omp_return (nowait);
7894 gimple_seq_add_stmt (&bind_body_tail, g);
7895 maybe_add_implicit_barrier_cancel (ctx, g, &bind_body_tail);
7896 if (ctx->record_type)
7898 gimple_stmt_iterator gsi = gsi_start (bind_body_tail);
7899 tree clobber = build_constructor (ctx->record_type, NULL);
7900 TREE_THIS_VOLATILE (clobber) = 1;
7901 gsi_insert_after (&gsi, gimple_build_assign (ctx->sender_decl,
7902 clobber), GSI_SAME_STMT);
7904 gimple_seq_add_seq (&bind_body, bind_body_tail);
7905 gimple_bind_set_body (bind, bind_body);
7907 pop_gimplify_context (bind);
7909 gimple_bind_append_vars (bind, ctx->block_vars);
7910 BLOCK_VARS (block) = ctx->block_vars;
7911 if (BLOCK_VARS (block))
7912 TREE_USED (block) = 1;
7916 /* Expand code for an OpenMP master directive. */
7918 static void
7919 lower_omp_master (gimple_stmt_iterator *gsi_p, omp_context *ctx)
7921 tree block, lab = NULL, x, bfn_decl;
7922 gimple *stmt = gsi_stmt (*gsi_p);
7923 gbind *bind;
7924 location_t loc = gimple_location (stmt);
7925 gimple_seq tseq;
7927 push_gimplify_context ();
7929 block = make_node (BLOCK);
7930 bind = gimple_build_bind (NULL, NULL, block);
7931 gsi_replace (gsi_p, bind, true);
7932 gimple_bind_add_stmt (bind, stmt);
7934 bfn_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
7935 x = build_call_expr_loc (loc, bfn_decl, 0);
7936 x = build2 (EQ_EXPR, boolean_type_node, x, integer_zero_node);
7937 x = build3 (COND_EXPR, void_type_node, x, NULL, build_and_jump (&lab));
7938 tseq = NULL;
7939 gimplify_and_add (x, &tseq);
7940 gimple_bind_add_seq (bind, tseq);
7942 lower_omp (gimple_omp_body_ptr (stmt), ctx);
7943 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
7944 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
7945 gimple_omp_set_body (stmt, NULL);
7947 gimple_bind_add_stmt (bind, gimple_build_label (lab));
7949 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
7951 pop_gimplify_context (bind);
7953 gimple_bind_append_vars (bind, ctx->block_vars);
7954 BLOCK_VARS (block) = ctx->block_vars;
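/* Written back as source, the net effect is roughly (sketch):

	if (omp_get_thread_num () == 0)
	  {
	    BODY;
	  }
	lab:

   followed by a nowait GIMPLE_OMP_RETURN, since master implies no
   barrier.  */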
7957 /* Helper function for lower_omp_task_reductions. For a specific PASS
7958 find the next clause that should be processed, or return false
7959 if all have been processed already. */
7961 static inline bool
7962 omp_task_reduction_iterate (int pass, enum tree_code code,
7963 enum omp_clause_code ccode, tree *c, tree *decl,
7964 tree *type, tree *next)
7966 for (; *c; *c = omp_find_clause (OMP_CLAUSE_CHAIN (*c), ccode))
7968 if (ccode == OMP_CLAUSE_REDUCTION
7969 && code != OMP_TASKLOOP
7970 && !OMP_CLAUSE_REDUCTION_TASK (*c))
7971 continue;
7972 *decl = OMP_CLAUSE_DECL (*c);
7973 *type = TREE_TYPE (*decl);
7974 if (TREE_CODE (*decl) == MEM_REF)
7976 if (pass != 1)
7977 continue;
7979 else
7981 if (omp_is_reference (*decl))
7982 *type = TREE_TYPE (*type);
7983 if (pass != (!TREE_CONSTANT (TYPE_SIZE_UNIT (*type))))
7984 continue;
7986 *next = omp_find_clause (OMP_CLAUSE_CHAIN (*c), ccode);
7987 return true;
7989 *decl = NULL_TREE;
7990 *type = NULL_TREE;
7991 *next = NULL_TREE;
7992 return false;
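/* So pass 0 yields the clauses whose private copies have a constant
   size (laid out first in the record), and pass 1 the MEM_REF and
   variable-sized ones, which need the runtime-sized tail of the
   per-thread chunk.  */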
7995 /* Lower task_reduction and reduction clauses (the latter unless CODE is
7996 OMP_TASKGROUP, only those with the task modifier). Register their mapping
7997 in the START sequence; reduce and unregister them in the END sequence. */
7999 static void
8000 lower_omp_task_reductions (omp_context *ctx, enum tree_code code, tree clauses,
8001 gimple_seq *start, gimple_seq *end)
8003 enum omp_clause_code ccode
8004 = (code == OMP_TASKGROUP
8005 ? OMP_CLAUSE_TASK_REDUCTION : OMP_CLAUSE_REDUCTION);
8006 tree cancellable = NULL_TREE;
8007 clauses = omp_task_reductions_find_first (clauses, code, ccode);
8008 if (clauses == NULL_TREE)
8009 return;
8010 if (code == OMP_FOR || code == OMP_SECTIONS)
8012 for (omp_context *outer = ctx->outer; outer; outer = outer->outer)
8013 if (gimple_code (outer->stmt) == GIMPLE_OMP_PARALLEL
8014 && outer->cancellable)
8016 cancellable = error_mark_node;
8017 break;
8019 else if (gimple_code (outer->stmt) != GIMPLE_OMP_TASKGROUP)
8020 break;
8022 tree record_type = lang_hooks.types.make_type (RECORD_TYPE);
8023 tree *last = &TYPE_FIELDS (record_type);
8024 unsigned cnt = 0;
8025 if (cancellable)
8027 tree field = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
8028 ptr_type_node);
8029 tree ifield = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
8030 integer_type_node);
8031 *last = field;
8032 DECL_CHAIN (field) = ifield;
8033 last = &DECL_CHAIN (ifield);
8034 DECL_CONTEXT (field) = record_type;
8035 if (TYPE_ALIGN (record_type) < DECL_ALIGN (field))
8036 SET_TYPE_ALIGN (record_type, DECL_ALIGN (field));
8037 DECL_CONTEXT (ifield) = record_type;
8038 if (TYPE_ALIGN (record_type) < DECL_ALIGN (ifield))
8039 SET_TYPE_ALIGN (record_type, DECL_ALIGN (ifield));
8041 for (int pass = 0; pass < 2; pass++)
8043 tree decl, type, next;
8044 for (tree c = clauses;
8045 omp_task_reduction_iterate (pass, code, ccode,
8046 &c, &decl, &type, &next); c = next)
8048 ++cnt;
8049 tree new_type = type;
8050 if (ctx->outer)
8051 new_type = remap_type (type, &ctx->outer->cb);
8052 tree field
8053 = build_decl (OMP_CLAUSE_LOCATION (c), FIELD_DECL,
8054 DECL_P (decl) ? DECL_NAME (decl) : NULL_TREE,
8055 new_type);
8056 if (DECL_P (decl) && type == TREE_TYPE (decl))
8058 SET_DECL_ALIGN (field, DECL_ALIGN (decl));
8059 DECL_USER_ALIGN (field) = DECL_USER_ALIGN (decl);
8060 TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (decl);
8062 else
8063 SET_DECL_ALIGN (field, TYPE_ALIGN (type));
8064 DECL_CONTEXT (field) = record_type;
8065 if (TYPE_ALIGN (record_type) < DECL_ALIGN (field))
8066 SET_TYPE_ALIGN (record_type, DECL_ALIGN (field));
8067 *last = field;
8068 last = &DECL_CHAIN (field);
8069 tree bfield
8070 = build_decl (OMP_CLAUSE_LOCATION (c), FIELD_DECL, NULL_TREE,
8071 boolean_type_node);
8072 DECL_CONTEXT (bfield) = record_type;
8073 if (TYPE_ALIGN (record_type) < DECL_ALIGN (bfield))
8074 SET_TYPE_ALIGN (record_type, DECL_ALIGN (bfield));
8075 *last = bfield;
8076 last = &DECL_CHAIN (bfield);
8079 *last = NULL_TREE;
8080 layout_type (record_type);
8082 /* Build up an array used to register all the reductions with the runtime
8083 and to deregister them at the end. Format documented in libgomp/task.c. */
8084 tree atype = build_array_type_nelts (pointer_sized_int_node, 7 + cnt * 3);
8085 tree avar = create_tmp_var_raw (atype);
8086 gimple_add_tmp_var (avar);
8087 TREE_ADDRESSABLE (avar) = 1;
8088 tree r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_zero_node,
8089 NULL_TREE, NULL_TREE);
8090 tree t = build_int_cst (pointer_sized_int_node, cnt);
8091 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
8092 gimple_seq seq = NULL;
8093 tree sz = fold_convert (pointer_sized_int_node,
8094 TYPE_SIZE_UNIT (record_type));
8095 int cachesz = 64;
8096 sz = fold_build2 (PLUS_EXPR, pointer_sized_int_node, sz,
8097 build_int_cst (pointer_sized_int_node, cachesz - 1));
8098 sz = fold_build2 (BIT_AND_EXPR, pointer_sized_int_node, sz,
8099 build_int_cst (pointer_sized_int_node, ~(cachesz - 1)));
8100 ctx->task_reductions.create (1 + cnt);
8101 ctx->task_reduction_map = new hash_map<tree, unsigned>;
8102 ctx->task_reductions.quick_push (TREE_CODE (sz) == INTEGER_CST
8103 ? sz : NULL_TREE);
8104 sz = force_gimple_operand (sz, &seq, true, NULL_TREE);
8105 gimple_seq_add_seq (start, seq);
8106 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_one_node,
8107 NULL_TREE, NULL_TREE);
8108 gimple_seq_add_stmt (start, gimple_build_assign (r, sz));
8109 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (2),
8110 NULL_TREE, NULL_TREE);
8111 t = build_int_cst (pointer_sized_int_node,
8112 MAX (TYPE_ALIGN_UNIT (record_type), (unsigned) cachesz));
8113 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
8114 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (3),
8115 NULL_TREE, NULL_TREE);
8116 t = build_int_cst (pointer_sized_int_node, -1);
8117 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
8118 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (4),
8119 NULL_TREE, NULL_TREE);
8120 t = build_int_cst (pointer_sized_int_node, 0);
8121 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
8123 /* In the END sequence, build a loop that iterates from 0 to
8124 < omp_get_num_threads () and for each task reduction checks a bool
8125 right after the private variable within that thread's chunk; if the
8126 bool is clear, it hasn't been initialized and thus isn't going to be
8127 reduced nor destructed; otherwise reduce and destruct it. */
8128 tree idx = create_tmp_var (size_type_node);
8129 gimple_seq_add_stmt (end, gimple_build_assign (idx, size_zero_node));
8130 tree num_thr_sz = create_tmp_var (size_type_node);
8131 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
8132 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
8133 tree lab3 = NULL_TREE;
8134 gimple *g;
8135 if (code == OMP_FOR || code == OMP_SECTIONS)
8137 /* For worksharing constructs, only perform it in the master thread,
8138 with the exception of cancelled implicit barriers; then only handle
8139 the current thread. */
8140 tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
8141 t = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
8142 tree thr_num = create_tmp_var (integer_type_node);
8143 g = gimple_build_call (t, 0);
8144 gimple_call_set_lhs (g, thr_num);
8145 gimple_seq_add_stmt (end, g);
8146 if (cancellable)
8148 tree c;
8149 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
8150 tree lab6 = create_artificial_label (UNKNOWN_LOCATION);
8151 lab3 = create_artificial_label (UNKNOWN_LOCATION);
8152 if (code == OMP_FOR)
8153 c = gimple_omp_for_clauses (ctx->stmt);
8154 else /* if (code == OMP_SECTIONS) */
8155 c = gimple_omp_sections_clauses (ctx->stmt);
8156 c = OMP_CLAUSE_DECL (omp_find_clause (c, OMP_CLAUSE__REDUCTEMP_));
8157 cancellable = c;
8158 g = gimple_build_cond (NE_EXPR, c, build_zero_cst (TREE_TYPE (c)),
8159 lab5, lab6);
8160 gimple_seq_add_stmt (end, g);
8161 gimple_seq_add_stmt (end, gimple_build_label (lab5));
8162 g = gimple_build_assign (idx, NOP_EXPR, thr_num);
8163 gimple_seq_add_stmt (end, g);
8164 g = gimple_build_assign (num_thr_sz, PLUS_EXPR, idx,
8165 build_one_cst (TREE_TYPE (idx)));
8166 gimple_seq_add_stmt (end, g);
8167 gimple_seq_add_stmt (end, gimple_build_goto (lab3));
8168 gimple_seq_add_stmt (end, gimple_build_label (lab6));
8170 g = gimple_build_cond (NE_EXPR, thr_num, integer_zero_node, lab2, lab4);
8171 gimple_seq_add_stmt (end, g);
8172 gimple_seq_add_stmt (end, gimple_build_label (lab4));
8174 if (code != OMP_PARALLEL)
8176 t = builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS);
8177 tree num_thr = create_tmp_var (integer_type_node);
8178 g = gimple_build_call (t, 0);
8179 gimple_call_set_lhs (g, num_thr);
8180 gimple_seq_add_stmt (end, g);
8181 g = gimple_build_assign (num_thr_sz, NOP_EXPR, num_thr);
8182 gimple_seq_add_stmt (end, g);
8183 if (cancellable)
8184 gimple_seq_add_stmt (end, gimple_build_label (lab3));
8186 else
8188 tree c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
8189 OMP_CLAUSE__REDUCTEMP_);
8190 t = fold_convert (pointer_sized_int_node, OMP_CLAUSE_DECL (c));
8191 t = fold_convert (size_type_node, t);
8192 gimplify_assign (num_thr_sz, t, end);
8194 t = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (2),
8195 NULL_TREE, NULL_TREE);
8196 tree data = create_tmp_var (pointer_sized_int_node);
8197 gimple_seq_add_stmt (end, gimple_build_assign (data, t));
8198 gimple_seq_add_stmt (end, gimple_build_label (lab1));
8199 tree ptr;
8200 if (TREE_CODE (TYPE_SIZE_UNIT (record_type)) == INTEGER_CST)
8201 ptr = create_tmp_var (build_pointer_type (record_type));
8202 else
8203 ptr = create_tmp_var (ptr_type_node);
8204 gimple_seq_add_stmt (end, gimple_build_assign (ptr, NOP_EXPR, data));
8206 tree field = TYPE_FIELDS (record_type);
8207 cnt = 0;
8208 if (cancellable)
8209 field = DECL_CHAIN (DECL_CHAIN (field));
8210 for (int pass = 0; pass < 2; pass++)
8212 tree decl, type, next;
8213 for (tree c = clauses;
8214 omp_task_reduction_iterate (pass, code, ccode,
8215 &c, &decl, &type, &next); c = next)
8217 tree var = decl, ref;
8218 if (TREE_CODE (decl) == MEM_REF)
8220 var = TREE_OPERAND (var, 0);
8221 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
8222 var = TREE_OPERAND (var, 0);
8223 tree v = var;
8224 if (TREE_CODE (var) == ADDR_EXPR)
8225 var = TREE_OPERAND (var, 0);
8226 else if (TREE_CODE (var) == INDIRECT_REF)
8227 var = TREE_OPERAND (var, 0);
8228 tree orig_var = var;
8229 if (is_variable_sized (var))
8231 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
8232 var = DECL_VALUE_EXPR (var);
8233 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
8234 var = TREE_OPERAND (var, 0);
8235 gcc_assert (DECL_P (var));
8237 t = ref = maybe_lookup_decl_in_outer_ctx (var, ctx);
8238 if (orig_var != var)
8239 gcc_assert (TREE_CODE (v) == ADDR_EXPR);
8240 else if (TREE_CODE (v) == ADDR_EXPR)
8241 t = build_fold_addr_expr (t);
8242 else if (TREE_CODE (v) == INDIRECT_REF)
8243 t = build_fold_indirect_ref (t);
8244 if (TREE_CODE (TREE_OPERAND (decl, 0)) == POINTER_PLUS_EXPR)
8246 tree b = TREE_OPERAND (TREE_OPERAND (decl, 0), 1);
8247 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
8248 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, b);
8250 if (!integer_zerop (TREE_OPERAND (decl, 1)))
8251 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t,
8252 fold_convert (size_type_node,
8253 TREE_OPERAND (decl, 1)));
8255 else
8257 t = ref = maybe_lookup_decl_in_outer_ctx (var, ctx);
8258 if (!omp_is_reference (decl))
8259 t = build_fold_addr_expr (t);
8261 t = fold_convert (pointer_sized_int_node, t);
8262 seq = NULL;
8263 t = force_gimple_operand (t, &seq, true, NULL_TREE);
8264 gimple_seq_add_seq (start, seq);
8265 r = build4 (ARRAY_REF, pointer_sized_int_node, avar,
8266 size_int (7 + cnt * 3), NULL_TREE, NULL_TREE);
8267 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
8268 t = unshare_expr (byte_position (field));
8269 t = fold_convert (pointer_sized_int_node, t);
8270 ctx->task_reduction_map->put (c, cnt);
8271 ctx->task_reductions.quick_push (TREE_CODE (t) == INTEGER_CST
8272 ? t : NULL_TREE);
8273 seq = NULL;
8274 t = force_gimple_operand (t, &seq, true, NULL_TREE);
8275 gimple_seq_add_seq (start, seq);
8276 r = build4 (ARRAY_REF, pointer_sized_int_node, avar,
8277 size_int (7 + cnt * 3 + 1), NULL_TREE, NULL_TREE);
8278 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
8280 tree bfield = DECL_CHAIN (field);
8281 tree cond;
8282 if (code == OMP_PARALLEL || code == OMP_FOR || code == OMP_SECTIONS)
8283 /* In parallel or worksharing all threads unconditionally
8284 initialize all their task reduction private variables. */
8285 cond = boolean_true_node;
8286 else if (TREE_TYPE (ptr) == ptr_type_node)
8288 cond = build2 (POINTER_PLUS_EXPR, ptr_type_node, ptr,
8289 unshare_expr (byte_position (bfield)));
8290 seq = NULL;
8291 cond = force_gimple_operand (cond, &seq, true, NULL_TREE);
8292 gimple_seq_add_seq (end, seq);
8293 tree pbool = build_pointer_type (TREE_TYPE (bfield));
8294 cond = build2 (MEM_REF, TREE_TYPE (bfield), cond,
8295 build_int_cst (pbool, 0));
8297 else
8298 cond = build3 (COMPONENT_REF, TREE_TYPE (bfield),
8299 build_simple_mem_ref (ptr), bfield, NULL_TREE);
8300 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
8301 tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
8302 tree condv = create_tmp_var (boolean_type_node);
8303 gimple_seq_add_stmt (end, gimple_build_assign (condv, cond));
8304 g = gimple_build_cond (NE_EXPR, condv, boolean_false_node,
8305 lab3, lab4);
8306 gimple_seq_add_stmt (end, g);
8307 gimple_seq_add_stmt (end, gimple_build_label (lab3));
8308 if (cancellable && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) == NULL_TREE)
8310 /* If this reduction doesn't need destruction and parallel
8311 has been cancelled, there is nothing to do for this
8312 reduction, so jump around the merge operation. */
8313 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
8314 g = gimple_build_cond (NE_EXPR, cancellable,
8315 build_zero_cst (TREE_TYPE (cancellable)),
8316 lab4, lab5);
8317 gimple_seq_add_stmt (end, g);
8318 gimple_seq_add_stmt (end, gimple_build_label (lab5));
8321 tree new_var;
8322 if (TREE_TYPE (ptr) == ptr_type_node)
8324 new_var = build2 (POINTER_PLUS_EXPR, ptr_type_node, ptr,
8325 unshare_expr (byte_position (field)));
8326 seq = NULL;
8327 new_var = force_gimple_operand (new_var, &seq, true, NULL_TREE);
8328 gimple_seq_add_seq (end, seq);
8329 tree pbool = build_pointer_type (TREE_TYPE (field));
8330 new_var = build2 (MEM_REF, TREE_TYPE (field), new_var,
8331 build_int_cst (pbool, 0));
8333 else
8334 new_var = build3 (COMPONENT_REF, TREE_TYPE (field),
8335 build_simple_mem_ref (ptr), field, NULL_TREE);
8337 enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
8338 if (TREE_CODE (decl) != MEM_REF && omp_is_reference (decl))
8339 ref = build_simple_mem_ref (ref);
8340 /* reduction(-:var) sums up the partial results, so it acts
8341 identically to reduction(+:var). */
8342 if (rcode == MINUS_EXPR)
8343 rcode = PLUS_EXPR;
8344 if (TREE_CODE (decl) == MEM_REF)
8346 tree type = TREE_TYPE (new_var);
8347 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
8348 tree i = create_tmp_var (TREE_TYPE (v));
8349 tree ptype = build_pointer_type (TREE_TYPE (type));
8350 if (DECL_P (v))
8352 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
8353 tree vv = create_tmp_var (TREE_TYPE (v));
8354 gimplify_assign (vv, v, start);
8355 v = vv;
8357 ref = build4 (ARRAY_REF, pointer_sized_int_node, avar,
8358 size_int (7 + cnt * 3), NULL_TREE, NULL_TREE);
8359 new_var = build_fold_addr_expr (new_var);
8360 new_var = fold_convert (ptype, new_var);
8361 ref = fold_convert (ptype, ref);
8362 tree m = create_tmp_var (ptype);
8363 gimplify_assign (m, new_var, end);
8364 new_var = m;
8365 m = create_tmp_var (ptype);
8366 gimplify_assign (m, ref, end);
8367 ref = m;
8368 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), end);
8369 tree body = create_artificial_label (UNKNOWN_LOCATION);
8370 tree endl = create_artificial_label (UNKNOWN_LOCATION);
8371 gimple_seq_add_stmt (end, gimple_build_label (body));
8372 tree priv = build_simple_mem_ref (new_var);
8373 tree out = build_simple_mem_ref (ref);
8374 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
8376 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
8377 tree decl_placeholder
8378 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
8379 tree lab6 = NULL_TREE;
8380 if (cancellable)
8382 /* If this reduction needs destruction and parallel
8383 has been cancelled, jump around the merge operation
8384 to the destruction. */
8385 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
8386 lab6 = create_artificial_label (UNKNOWN_LOCATION);
8387 tree zero = build_zero_cst (TREE_TYPE (cancellable));
8388 g = gimple_build_cond (NE_EXPR, cancellable, zero,
8389 lab6, lab5);
8390 gimple_seq_add_stmt (end, g);
8391 gimple_seq_add_stmt (end, gimple_build_label (lab5));
8393 SET_DECL_VALUE_EXPR (placeholder, out);
8394 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
8395 SET_DECL_VALUE_EXPR (decl_placeholder, priv);
8396 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
8397 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
8398 gimple_seq_add_seq (end,
8399 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
8400 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
8401 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
8403 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
8404 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
8406 if (cancellable)
8407 gimple_seq_add_stmt (end, gimple_build_label (lab6));
8408 tree x = lang_hooks.decls.omp_clause_dtor (c, priv);
8409 if (x)
8411 gimple_seq tseq = NULL;
8412 gimplify_stmt (&x, &tseq);
8413 gimple_seq_add_seq (end, tseq);
8416 else
8418 tree x = build2 (rcode, TREE_TYPE (out), out, priv);
8419 out = unshare_expr (out);
8420 gimplify_assign (out, x, end);
8422 gimple *g
8423 = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
8424 TYPE_SIZE_UNIT (TREE_TYPE (type)));
8425 gimple_seq_add_stmt (end, g);
8426 g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
8427 TYPE_SIZE_UNIT (TREE_TYPE (type)));
8428 gimple_seq_add_stmt (end, g);
8429 g = gimple_build_assign (i, PLUS_EXPR, i,
8430 build_int_cst (TREE_TYPE (i), 1));
8431 gimple_seq_add_stmt (end, g);
8432 g = gimple_build_cond (LE_EXPR, i, v, body, endl);
8433 gimple_seq_add_stmt (end, g);
8434 gimple_seq_add_stmt (end, gimple_build_label (endl));
8436 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
8438 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
8439 tree oldv = NULL_TREE;
8440 tree lab6 = NULL_TREE;
8441 if (cancellable)
8443 /* If this reduction needs destruction and parallel
8444 has been cancelled, jump around the merge operation
8445 to the destruction. */
8446 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
8447 lab6 = create_artificial_label (UNKNOWN_LOCATION);
8448 tree zero = build_zero_cst (TREE_TYPE (cancellable));
8449 g = gimple_build_cond (NE_EXPR, cancellable, zero,
8450 lab6, lab5);
8451 gimple_seq_add_stmt (end, g);
8452 gimple_seq_add_stmt (end, gimple_build_label (lab5));
8454 if (omp_is_reference (decl)
8455 && !useless_type_conversion_p (TREE_TYPE (placeholder),
8456 TREE_TYPE (ref)))
8457 ref = build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c), ref);
8458 ref = build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c), ref);
8459 tree refv = create_tmp_var (TREE_TYPE (ref));
8460 gimplify_assign (refv, ref, end);
8461 ref = build_simple_mem_ref_loc (OMP_CLAUSE_LOCATION (c), refv);
8462 SET_DECL_VALUE_EXPR (placeholder, ref);
8463 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
8464 tree d = maybe_lookup_decl (decl, ctx);
8465 gcc_assert (d);
8466 if (DECL_HAS_VALUE_EXPR_P (d))
8467 oldv = DECL_VALUE_EXPR (d);
8468 if (omp_is_reference (var))
8470 tree v = fold_convert (TREE_TYPE (d),
8471 build_fold_addr_expr (new_var));
8472 SET_DECL_VALUE_EXPR (d, v);
8474 else
8475 SET_DECL_VALUE_EXPR (d, new_var);
8476 DECL_HAS_VALUE_EXPR_P (d) = 1;
8477 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
8478 if (oldv)
8479 SET_DECL_VALUE_EXPR (d, oldv);
8480 else
8482 SET_DECL_VALUE_EXPR (d, NULL_TREE);
8483 DECL_HAS_VALUE_EXPR_P (d) = 0;
8485 gimple_seq_add_seq (end, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
8486 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
8487 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
8488 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
8489 if (cancellable)
8490 gimple_seq_add_stmt (end, gimple_build_label (lab6));
8491 tree x = lang_hooks.decls.omp_clause_dtor (c, new_var);
8492 if (x)
8494 gimple_seq tseq = NULL;
8495 gimplify_stmt (&x, &tseq);
8496 gimple_seq_add_seq (end, tseq);
8499 else
8501 tree x = build2 (rcode, TREE_TYPE (ref), ref, new_var);
8502 ref = unshare_expr (ref);
8503 gimplify_assign (ref, x, end);
8505 gimple_seq_add_stmt (end, gimple_build_label (lab4));
8506 ++cnt;
8507 field = DECL_CHAIN (bfield);
8511 if (code == OMP_TASKGROUP)
8513 t = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_REGISTER);
8514 g = gimple_build_call (t, 1, build_fold_addr_expr (avar));
8515 gimple_seq_add_stmt (start, g);
8517 else
8519 tree c;
8520 if (code == OMP_FOR)
8521 c = gimple_omp_for_clauses (ctx->stmt);
8522 else if (code == OMP_SECTIONS)
8523 c = gimple_omp_sections_clauses (ctx->stmt);
8524 else
8525 c = gimple_omp_taskreg_clauses (ctx->stmt);
8526 c = omp_find_clause (c, OMP_CLAUSE__REDUCTEMP_);
8527 t = fold_convert (TREE_TYPE (OMP_CLAUSE_DECL (c)),
8528 build_fold_addr_expr (avar));
8529 gimplify_assign (OMP_CLAUSE_DECL (c), t, start);
8532 gimple_seq_add_stmt (end, gimple_build_assign (data, PLUS_EXPR, data, sz));
8533 gimple_seq_add_stmt (end, gimple_build_assign (idx, PLUS_EXPR, idx,
8534 size_one_node));
8535 g = gimple_build_cond (NE_EXPR, idx, num_thr_sz, lab1, lab2);
8536 gimple_seq_add_stmt (end, g);
8537 gimple_seq_add_stmt (end, gimple_build_label (lab2));
8538 if (code == OMP_FOR || code == OMP_SECTIONS)
8540 enum built_in_function bfn
8541 = BUILT_IN_GOMP_WORKSHARE_TASK_REDUCTION_UNREGISTER;
8542 t = builtin_decl_explicit (bfn);
8543 tree c_bool_type = TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (t)));
8544 tree arg;
8545 if (cancellable)
8547 arg = create_tmp_var (c_bool_type);
8548 gimple_seq_add_stmt (end, gimple_build_assign (arg, NOP_EXPR,
8549 cancellable));
8551 else
8552 arg = build_int_cst (c_bool_type, 0);
8553 g = gimple_build_call (t, 1, arg);
8555 else
8557 t = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_UNREGISTER);
8558 g = gimple_build_call (t, 1, build_fold_addr_expr (avar));
8560 gimple_seq_add_stmt (end, g);
8561 t = build_constructor (atype, NULL);
8562 TREE_THIS_VOLATILE (t) = 1;
8563 gimple_seq_add_stmt (end, gimple_build_assign (avar, t));
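/* A sketch of user code exercising this machinery (illustrative only):

	int s = 0;
	#pragma omp taskgroup task_reduction (+: s)
	{
	  for (i = 0; i < n; i++)
	    {
	      #pragma omp task in_reduction (+: s)
	      s += a[i];
	    }
	}

   START registers the record built above with
   GOMP_taskgroup_reduction_register, and END walks every thread's
   chunk, merging each initialized private copy back into S before
   unregistering.  */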
8566 /* Expand code for an OpenMP taskgroup directive. */
8568 static void
8569 lower_omp_taskgroup (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8571 gimple *stmt = gsi_stmt (*gsi_p);
8572 gcall *x;
8573 gbind *bind;
8574 gimple_seq dseq = NULL;
8575 tree block = make_node (BLOCK);
8577 bind = gimple_build_bind (NULL, NULL, block);
8578 gsi_replace (gsi_p, bind, true);
8579 gimple_bind_add_stmt (bind, stmt);
8581 push_gimplify_context ();
8583 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_START),
8584 0);
8585 gimple_bind_add_stmt (bind, x);
8587 lower_omp_task_reductions (ctx, OMP_TASKGROUP,
8588 gimple_omp_taskgroup_clauses (stmt),
8589 gimple_bind_body_ptr (bind), &dseq);
8591 lower_omp (gimple_omp_body_ptr (stmt), ctx);
8592 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
8593 gimple_omp_set_body (stmt, NULL);
8595 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
8596 gimple_bind_add_seq (bind, dseq);
8598 pop_gimplify_context (bind);
8600 gimple_bind_append_vars (bind, ctx->block_vars);
8601 BLOCK_VARS (block) = ctx->block_vars;
8605 /* Fold the OMP_ORDERED_CLAUSES for the OMP_ORDERED in STMT if possible. */
8607 static void
8608 lower_omp_ordered_clauses (gimple_stmt_iterator *gsi_p, gomp_ordered *ord_stmt,
8609 omp_context *ctx)
8611 struct omp_for_data fd;
8612 if (!ctx->outer || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR)
8613 return;
8615 unsigned int len = gimple_omp_for_collapse (ctx->outer->stmt);
8616 struct omp_for_data_loop *loops = XALLOCAVEC (struct omp_for_data_loop, len);
8617 omp_extract_for_data (as_a <gomp_for *> (ctx->outer->stmt), &fd, loops);
8618 if (!fd.ordered)
8619 return;
8621 tree *list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
8622 tree c = gimple_omp_ordered_clauses (ord_stmt);
8623 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
8624 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
8626 /* Merge depend clauses from multiple adjacent
8627 #pragma omp ordered depend(sink:...) constructs
8628 into one #pragma omp ordered depend(sink:...), so that
8629 we can optimize them together. */
8630 gimple_stmt_iterator gsi = *gsi_p;
8631 gsi_next (&gsi);
8632 while (!gsi_end_p (gsi))
8634 gimple *stmt = gsi_stmt (gsi);
8635 if (is_gimple_debug (stmt)
8636 || gimple_code (stmt) == GIMPLE_NOP)
8638 gsi_next (&gsi);
8639 continue;
8641 if (gimple_code (stmt) != GIMPLE_OMP_ORDERED)
8642 break;
8643 gomp_ordered *ord_stmt2 = as_a <gomp_ordered *> (stmt);
8644 c = gimple_omp_ordered_clauses (ord_stmt2);
8645 if (c == NULL_TREE
8646 || OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND
8647 || OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
8648 break;
8649 while (*list_p)
8650 list_p = &OMP_CLAUSE_CHAIN (*list_p);
8651 *list_p = c;
8652 gsi_remove (&gsi, true);
8656 /* Canonicalize sink dependence clauses into one folded clause if
8657 possible.
8659 The basic algorithm is to create a sink vector whose first
8660 element is the GCD of all the first elements, and whose remaining
8661 elements are the minimum of the subsequent columns.
8663 We ignore dependence vectors whose first element is zero because
8664 such dependencies are known to be executed by the same thread.
8666 We take into account the direction of the loop, so a minimum
8667 becomes a maximum if the loop is iterating forwards. We also
8668 ignore sink clauses where the loop direction is unknown, or where
8669 the offsets are clearly invalid because they are not a multiple
8670 of the loop increment.
8672 For example:
8674 #pragma omp for ordered(2)
8675 for (i=0; i < N; ++i)
8676 for (j=0; j < M; ++j)
8678 #pragma omp ordered \
8679 depend(sink:i-8,j-2) \
8680 depend(sink:i,j-1) \ // Completely ignored because i+0.
8681 depend(sink:i-4,j-3) \
8682 depend(sink:i-6,j-4)
8683 #pragma omp ordered depend(source)
8686 Folded clause is:
8688 depend(sink:-gcd(8,4,6),-min(2,3,4))
8689 -or-
8690 depend(sink:-2,-2)
8693 /* FIXME: Computing GCD's where the first element is zero is
8694 non-trivial in the presence of collapsed loops. Do this later. */
8695 if (fd.collapse > 1)
8696 return;
8698 wide_int *folded_deps = XALLOCAVEC (wide_int, 2 * len - 1);
8700 /* wide_int is not a POD so it must be default-constructed. */
8701 for (unsigned i = 0; i != 2 * len - 1; ++i)
8702 new (static_cast<void*>(folded_deps + i)) wide_int ();
8704 tree folded_dep = NULL_TREE;
8705 /* TRUE if the first dimension's offset is negative. */
8706 bool neg_offset_p = false;
8708 list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
8709 unsigned int i;
8710 while ((c = *list_p) != NULL)
8712 bool remove = false;
8714 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND);
8715 if (OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
8716 goto next_ordered_clause;
8718 tree vec;
8719 for (vec = OMP_CLAUSE_DECL (c), i = 0;
8720 vec && TREE_CODE (vec) == TREE_LIST;
8721 vec = TREE_CHAIN (vec), ++i)
8723 gcc_assert (i < len);
8725 /* omp_extract_for_data has canonicalized the condition. */
8726 gcc_assert (fd.loops[i].cond_code == LT_EXPR
8727 || fd.loops[i].cond_code == GT_EXPR);
8728 bool forward = fd.loops[i].cond_code == LT_EXPR;
8729 bool maybe_lexically_later = true;
8731 /* While the committee makes up its mind, bail if we have any
8732 non-constant steps. */
8733 if (TREE_CODE (fd.loops[i].step) != INTEGER_CST)
8734 goto lower_omp_ordered_ret;
8736 tree itype = TREE_TYPE (TREE_VALUE (vec));
8737 if (POINTER_TYPE_P (itype))
8738 itype = sizetype;
8739 wide_int offset = wide_int::from (wi::to_wide (TREE_PURPOSE (vec)),
8740 TYPE_PRECISION (itype),
8741 TYPE_SIGN (itype));
8743 /* Ignore invalid offsets that are not multiples of the step. */
8744 if (!wi::multiple_of_p (wi::abs (offset),
8745 wi::abs (wi::to_wide (fd.loops[i].step)),
8746 UNSIGNED))
8748 warning_at (OMP_CLAUSE_LOCATION (c), 0,
8749 "ignoring sink clause with offset that is not "
8750 "a multiple of the loop step");
8751 remove = true;
8752 goto next_ordered_clause;
8755 /* Calculate the first dimension. The first dimension of
8756 the folded dependency vector is the GCD of the first
8757 elements, while ignoring any first elements whose offset
8758 is 0. */
8759 if (i == 0)
8761 /* Ignore dependence vectors whose first dimension is 0. */
8762 if (offset == 0)
8764 remove = true;
8765 goto next_ordered_clause;
8767 else
8769 if (!TYPE_UNSIGNED (itype) && (forward ^ wi::neg_p (offset)))
8771 error_at (OMP_CLAUSE_LOCATION (c),
8772 "first offset must be in opposite direction "
8773 "of loop iterations");
8774 goto lower_omp_ordered_ret;
8776 if (forward)
8777 offset = -offset;
8778 neg_offset_p = forward;
8779 /* Initialize the first time around. */
8780 if (folded_dep == NULL_TREE)
8782 folded_dep = c;
8783 folded_deps[0] = offset;
8785 else
8786 folded_deps[0] = wi::gcd (folded_deps[0],
8787 offset, UNSIGNED);
8790 /* Calculate minimum for the remaining dimensions. */
8791 else
8793 folded_deps[len + i - 1] = offset;
8794 if (folded_dep == c)
8795 folded_deps[i] = offset;
8796 else if (maybe_lexically_later
8797 && !wi::eq_p (folded_deps[i], offset))
8799 if (forward ^ wi::gts_p (folded_deps[i], offset))
8801 unsigned int j;
8802 folded_dep = c;
8803 for (j = 1; j <= i; j++)
8804 folded_deps[j] = folded_deps[len + j - 1];
8806 else
8807 maybe_lexically_later = false;
8811 gcc_assert (i == len);
8813 remove = true;
8815 next_ordered_clause:
8816 if (remove)
8817 *list_p = OMP_CLAUSE_CHAIN (c);
8818 else
8819 list_p = &OMP_CLAUSE_CHAIN (c);
8822 if (folded_dep)
8824 if (neg_offset_p)
8825 folded_deps[0] = -folded_deps[0];
8827 tree itype = TREE_TYPE (TREE_VALUE (OMP_CLAUSE_DECL (folded_dep)));
8828 if (POINTER_TYPE_P (itype))
8829 itype = sizetype;
8831 TREE_PURPOSE (OMP_CLAUSE_DECL (folded_dep))
8832 = wide_int_to_tree (itype, folded_deps[0]);
8833 OMP_CLAUSE_CHAIN (folded_dep) = gimple_omp_ordered_clauses (ord_stmt);
8834 *gimple_omp_ordered_clauses_ptr (ord_stmt) = folded_dep;
8837 lower_omp_ordered_ret:
8839 /* Ordered without clauses is #pragma omp ordered threads, while we want
8840 a nop instead if we remove all clauses. */
8841 if (gimple_omp_ordered_clauses (ord_stmt) == NULL_TREE)
8842 gsi_replace (gsi_p, gimple_build_nop (), true);
8846 /* Expand code for an OpenMP ordered directive. */
8848 static void
8849 lower_omp_ordered (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8851 tree block;
8852 gimple *stmt = gsi_stmt (*gsi_p), *g;
8853 gomp_ordered *ord_stmt = as_a <gomp_ordered *> (stmt);
8854 gcall *x;
8855 gbind *bind;
8856 bool simd = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
8857 OMP_CLAUSE_SIMD);
8858 /* FIXME: this should check presence of OMP_CLAUSE__SIMT_ on the enclosing
8859 loop. */
8860 bool maybe_simt
8861 = simd && omp_maybe_offloaded_ctx (ctx) && omp_max_simt_vf () > 1;
8862 bool threads = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
8863 OMP_CLAUSE_THREADS);
8865 if (omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
8866 OMP_CLAUSE_DEPEND))
8868 /* FIXME: This needs to be moved to the expansion to verify various
8869 conditions only testable on a cfg with dominators computed; also,
8870 all the depend clauses to be merged still might need to be available
8871 for the runtime checks. */
8872 if (0)
8873 lower_omp_ordered_clauses (gsi_p, ord_stmt, ctx);
8874 return;
8877 push_gimplify_context ();
8879 block = make_node (BLOCK);
8880 bind = gimple_build_bind (NULL, NULL, block);
8881 gsi_replace (gsi_p, bind, true);
8882 gimple_bind_add_stmt (bind, stmt);
8884 if (simd)
8886 x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_START, 1,
8887 build_int_cst (NULL_TREE, threads));
8888 cfun->has_simduid_loops = true;
8890 else
8891 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_START),
8892 0);
8893 gimple_bind_add_stmt (bind, x);
8895 tree counter = NULL_TREE, test = NULL_TREE, body = NULL_TREE;
8896 if (maybe_simt)
8898 counter = create_tmp_var (integer_type_node);
8899 g = gimple_build_call_internal (IFN_GOMP_SIMT_LANE, 0);
8900 gimple_call_set_lhs (g, counter);
8901 gimple_bind_add_stmt (bind, g);
8903 body = create_artificial_label (UNKNOWN_LOCATION);
8904 test = create_artificial_label (UNKNOWN_LOCATION);
8905 gimple_bind_add_stmt (bind, gimple_build_label (body));
8907 tree simt_pred = create_tmp_var (integer_type_node);
8908 g = gimple_build_call_internal (IFN_GOMP_SIMT_ORDERED_PRED, 1, counter);
8909 gimple_call_set_lhs (g, simt_pred);
8910 gimple_bind_add_stmt (bind, g);
8912 tree t = create_artificial_label (UNKNOWN_LOCATION);
8913 g = gimple_build_cond (EQ_EXPR, simt_pred, integer_zero_node, t, test);
8914 gimple_bind_add_stmt (bind, g);
8916 gimple_bind_add_stmt (bind, gimple_build_label (t));
8918 lower_omp (gimple_omp_body_ptr (stmt), ctx);
8919 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
8920 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
8921 gimple_omp_set_body (stmt, NULL);
8923 if (maybe_simt)
8925 gimple_bind_add_stmt (bind, gimple_build_label (test));
8926 g = gimple_build_assign (counter, MINUS_EXPR, counter, integer_one_node);
8927 gimple_bind_add_stmt (bind, g);
8929 tree c = build2 (GE_EXPR, boolean_type_node, counter, integer_zero_node);
8930 tree nonneg = create_tmp_var (integer_type_node);
8931 gimple_seq tseq = NULL;
8932 gimplify_assign (nonneg, fold_convert (integer_type_node, c), &tseq);
8933 gimple_bind_add_seq (bind, tseq);
8935 g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY, 1, nonneg);
8936 gimple_call_set_lhs (g, nonneg);
8937 gimple_bind_add_stmt (bind, g);
8939 tree end = create_artificial_label (UNKNOWN_LOCATION);
8940 g = gimple_build_cond (NE_EXPR, nonneg, integer_zero_node, body, end);
8941 gimple_bind_add_stmt (bind, g);
8943 gimple_bind_add_stmt (bind, gimple_build_label (end));
8945 if (simd)
8946 x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_END, 1,
8947 build_int_cst (NULL_TREE, threads));
8948 else
8949 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_END),
8950 0);
8951 gimple_bind_add_stmt (bind, x);
8953 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
8955 pop_gimplify_context (bind);
8957 gimple_bind_append_vars (bind, ctx->block_vars);
8958 BLOCK_VARS (block) = gimple_bind_vars (bind);
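/* For instance (illustrative):

	#pragma omp for ordered
	for (i = 0; i < n; i++)
	  {
	    compute (i);
	    #pragma omp ordered
	    emit (i);
	  }

   Here the ordered body is bracketed by GOMP_ordered_start and
   GOMP_ordered_end, or by the IFN_GOMP_SIMD_ORDERED_* internal
   functions for simd, with the additional per-lane loop above when
   SIMT execution is possible.  */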
8962 /* Expand code for an OpenMP scan directive and the structured block
8963 before the scan directive. */
8965 static void
8966 lower_omp_scan (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8968 gimple *stmt = gsi_stmt (*gsi_p);
8969 bool has_clauses
8970 = gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt)) != NULL;
8971 tree lane = NULL_TREE;
8972 gimple_seq before = NULL;
8973 omp_context *octx = ctx->outer;
8974 gcc_assert (octx);
8975 if (octx->scan_exclusive && !has_clauses)
8977 gimple_stmt_iterator gsi2 = *gsi_p;
8978 gsi_next (&gsi2);
8979 gimple *stmt2 = gsi_stmt (gsi2);
8980 /* For exclusive scan, swap GIMPLE_OMP_SCAN without clauses
8981 with following GIMPLE_OMP_SCAN with clauses, so that input_phase,
8982 the one with exclusive clause(s), comes first. */
8983 if (stmt2
8984 && gimple_code (stmt2) == GIMPLE_OMP_SCAN
8985 && gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt2)) != NULL)
8987 gsi_remove (gsi_p, false);
8988 gsi_insert_after (gsi_p, stmt, GSI_SAME_STMT);
8989 ctx = maybe_lookup_ctx (stmt2);
8990 gcc_assert (ctx);
8991 lower_omp_scan (gsi_p, ctx);
8992 return;
8996 bool input_phase = has_clauses ^ octx->scan_inclusive;
8997 bool is_simd = (gimple_code (octx->stmt) == GIMPLE_OMP_FOR
8998 && gimple_omp_for_kind (octx->stmt) == GF_OMP_FOR_KIND_SIMD);
8999 bool is_for = (gimple_code (octx->stmt) == GIMPLE_OMP_FOR
9000 && gimple_omp_for_kind (octx->stmt) == GF_OMP_FOR_KIND_FOR
9001 && !gimple_omp_for_combined_p (octx->stmt));
9002 bool is_for_simd = is_simd && gimple_omp_for_combined_into_p (octx->stmt);
9003 if (is_for_simd && octx->for_simd_scan_phase)
9004 is_simd = false;
9005 if (is_simd)
9006 if (tree c = omp_find_clause (gimple_omp_for_clauses (octx->stmt),
9007 OMP_CLAUSE__SIMDUID_))
9009 tree uid = OMP_CLAUSE__SIMDUID__DECL (c);
9010 lane = create_tmp_var (unsigned_type_node);
9011 tree t = build_int_cst (integer_type_node,
9012 input_phase ? 1
9013 : octx->scan_inclusive ? 2 : 3);
9014 gimple *g
9015 = gimple_build_call_internal (IFN_GOMP_SIMD_LANE, 2, uid, t);
9016 gimple_call_set_lhs (g, lane);
9017 gimple_seq_add_stmt (&before, g);
9020 if (is_simd || is_for)
9022 for (tree c = gimple_omp_for_clauses (octx->stmt);
9023 c; c = OMP_CLAUSE_CHAIN (c))
9024 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
9025 && OMP_CLAUSE_REDUCTION_INSCAN (c))
9027 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
9028 tree var = OMP_CLAUSE_DECL (c);
9029 tree new_var = lookup_decl (var, octx);
9030 tree val = new_var;
9031 tree var2 = NULL_TREE;
9032 tree var3 = NULL_TREE;
9033 tree var4 = NULL_TREE;
9034 tree lane0 = NULL_TREE;
9035 tree new_vard = new_var;
9036 if (omp_is_reference (var))
9038 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
9039 val = new_var;
9041 if (DECL_HAS_VALUE_EXPR_P (new_vard))
9043 val = DECL_VALUE_EXPR (new_vard);
9044 if (new_vard != new_var)
9046 gcc_assert (TREE_CODE (val) == ADDR_EXPR);
9047 val = TREE_OPERAND (val, 0);
9049 if (TREE_CODE (val) == ARRAY_REF
9050 && VAR_P (TREE_OPERAND (val, 0)))
9052 tree v = TREE_OPERAND (val, 0);
9053 if (lookup_attribute ("omp simd array",
9054 DECL_ATTRIBUTES (v)))
9056 val = unshare_expr (val);
9057 lane0 = TREE_OPERAND (val, 1);
9058 TREE_OPERAND (val, 1) = lane;
9059 var2 = lookup_decl (v, octx);
9060 if (octx->scan_exclusive)
9061 var4 = lookup_decl (var2, octx);
9062 if (input_phase
9063 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
9064 var3 = maybe_lookup_decl (var4 ? var4 : var2, octx);
9065 if (!input_phase)
9067 var2 = build4 (ARRAY_REF, TREE_TYPE (val),
9068 var2, lane, NULL_TREE, NULL_TREE);
9069 TREE_THIS_NOTRAP (var2) = 1;
9070 if (octx->scan_exclusive)
9072 var4 = build4 (ARRAY_REF, TREE_TYPE (val),
9073 var4, lane, NULL_TREE,
9074 NULL_TREE);
9075 TREE_THIS_NOTRAP (var4) = 1;
9078 else
9079 var2 = val;
9082 gcc_assert (var2);
9084 else
9086 var2 = build_outer_var_ref (var, octx);
9087 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
9089 var3 = maybe_lookup_decl (new_vard, octx);
9090 if (var3 == new_vard || var3 == NULL_TREE)
9091 var3 = NULL_TREE;
9092 else if (is_simd && octx->scan_exclusive && !input_phase)
9094 var4 = maybe_lookup_decl (var3, octx);
9095 if (var4 == var3 || var4 == NULL_TREE)
9097 if (TREE_ADDRESSABLE (TREE_TYPE (new_var)))
9099 var4 = var3;
9100 var3 = NULL_TREE;
9102 else
9103 var4 = NULL_TREE;
9107 if (is_simd
9108 && octx->scan_exclusive
9109 && !input_phase
9110 && var4 == NULL_TREE)
9111 var4 = create_tmp_var (TREE_TYPE (val));
9113 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
9115 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
9116 if (input_phase)
9118 if (var3)
9120 /* If we've added a separate identity element
9121 variable, copy it over into val. */
9122 tree x = lang_hooks.decls.omp_clause_assign_op (c, val,
9123 var3);
9124 gimplify_and_add (x, &before);
9126 else if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
9128 /* Otherwise, assign to it the identity element. */
9129 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
9130 if (is_for)
9131 tseq = copy_gimple_seq_and_replace_locals (tseq);
9132 tree ref = build_outer_var_ref (var, octx);
9133 tree x = (DECL_HAS_VALUE_EXPR_P (new_vard)
9134 ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
9135 if (x)
9137 if (new_vard != new_var)
9138 val = build_fold_addr_expr_loc (clause_loc, val);
9139 SET_DECL_VALUE_EXPR (new_vard, val);
9141 SET_DECL_VALUE_EXPR (placeholder, ref);
9142 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
9143 lower_omp (&tseq, octx);
9144 if (x)
9145 SET_DECL_VALUE_EXPR (new_vard, x);
9146 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
9147 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
9148 gimple_seq_add_seq (&before, tseq);
9149 if (is_simd)
9150 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
9153 else if (is_simd)
9155 tree x;
9156 if (octx->scan_exclusive)
9158 tree v4 = unshare_expr (var4);
9159 tree v2 = unshare_expr (var2);
9160 x = lang_hooks.decls.omp_clause_assign_op (c, v4, v2);
9161 gimplify_and_add (x, &before);
9163 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
9164 x = (DECL_HAS_VALUE_EXPR_P (new_vard)
9165 ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
9166 tree vexpr = val;
9167 if (x && new_vard != new_var)
9168 vexpr = build_fold_addr_expr_loc (clause_loc, val);
9169 if (x)
9170 SET_DECL_VALUE_EXPR (new_vard, vexpr);
9171 SET_DECL_VALUE_EXPR (placeholder, var2);
9172 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
9173 lower_omp (&tseq, octx);
9174 gimple_seq_add_seq (&before, tseq);
9175 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
9176 if (x)
9177 SET_DECL_VALUE_EXPR (new_vard, x);
9178 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
9179 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
9180 if (octx->scan_inclusive)
9182 x = lang_hooks.decls.omp_clause_assign_op (c, val,
9183 var2);
9184 gimplify_and_add (x, &before);
9186 else if (lane0 == NULL_TREE)
9188 x = lang_hooks.decls.omp_clause_assign_op (c, val,
9189 var4);
9190 gimplify_and_add (x, &before);
9194 else
9196 if (input_phase)
9198 /* input phase. Set val to initializer before
9199 the body. */
9200 tree x = omp_reduction_init (c, TREE_TYPE (new_var));
9201 gimplify_assign (val, x, &before);
9203 else if (is_simd)
9205 /* scan phase. */
9206 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
9207 if (code == MINUS_EXPR)
9208 code = PLUS_EXPR;
9210 tree x = build2 (code, TREE_TYPE (var2),
9211 unshare_expr (var2), unshare_expr (val));
9212 if (octx->scan_inclusive)
9214 gimplify_assign (unshare_expr (var2), x, &before);
9215 gimplify_assign (val, var2, &before);
9217 else
9219 gimplify_assign (unshare_expr (var4),
9220 unshare_expr (var2), &before);
9221 gimplify_assign (var2, x, &before);
9222 if (lane0 == NULL_TREE)
9223 gimplify_assign (val, var4, &before);
9227 if (octx->scan_exclusive && !input_phase && lane0)
9229 tree vexpr = unshare_expr (var4);
9230 TREE_OPERAND (vexpr, 1) = lane0;
9231 if (new_vard != new_var)
9232 vexpr = build_fold_addr_expr_loc (clause_loc, vexpr);
9233 SET_DECL_VALUE_EXPR (new_vard, vexpr);
9237 if (is_simd && !is_for_simd)
9239 gsi_insert_seq_after (gsi_p, gimple_omp_body (stmt), GSI_SAME_STMT);
9240 gsi_insert_seq_after (gsi_p, before, GSI_SAME_STMT);
9241 gsi_replace (gsi_p, gimple_build_nop (), true);
9242 return;
9244 lower_omp (gimple_omp_body_ptr (stmt), octx);
9245 if (before)
9247 gimple_stmt_iterator gsi = gsi_start_1 (gimple_omp_body_ptr (stmt));
9248 gsi_insert_seq_before (&gsi, before, GSI_SAME_STMT);
9253 /* Gimplify a GIMPLE_OMP_CRITICAL statement. This is a relatively simple
9254 substitution of a couple of function calls. But in the NAMED case,
9255 it requires that the languages coordinate a symbol name. It is therefore
9256 best put here in common code. */
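/* As an illustrative sketch (not the exact generated IL), a named
   critical section such as

     #pragma omp critical (lock1)
       x++;

   is lowered to roughly

     GOMP_critical_name_start (&.gomp_critical_user_lock1);
     x++;
     GOMP_critical_name_end (&.gomp_critical_user_lock1);

   where .gomp_critical_user_lock1 is the coordinated public symbol
   created below, while the unnamed form calls GOMP_critical_start and
   GOMP_critical_end with no arguments.  */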
9258 static GTY(()) hash_map<tree, tree> *critical_name_mutexes;
9260 static void
9261 lower_omp_critical (gimple_stmt_iterator *gsi_p, omp_context *ctx)
9263 tree block;
9264 tree name, lock, unlock;
9265 gomp_critical *stmt = as_a <gomp_critical *> (gsi_stmt (*gsi_p));
9266 gbind *bind;
9267 location_t loc = gimple_location (stmt);
9268 gimple_seq tbody;
9270 name = gimple_omp_critical_name (stmt);
9271 if (name)
9273 tree decl;
9275 if (!critical_name_mutexes)
9276 critical_name_mutexes = hash_map<tree, tree>::create_ggc (10);
9278 tree *n = critical_name_mutexes->get (name);
9279 if (n == NULL)
9281 char *new_str;
9283 decl = create_tmp_var_raw (ptr_type_node);
9285 new_str = ACONCAT ((".gomp_critical_user_",
9286 IDENTIFIER_POINTER (name), NULL));
9287 DECL_NAME (decl) = get_identifier (new_str);
9288 TREE_PUBLIC (decl) = 1;
9289 TREE_STATIC (decl) = 1;
9290 DECL_COMMON (decl) = 1;
9291 DECL_ARTIFICIAL (decl) = 1;
9292 DECL_IGNORED_P (decl) = 1;
9294 varpool_node::finalize_decl (decl);
9296 critical_name_mutexes->put (name, decl);
9298 else
9299 decl = *n;
9301 /* If '#pragma omp critical' is inside an offloaded region or
9302 inside a function marked as offloadable, the symbol must be
9303 marked as offloadable too. */
9304 omp_context *octx;
9305 if (cgraph_node::get (current_function_decl)->offloadable)
9306 varpool_node::get_create (decl)->offloadable = 1;
9307 else
9308 for (octx = ctx->outer; octx; octx = octx->outer)
9309 if (is_gimple_omp_offloaded (octx->stmt))
9311 varpool_node::get_create (decl)->offloadable = 1;
9312 break;
9315 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_START);
9316 lock = build_call_expr_loc (loc, lock, 1,
9317 build_fold_addr_expr_loc (loc, decl));
9319 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_END);
9320 unlock = build_call_expr_loc (loc, unlock, 1,
9321 build_fold_addr_expr_loc (loc, decl));
9323 else
9325 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_START);
9326 lock = build_call_expr_loc (loc, lock, 0);
9328 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_END);
9329 unlock = build_call_expr_loc (loc, unlock, 0);
9332 push_gimplify_context ();
9334 block = make_node (BLOCK);
9335 bind = gimple_build_bind (NULL, NULL, block);
9336 gsi_replace (gsi_p, bind, true);
9337 gimple_bind_add_stmt (bind, stmt);
9339 tbody = gimple_bind_body (bind);
9340 gimplify_and_add (lock, &tbody);
9341 gimple_bind_set_body (bind, tbody);
9343 lower_omp (gimple_omp_body_ptr (stmt), ctx);
9344 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
9345 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
9346 gimple_omp_set_body (stmt, NULL);
9348 tbody = gimple_bind_body (bind);
9349 gimplify_and_add (unlock, &tbody);
9350 gimple_bind_set_body (bind, tbody);
9352 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
9354 pop_gimplify_context (bind);
9355 gimple_bind_append_vars (bind, ctx->block_vars);
9356 BLOCK_VARS (block) = gimple_bind_vars (bind);
9359 /* A subroutine of lower_omp_for. Generate code to emit the predicate
9360 for a lastprivate clause. Given a loop control predicate of (V
9361 cond N2), we gate the clause on (!(V cond N2)). The lowered form
9362 is appended to *DLIST, iterator initialization is appended to
9363 *BODY_P. *CLIST is for lastprivate(conditional:) code that needs
9364 to be emitted in a critical section. */
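/* For example (a sketch only; x.priv stands for the privatized copy), for

     #pragma omp for lastprivate (x)
     for (i = 0; i < n; i++)
       x = f (i);

   the copy-out is gated as

     if (i >= n)	// i.e. !(i < n), true once the loop has finished
       x = x.priv;

   so only the thread that executed the sequentially last iteration
   performs the assignment.  */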
9366 static void
9367 lower_omp_for_lastprivate (struct omp_for_data *fd, gimple_seq *body_p,
9368 gimple_seq *dlist, gimple_seq *clist,
9369 struct omp_context *ctx)
9371 tree clauses, cond, vinit;
9372 enum tree_code cond_code;
9373 gimple_seq stmts;
9375 cond_code = fd->loop.cond_code;
9376 cond_code = cond_code == LT_EXPR ? GE_EXPR : LE_EXPR;
9378 /* When possible, use a strict equality expression. This can let VRP-style
9379 optimizations deduce the value and remove a copy. */
9380 if (tree_fits_shwi_p (fd->loop.step))
9382 HOST_WIDE_INT step = tree_to_shwi (fd->loop.step);
9383 if (step == 1 || step == -1)
9384 cond_code = EQ_EXPR;
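/* E.g., with a step of 1 the gate becomes V == N2 rather than V >= N2,
   from which VRP can deduce V's exact value.  */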
9387 if (gimple_omp_for_kind (fd->for_stmt) == GF_OMP_FOR_KIND_GRID_LOOP
9388 || gimple_omp_for_grid_phony (fd->for_stmt))
9389 cond = omp_grid_lastprivate_predicate (fd);
9390 else
9392 tree n2 = fd->loop.n2;
9393 if (fd->collapse > 1
9394 && TREE_CODE (n2) != INTEGER_CST
9395 && gimple_omp_for_combined_into_p (fd->for_stmt))
9397 struct omp_context *taskreg_ctx = NULL;
9398 if (gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
9400 gomp_for *gfor = as_a <gomp_for *> (ctx->outer->stmt);
9401 if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_FOR
9402 || gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_DISTRIBUTE)
9404 if (gimple_omp_for_combined_into_p (gfor))
9406 gcc_assert (ctx->outer->outer
9407 && is_parallel_ctx (ctx->outer->outer));
9408 taskreg_ctx = ctx->outer->outer;
9410 else
9412 struct omp_for_data outer_fd;
9413 omp_extract_for_data (gfor, &outer_fd, NULL);
9414 n2 = fold_convert (TREE_TYPE (n2), outer_fd.loop.n2);
9417 else if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_TASKLOOP)
9418 taskreg_ctx = ctx->outer->outer;
9420 else if (is_taskreg_ctx (ctx->outer))
9421 taskreg_ctx = ctx->outer;
9422 if (taskreg_ctx)
9424 int i;
9425 tree taskreg_clauses
9426 = gimple_omp_taskreg_clauses (taskreg_ctx->stmt);
9427 tree innerc = omp_find_clause (taskreg_clauses,
9428 OMP_CLAUSE__LOOPTEMP_);
9429 gcc_assert (innerc);
9430 for (i = 0; i < fd->collapse; i++)
9432 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
9433 OMP_CLAUSE__LOOPTEMP_);
9434 gcc_assert (innerc);
9436 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
9437 OMP_CLAUSE__LOOPTEMP_);
9438 if (innerc)
9439 n2 = fold_convert (TREE_TYPE (n2),
9440 lookup_decl (OMP_CLAUSE_DECL (innerc),
9441 taskreg_ctx));
9444 cond = build2 (cond_code, boolean_type_node, fd->loop.v, n2);
9447 clauses = gimple_omp_for_clauses (fd->for_stmt);
9448 stmts = NULL;
9449 lower_lastprivate_clauses (clauses, cond, body_p, &stmts, clist, ctx);
9450 if (!gimple_seq_empty_p (stmts))
9452 gimple_seq_add_seq (&stmts, *dlist);
9453 *dlist = stmts;
9455 /* Optimize: v = 0; is usually cheaper than v = some_other_constant. */
9456 vinit = fd->loop.n1;
9457 if (cond_code == EQ_EXPR
9458 && tree_fits_shwi_p (fd->loop.n2)
9459 && ! integer_zerop (fd->loop.n2))
9460 vinit = build_int_cst (TREE_TYPE (fd->loop.v), 0);
9461 else
9462 vinit = unshare_expr (vinit);
9464 /* Initialize the iterator variable, so that threads that don't execute
9465 any iterations don't execute the lastprivate clauses by accident. */
9466 gimplify_assign (fd->loop.v, vinit, body_p);
9470 /* Callback for walk_gimple_seq. Find #pragma omp scan statement. */
9472 static tree
9473 omp_find_scan (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
9474 struct walk_stmt_info *wi)
9476 gimple *stmt = gsi_stmt (*gsi_p);
9478 *handled_ops_p = true;
9479 switch (gimple_code (stmt))
9481 WALK_SUBSTMTS;
9483 case GIMPLE_OMP_FOR:
9484 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_SIMD
9485 && gimple_omp_for_combined_into_p (stmt))
9486 *handled_ops_p = false;
9487 break;
9489 case GIMPLE_OMP_SCAN:
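/* Record where the scan was found; returning non-NULL stops the walk.  */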
9490 *(gimple_stmt_iterator *) (wi->info) = *gsi_p;
9491 return integer_zero_node;
9492 default:
9493 break;
9495 return NULL;
9498 /* Helper function for lower_omp_for, add transformations for a worksharing
9499 loop with scan directives inside of it.
9500 For a worksharing loop not combined with simd, transform:
9501 #pragma omp for reduction(inscan,+:r) private(i)
9502 for (i = 0; i < n; i = i + 1)
9505 update (r);
9507 #pragma omp scan inclusive(r)
9509 use (r);
9513 into two worksharing loops + code to merge results:
9515 num_threads = omp_get_num_threads ();
9516 thread_num = omp_get_thread_num ();
9517 if (thread_num == 0) goto <D.2099>; else goto <D.2100>;
9518 <D.2099>:
9519 var2 = r;
9520 goto <D.2101>;
9521 <D.2100>:
9522 // For UDRs this is UDR init, or if ctors are needed, copy from
9523 // var3 that has been constructed to contain the neutral element.
9524 var2 = 0;
9525 <D.2101>:
9526 ivar = 0;
9527 // The _scantemp_ clauses will arrange for rpriva to be initialized to
9528 // a shared array with num_threads elements and rprivb to a local array
9529 // with a number of elements equal to the number of (contiguous) iterations
9530 // the current thread will perform. The controlb and controlp variables are
9531 // temporaries to handle deallocation of rprivb at the end of second
9532 // GOMP_FOR.
9533 #pragma omp for _scantemp_(rpriva) _scantemp_(rprivb) _scantemp_(controlb) \
9534 _scantemp_(controlp) reduction(inscan,+:r) private(i) nowait
9535 for (i = 0; i < n; i = i + 1)
9538 // For UDRs this is UDR init or copy from var3.
9539 r = 0;
9540 // This is the input phase from user code.
9541 update (r);
9544 // For UDRs this is UDR merge.
9545 var2 = var2 + r;
9546 // Rather than handing it over to the user, save it to the local
9547 // thread's array.
9548 rprivb[ivar] = var2;
9549 // For exclusive scan, the above two statements are swapped.
9550 ivar = ivar + 1;
9553 // And remember this thread's final value in the shared
9554 // rpriva array.
9555 rpriva[(sizetype) thread_num] = var2;
9556 // If there is more than one thread, compute the inclusive parallel scan
9557 // of the rpriva array using a work-efficient prefix sum.
9558 if (num_threads > 1) goto <D.2102>; else goto <D.2103>;
9559 <D.2102>:
9560 GOMP_barrier ();
9561 down = 0;
9562 k = 1;
9563 num_threadsu = (unsigned int) num_threads;
9564 thread_nump1 = (unsigned int) thread_num + 1;
9565 <D.2108>:
9566 twok = k << 1;
9567 if (twok > num_threadsu) goto <D.2110>; else goto <D.2111>;
9568 <D.2110>:
9569 down = 4294967295;
9570 k = k >> 1;
9571 if (k == num_threadsu) goto <D.2112>; else goto <D.2111>;
9572 <D.2112>:
9573 k = k >> 1;
9574 <D.2111>:
9575 twok = k << 1;
9576 cplx = .MUL_OVERFLOW (thread_nump1, twok);
9577 mul = REALPART_EXPR <cplx>;
9578 ovf = IMAGPART_EXPR <cplx>;
9579 if (ovf == 0) goto <D.2116>; else goto <D.2117>;
9580 <D.2116>:
9581 andv = k & down;
9582 andvm1 = andv + 4294967295;
9583 l = mul + andvm1;
9584 if (l < num_threadsu) goto <D.2120>; else goto <D.2117>;
9585 <D.2120>:
9586 // For UDRs this is UDR merge, performed using var2 variable as temporary,
9587 // i.e. var2 = rpriva[l - k]; UDR merge (var2, rpriva[l]); rpriva[l] = var2;
9588 rpriva[l] = rpriva[l - k] + rpriva[l];
9589 <D.2117>:
9590 if (down == 0) goto <D.2121>; else goto <D.2122>;
9591 <D.2121>:
9592 k = k << 1;
9593 goto <D.2123>;
9594 <D.2122>:
9595 k = k >> 1;
9596 <D.2123>:
9597 GOMP_barrier ();
9598 if (k != 0) goto <D.2108>; else goto <D.2103>;
9599 <D.2103>:
9600 if (thread_num == 0) goto <D.2124>; else goto <D.2125>;
9601 <D.2124>:
9602 // For UDRs this is UDR init or copy from var3.
9603 var2 = 0;
9604 goto <D.2126>;
9605 <D.2125>:
9606 var2 = rpriva[thread_num - 1];
9607 <D.2126>:
9608 ivar = 0;
9609 #pragma omp for _scantemp_(controlb) _scantemp_(controlp) \
9610 reduction(inscan,+:r) private(i)
9611 for (i = 0; i < n; i = i + 1)
9614 // For UDRs, this is r = var2; UDR merge (r, rprivb[ivar]);
9615 r = var2 + rprivb[ivar];
9618 // This is the scan phase from user code.
9619 use (r);
9620 // Plus a bump of the iterator.
9621 ivar = ivar + 1;
9623 } */
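/* A concrete user-level example of the inclusive case handled above
   (a sketch; a, b and n are hypothetical):

     int r = 0;
     #pragma omp parallel for reduction (inscan, +:r)
     for (i = 0; i < n; i++)
       {
	 r += a[i];			// input phase
	 #pragma omp scan inclusive (r)
	 b[i] = r;			// scan phase
       }

   after which b[i] holds a[0] + ... + a[i].  */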
9625 static void
9626 lower_omp_for_scan (gimple_seq *body_p, gimple_seq *dlist, gomp_for *stmt,
9627 struct omp_for_data *fd, omp_context *ctx)
9629 bool is_for_simd = gimple_omp_for_combined_p (stmt);
9630 gcc_assert (ctx->scan_inclusive || ctx->scan_exclusive);
9632 gimple_seq body = gimple_omp_body (stmt);
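/* Locate the GIMPLE_OMP_SCAN that separates the input phase from the
   scan phase in the loop body.  */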
9633 gimple_stmt_iterator input1_gsi = gsi_none ();
9634 struct walk_stmt_info wi;
9635 memset (&wi, 0, sizeof (wi));
9636 wi.val_only = true;
9637 wi.info = (void *) &input1_gsi;
9638 walk_gimple_seq_mod (&body, omp_find_scan, NULL, &wi);
9639 gcc_assert (!gsi_end_p (input1_gsi));
9641 gimple *input_stmt1 = gsi_stmt (input1_gsi);
9642 gimple_stmt_iterator gsi = input1_gsi;
9643 gsi_next (&gsi);
9644 gimple_stmt_iterator scan1_gsi = gsi;
9645 gimple *scan_stmt1 = gsi_stmt (gsi);
9646 gcc_assert (scan_stmt1 && gimple_code (scan_stmt1) == GIMPLE_OMP_SCAN);
9648 gimple_seq input_body = gimple_omp_body (input_stmt1);
9649 gimple_seq scan_body = gimple_omp_body (scan_stmt1);
9650 gimple_omp_set_body (input_stmt1, NULL);
9651 gimple_omp_set_body (scan_stmt1, NULL);
9652 gimple_omp_set_body (stmt, NULL);
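/* Create a copy of the loop for the second GOMP_FOR; the input and scan
   phase bodies were detached above so they are not duplicated into it.  */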
9654 gomp_for *new_stmt = as_a <gomp_for *> (gimple_copy (stmt));
9655 gimple_seq new_body = copy_gimple_seq_and_replace_locals (body);
9656 gimple_omp_set_body (stmt, body);
9657 gimple_omp_set_body (input_stmt1, input_body);
9659 gimple_stmt_iterator input2_gsi = gsi_none ();
9660 memset (&wi, 0, sizeof (wi));
9661 wi.val_only = true;
9662 wi.info = (void *) &input2_gsi;
9663 walk_gimple_seq_mod (&new_body, omp_find_scan, NULL, &wi);
9664 gcc_assert (!gsi_end_p (input2_gsi));
9666 gimple *input_stmt2 = gsi_stmt (input2_gsi);
9667 gsi = input2_gsi;
9668 gsi_next (&gsi);
9669 gimple_stmt_iterator scan2_gsi = gsi;
9670 gimple *scan_stmt2 = gsi_stmt (gsi);
9671 gcc_assert (scan_stmt2 && gimple_code (scan_stmt2) == GIMPLE_OMP_SCAN);
9672 gimple_omp_set_body (scan_stmt2, scan_body);
9674 gimple_stmt_iterator input3_gsi = gsi_none ();
9675 gimple_stmt_iterator scan3_gsi = gsi_none ();
9676 gimple_stmt_iterator input4_gsi = gsi_none ();
9677 gimple_stmt_iterator scan4_gsi = gsi_none ();
9678 gimple *input_stmt3 = NULL, *scan_stmt3 = NULL;
9679 gimple *input_stmt4 = NULL, *scan_stmt4 = NULL;
9680 omp_context *input_simd_ctx = NULL, *scan_simd_ctx = NULL;
9681 if (is_for_simd)
9683 memset (&wi, 0, sizeof (wi));
9684 wi.val_only = true;
9685 wi.info = (void *) &input3_gsi;
9686 walk_gimple_seq_mod (&input_body, omp_find_scan, NULL, &wi);
9687 gcc_assert (!gsi_end_p (input3_gsi));
9689 input_stmt3 = gsi_stmt (input3_gsi);
9690 gsi = input3_gsi;
9691 gsi_next (&gsi);
9692 scan3_gsi = gsi;
9693 scan_stmt3 = gsi_stmt (gsi);
9694 gcc_assert (scan_stmt3 && gimple_code (scan_stmt3) == GIMPLE_OMP_SCAN);
9696 memset (&wi, 0, sizeof (wi));
9697 wi.val_only = true;
9698 wi.info = (void *) &input4_gsi;
9699 walk_gimple_seq_mod (&scan_body, omp_find_scan, NULL, &wi);
9700 gcc_assert (!gsi_end_p (input4_gsi));
9702 input_stmt4 = gsi_stmt (input4_gsi);
9703 gsi = input4_gsi;
9704 gsi_next (&gsi);
9705 scan4_gsi = gsi;
9706 scan_stmt4 = gsi_stmt (gsi);
9707 gcc_assert (scan_stmt4 && gimple_code (scan_stmt4) == GIMPLE_OMP_SCAN);
9709 input_simd_ctx = maybe_lookup_ctx (input_stmt3)->outer;
9710 scan_simd_ctx = maybe_lookup_ctx (input_stmt4)->outer;
9713 tree num_threads = create_tmp_var (integer_type_node);
9714 tree thread_num = create_tmp_var (integer_type_node);
9715 tree nthreads_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS);
9716 tree threadnum_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
9717 gimple *g = gimple_build_call (nthreads_decl, 0);
9718 gimple_call_set_lhs (g, num_threads);
9719 gimple_seq_add_stmt (body_p, g);
9720 g = gimple_build_call (threadnum_decl, 0);
9721 gimple_call_set_lhs (g, thread_num);
9722 gimple_seq_add_stmt (body_p, g);
9724 tree ivar = create_tmp_var (sizetype);
9725 tree new_clauses1 = NULL_TREE, new_clauses2 = NULL_TREE;
9726 tree *cp1 = &new_clauses1, *cp2 = &new_clauses2;
9727 tree k = create_tmp_var (unsigned_type_node);
9728 tree l = create_tmp_var (unsigned_type_node);
9730 gimple_seq clist = NULL, mdlist = NULL;
9731 gimple_seq thr01_list = NULL, thrn1_list = NULL;
9732 gimple_seq thr02_list = NULL, thrn2_list = NULL;
9733 gimple_seq scan1_list = NULL, input2_list = NULL;
9734 gimple_seq last_list = NULL, reduc_list = NULL;
9735 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
9736 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
9737 && OMP_CLAUSE_REDUCTION_INSCAN (c))
9739 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
9740 tree var = OMP_CLAUSE_DECL (c);
9741 tree new_var = lookup_decl (var, ctx);
9742 tree var3 = NULL_TREE;
9743 tree new_vard = new_var;
9744 if (omp_is_reference (var))
9745 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
9746 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
9748 var3 = maybe_lookup_decl (new_vard, ctx);
9749 if (var3 == new_vard)
9750 var3 = NULL_TREE;
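/* Create the rpriva (shared, one element per thread) and rprivb
   (per-thread, one element per iteration) arrays described above,
   communicated through _scantemp_ clauses.  */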
9753 tree ptype = build_pointer_type (TREE_TYPE (new_var));
9754 tree rpriva = create_tmp_var (ptype);
9755 tree nc = build_omp_clause (clause_loc, OMP_CLAUSE__SCANTEMP_);
9756 OMP_CLAUSE_DECL (nc) = rpriva;
9757 *cp1 = nc;
9758 cp1 = &OMP_CLAUSE_CHAIN (nc);
9760 tree rprivb = create_tmp_var (ptype);
9761 nc = build_omp_clause (clause_loc, OMP_CLAUSE__SCANTEMP_);
9762 OMP_CLAUSE_DECL (nc) = rprivb;
9763 OMP_CLAUSE__SCANTEMP__ALLOC (nc) = 1;
9764 *cp1 = nc;
9765 cp1 = &OMP_CLAUSE_CHAIN (nc);
9767 tree var2 = create_tmp_var_raw (TREE_TYPE (new_var));
9768 if (new_vard != new_var)
9769 TREE_ADDRESSABLE (var2) = 1;
9770 gimple_add_tmp_var (var2);
9772 tree x = fold_convert_loc (clause_loc, sizetype, thread_num);
9773 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
9774 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
9775 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
9776 tree rpriva_ref = build_simple_mem_ref_loc (clause_loc, x);
9778 x = fold_build2_loc (clause_loc, PLUS_EXPR, integer_type_node,
9779 thread_num, integer_minus_one_node);
9780 x = fold_convert_loc (clause_loc, sizetype, x);
9781 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
9782 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
9783 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
9784 tree rprivam1_ref = build_simple_mem_ref_loc (clause_loc, x);
9786 x = fold_convert_loc (clause_loc, sizetype, l);
9787 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
9788 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
9789 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
9790 tree rprival_ref = build_simple_mem_ref_loc (clause_loc, x);
9792 x = fold_build2_loc (clause_loc, MINUS_EXPR, unsigned_type_node, l, k);
9793 x = fold_convert_loc (clause_loc, sizetype, x);
9794 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
9795 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
9796 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
9797 tree rprivalmk_ref = build_simple_mem_ref_loc (clause_loc, x);
9799 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, ivar,
9800 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
9801 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rprivb), rprivb, x);
9802 tree rprivb_ref = build_simple_mem_ref_loc (clause_loc, x);
9804 tree var4 = is_for_simd ? new_var : var2;
9805 tree var5 = NULL_TREE, var6 = NULL_TREE;
9806 if (is_for_simd)
9808 var5 = lookup_decl (var, input_simd_ctx);
9809 var6 = lookup_decl (var, scan_simd_ctx);
9810 if (new_vard != new_var)
9812 var5 = build_simple_mem_ref_loc (clause_loc, var5);
9813 var6 = build_simple_mem_ref_loc (clause_loc, var6);
9816 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
9818 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
9819 tree val = var2;
9821 x = lang_hooks.decls.omp_clause_default_ctor
9822 (c, var2, build_outer_var_ref (var, ctx));
9823 if (x)
9824 gimplify_and_add (x, &clist);
9826 x = build_outer_var_ref (var, ctx);
9827 x = lang_hooks.decls.omp_clause_assign_op (c, unshare_expr (var4),
9829 gimplify_and_add (x, &thr01_list);
9831 tree y = (DECL_HAS_VALUE_EXPR_P (new_vard)
9832 ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
9833 if (var3)
9835 x = unshare_expr (var4);
9836 x = lang_hooks.decls.omp_clause_assign_op (c, x, var3);
9837 gimplify_and_add (x, &thrn1_list);
9838 x = unshare_expr (var4);
9839 x = lang_hooks.decls.omp_clause_assign_op (c, x, var3);
9840 gimplify_and_add (x, &thr02_list);
9842 else if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
9844 /* Otherwise, assign to it the identity element. */
9845 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
9846 tseq = copy_gimple_seq_and_replace_locals (tseq);
9847 if (!is_for_simd)
9849 if (new_vard != new_var)
9850 val = build_fold_addr_expr_loc (clause_loc, val);
9851 SET_DECL_VALUE_EXPR (new_vard, val);
9852 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
9854 SET_DECL_VALUE_EXPR (placeholder, error_mark_node);
9855 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
9856 lower_omp (&tseq, ctx);
9857 gimple_seq_add_seq (&thrn1_list, tseq);
9858 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
9859 lower_omp (&tseq, ctx);
9860 gimple_seq_add_seq (&thr02_list, tseq);
9861 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
9862 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
9863 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
9864 if (y)
9865 SET_DECL_VALUE_EXPR (new_vard, y);
9866 else
9868 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
9869 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
9873 x = unshare_expr (var4);
9874 x = lang_hooks.decls.omp_clause_assign_op (c, x, rprivam1_ref);
9875 gimplify_and_add (x, &thrn2_list);
9877 if (is_for_simd)
9879 x = unshare_expr (rprivb_ref);
9880 x = lang_hooks.decls.omp_clause_assign_op (c, x, var5);
9881 gimplify_and_add (x, &scan1_list);
9883 else
9885 if (ctx->scan_exclusive)
9887 x = unshare_expr (rprivb_ref);
9888 x = lang_hooks.decls.omp_clause_assign_op (c, x, var2);
9889 gimplify_and_add (x, &scan1_list);
9892 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
9893 tseq = copy_gimple_seq_and_replace_locals (tseq);
9894 SET_DECL_VALUE_EXPR (placeholder, var2);
9895 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
9896 lower_omp (&tseq, ctx);
9897 gimple_seq_add_seq (&scan1_list, tseq);
9899 if (ctx->scan_inclusive)
9901 x = unshare_expr (rprivb_ref);
9902 x = lang_hooks.decls.omp_clause_assign_op (c, x, var2);
9903 gimplify_and_add (x, &scan1_list);
9907 x = unshare_expr (rpriva_ref);
9908 x = lang_hooks.decls.omp_clause_assign_op (c, x,
9909 unshare_expr (var4));
9910 gimplify_and_add (x, &mdlist);
9912 x = unshare_expr (is_for_simd ? var6 : new_var);
9913 x = lang_hooks.decls.omp_clause_assign_op (c, x, var4);
9914 gimplify_and_add (x, &input2_list);
9916 val = rprivb_ref;
9917 if (new_vard != new_var)
9918 val = build_fold_addr_expr_loc (clause_loc, val);
9920 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
9921 tseq = copy_gimple_seq_and_replace_locals (tseq);
9922 SET_DECL_VALUE_EXPR (new_vard, val);
9923 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
9924 if (is_for_simd)
9926 SET_DECL_VALUE_EXPR (placeholder, var6);
9927 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
9929 else
9930 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
9931 lower_omp (&tseq, ctx);
9932 if (y)
9933 SET_DECL_VALUE_EXPR (new_vard, y);
9934 else
9936 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
9937 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
9939 if (!is_for_simd)
9941 SET_DECL_VALUE_EXPR (placeholder, new_var);
9942 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
9943 lower_omp (&tseq, ctx);
9945 gimple_seq_add_seq (&input2_list, tseq);
9947 x = build_outer_var_ref (var, ctx);
9948 x = lang_hooks.decls.omp_clause_assign_op (c, x, rpriva_ref);
9949 gimplify_and_add (x, &last_list);
9951 x = lang_hooks.decls.omp_clause_assign_op (c, var2, rprivalmk_ref);
9952 gimplify_and_add (x, &reduc_list);
9953 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
9954 tseq = copy_gimple_seq_and_replace_locals (tseq);
9955 val = rprival_ref;
9956 if (new_vard != new_var)
9957 val = build_fold_addr_expr_loc (clause_loc, val);
9958 SET_DECL_VALUE_EXPR (new_vard, val);
9959 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
9960 SET_DECL_VALUE_EXPR (placeholder, var2);
9961 lower_omp (&tseq, ctx);
9962 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
9963 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
9964 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
9965 if (y)
9966 SET_DECL_VALUE_EXPR (new_vard, y);
9967 else
9969 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
9970 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
9972 gimple_seq_add_seq (&reduc_list, tseq);
9973 x = lang_hooks.decls.omp_clause_assign_op (c, rprival_ref, var2);
9974 gimplify_and_add (x, &reduc_list);
9976 x = lang_hooks.decls.omp_clause_dtor (c, var2);
9977 if (x)
9978 gimplify_and_add (x, dlist);
9980 else
9982 x = build_outer_var_ref (var, ctx);
9983 gimplify_assign (unshare_expr (var4), x, &thr01_list);
9985 x = omp_reduction_init (c, TREE_TYPE (new_var));
9986 gimplify_assign (unshare_expr (var4), unshare_expr (x),
9987 &thrn1_list);
9988 gimplify_assign (unshare_expr (var4), x, &thr02_list);
9990 gimplify_assign (unshare_expr (var4), rprivam1_ref, &thrn2_list);
9992 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
9993 if (code == MINUS_EXPR)
9994 code = PLUS_EXPR;
9996 if (is_for_simd)
9997 gimplify_assign (unshare_expr (rprivb_ref), var5, &scan1_list);
9998 else
10000 if (ctx->scan_exclusive)
10001 gimplify_assign (unshare_expr (rprivb_ref), var2,
10002 &scan1_list);
10003 x = build2 (code, TREE_TYPE (new_var), var2, new_var);
10004 gimplify_assign (var2, x, &scan1_list);
10005 if (ctx->scan_inclusive)
10006 gimplify_assign (unshare_expr (rprivb_ref), var2,
10007 &scan1_list);
10010 gimplify_assign (unshare_expr (rpriva_ref), unshare_expr (var4),
10011 &mdlist);
10013 x = build2 (code, TREE_TYPE (new_var), var4, rprivb_ref);
10014 gimplify_assign (is_for_simd ? var6 : new_var, x, &input2_list);
10016 gimplify_assign (build_outer_var_ref (var, ctx), rpriva_ref,
10017 &last_list);
10019 x = build2 (code, TREE_TYPE (new_var), rprivalmk_ref,
10020 unshare_expr (rprival_ref));
10021 gimplify_assign (rprival_ref, x, &reduc_list);
10025 g = gimple_build_assign (ivar, PLUS_EXPR, ivar, size_one_node);
10026 gimple_seq_add_stmt (&scan1_list, g);
10027 g = gimple_build_assign (ivar, PLUS_EXPR, ivar, size_one_node);
10028 gimple_seq_add_stmt (gimple_omp_body_ptr (is_for_simd
10029 ? scan_stmt4 : scan_stmt2), g);
10031 tree controlb = create_tmp_var (boolean_type_node);
10032 tree controlp = create_tmp_var (ptr_type_node);
10033 tree nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
10034 OMP_CLAUSE_DECL (nc) = controlb;
10035 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
10036 *cp1 = nc;
10037 cp1 = &OMP_CLAUSE_CHAIN (nc);
10038 nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
10039 OMP_CLAUSE_DECL (nc) = controlp;
10040 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
10041 *cp1 = nc;
10042 cp1 = &OMP_CLAUSE_CHAIN (nc);
10043 nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
10044 OMP_CLAUSE_DECL (nc) = controlb;
10045 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
10046 *cp2 = nc;
10047 cp2 = &OMP_CLAUSE_CHAIN (nc);
10048 nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
10049 OMP_CLAUSE_DECL (nc) = controlp;
10050 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
10051 *cp2 = nc;
10052 cp2 = &OMP_CLAUSE_CHAIN (nc);
10054 *cp1 = gimple_omp_for_clauses (stmt);
10055 gimple_omp_for_set_clauses (stmt, new_clauses1);
10056 *cp2 = gimple_omp_for_clauses (new_stmt);
10057 gimple_omp_for_set_clauses (new_stmt, new_clauses2);
10059 if (is_for_simd)
10061 gimple_seq_add_seq (gimple_omp_body_ptr (scan_stmt3), scan1_list);
10062 gimple_seq_add_seq (gimple_omp_body_ptr (input_stmt4), input2_list);
10064 gsi_insert_seq_after (&input3_gsi, gimple_omp_body (input_stmt3),
10065 GSI_SAME_STMT);
10066 gsi_remove (&input3_gsi, true);
10067 gsi_insert_seq_after (&scan3_gsi, gimple_omp_body (scan_stmt3),
10068 GSI_SAME_STMT);
10069 gsi_remove (&scan3_gsi, true);
10070 gsi_insert_seq_after (&input4_gsi, gimple_omp_body (input_stmt4),
10071 GSI_SAME_STMT);
10072 gsi_remove (&input4_gsi, true);
10073 gsi_insert_seq_after (&scan4_gsi, gimple_omp_body (scan_stmt4),
10074 GSI_SAME_STMT);
10075 gsi_remove (&scan4_gsi, true);
10077 else
10079 gimple_omp_set_body (scan_stmt1, scan1_list);
10080 gimple_omp_set_body (input_stmt2, input2_list);
10083 gsi_insert_seq_after (&input1_gsi, gimple_omp_body (input_stmt1),
10084 GSI_SAME_STMT);
10085 gsi_remove (&input1_gsi, true);
10086 gsi_insert_seq_after (&scan1_gsi, gimple_omp_body (scan_stmt1),
10087 GSI_SAME_STMT);
10088 gsi_remove (&scan1_gsi, true);
10089 gsi_insert_seq_after (&input2_gsi, gimple_omp_body (input_stmt2),
10090 GSI_SAME_STMT);
10091 gsi_remove (&input2_gsi, true);
10092 gsi_insert_seq_after (&scan2_gsi, gimple_omp_body (scan_stmt2),
10093 GSI_SAME_STMT);
10094 gsi_remove (&scan2_gsi, true);
10096 gimple_seq_add_seq (body_p, clist);
10098 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
10099 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
10100 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
10101 g = gimple_build_cond (EQ_EXPR, thread_num, integer_zero_node, lab1, lab2);
10102 gimple_seq_add_stmt (body_p, g);
10103 g = gimple_build_label (lab1);
10104 gimple_seq_add_stmt (body_p, g);
10105 gimple_seq_add_seq (body_p, thr01_list);
10106 g = gimple_build_goto (lab3);
10107 gimple_seq_add_stmt (body_p, g);
10108 g = gimple_build_label (lab2);
10109 gimple_seq_add_stmt (body_p, g);
10110 gimple_seq_add_seq (body_p, thrn1_list);
10111 g = gimple_build_label (lab3);
10112 gimple_seq_add_stmt (body_p, g);
10114 g = gimple_build_assign (ivar, size_zero_node);
10115 gimple_seq_add_stmt (body_p, g);
10117 gimple_seq_add_stmt (body_p, stmt);
10118 gimple_seq_add_seq (body_p, body);
10119 gimple_seq_add_stmt (body_p, gimple_build_omp_continue (fd->loop.v,
10120 fd->loop.v));
10122 g = gimple_build_omp_return (true);
10123 gimple_seq_add_stmt (body_p, g);
10124 gimple_seq_add_seq (body_p, mdlist);
10126 lab1 = create_artificial_label (UNKNOWN_LOCATION);
10127 lab2 = create_artificial_label (UNKNOWN_LOCATION);
10128 g = gimple_build_cond (GT_EXPR, num_threads, integer_one_node, lab1, lab2);
10129 gimple_seq_add_stmt (body_p, g);
10130 g = gimple_build_label (lab1);
10131 gimple_seq_add_stmt (body_p, g);
10133 g = omp_build_barrier (NULL);
10134 gimple_seq_add_stmt (body_p, g);
10136 tree down = create_tmp_var (unsigned_type_node);
10137 g = gimple_build_assign (down, build_zero_cst (unsigned_type_node));
10138 gimple_seq_add_stmt (body_p, g);
10140 g = gimple_build_assign (k, build_one_cst (unsigned_type_node));
10141 gimple_seq_add_stmt (body_p, g);
10143 tree num_threadsu = create_tmp_var (unsigned_type_node);
10144 g = gimple_build_assign (num_threadsu, NOP_EXPR, num_threads);
10145 gimple_seq_add_stmt (body_p, g);
10147 tree thread_numu = create_tmp_var (unsigned_type_node);
10148 g = gimple_build_assign (thread_numu, NOP_EXPR, thread_num);
10149 gimple_seq_add_stmt (body_p, g);
10151 tree thread_nump1 = create_tmp_var (unsigned_type_node);
10152 g = gimple_build_assign (thread_nump1, PLUS_EXPR, thread_numu,
10153 build_int_cst (unsigned_type_node, 1));
10154 gimple_seq_add_stmt (body_p, g);
10156 lab3 = create_artificial_label (UNKNOWN_LOCATION);
10157 g = gimple_build_label (lab3);
10158 gimple_seq_add_stmt (body_p, g);
10160 tree twok = create_tmp_var (unsigned_type_node);
10161 g = gimple_build_assign (twok, LSHIFT_EXPR, k, integer_one_node);
10162 gimple_seq_add_stmt (body_p, g);
10164 tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
10165 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
10166 tree lab6 = create_artificial_label (UNKNOWN_LOCATION);
10167 g = gimple_build_cond (GT_EXPR, twok, num_threadsu, lab4, lab5);
10168 gimple_seq_add_stmt (body_p, g);
10169 g = gimple_build_label (lab4);
10170 gimple_seq_add_stmt (body_p, g);
10171 g = gimple_build_assign (down, build_all_ones_cst (unsigned_type_node));
10172 gimple_seq_add_stmt (body_p, g);
10173 g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
10174 gimple_seq_add_stmt (body_p, g);
10176 g = gimple_build_cond (EQ_EXPR, k, num_threadsu, lab6, lab5);
10177 gimple_seq_add_stmt (body_p, g);
10178 g = gimple_build_label (lab6);
10179 gimple_seq_add_stmt (body_p, g);
10181 g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
10182 gimple_seq_add_stmt (body_p, g);
10184 g = gimple_build_label (lab5);
10185 gimple_seq_add_stmt (body_p, g);
10187 g = gimple_build_assign (twok, LSHIFT_EXPR, k, integer_one_node);
10188 gimple_seq_add_stmt (body_p, g);
10190 tree cplx = create_tmp_var (build_complex_type (unsigned_type_node, false));
10191 DECL_GIMPLE_REG_P (cplx) = 1;
10192 g = gimple_build_call_internal (IFN_MUL_OVERFLOW, 2, thread_nump1, twok);
10193 gimple_call_set_lhs (g, cplx);
10194 gimple_seq_add_stmt (body_p, g);
10195 tree mul = create_tmp_var (unsigned_type_node);
10196 g = gimple_build_assign (mul, REALPART_EXPR,
10197 build1 (REALPART_EXPR, unsigned_type_node, cplx));
10198 gimple_seq_add_stmt (body_p, g);
10199 tree ovf = create_tmp_var (unsigned_type_node);
10200 g = gimple_build_assign (ovf, IMAGPART_EXPR,
10201 build1 (IMAGPART_EXPR, unsigned_type_node, cplx));
10202 gimple_seq_add_stmt (body_p, g);
10204 tree lab7 = create_artificial_label (UNKNOWN_LOCATION);
10205 tree lab8 = create_artificial_label (UNKNOWN_LOCATION);
10206 g = gimple_build_cond (EQ_EXPR, ovf, build_zero_cst (unsigned_type_node),
10207 lab7, lab8);
10208 gimple_seq_add_stmt (body_p, g);
10209 g = gimple_build_label (lab7);
10210 gimple_seq_add_stmt (body_p, g);
10212 tree andv = create_tmp_var (unsigned_type_node);
10213 g = gimple_build_assign (andv, BIT_AND_EXPR, k, down);
10214 gimple_seq_add_stmt (body_p, g);
10215 tree andvm1 = create_tmp_var (unsigned_type_node);
10216 g = gimple_build_assign (andvm1, PLUS_EXPR, andv,
10217 build_minus_one_cst (unsigned_type_node));
10218 gimple_seq_add_stmt (body_p, g);
10220 g = gimple_build_assign (l, PLUS_EXPR, mul, andvm1);
10221 gimple_seq_add_stmt (body_p, g);
10223 tree lab9 = create_artificial_label (UNKNOWN_LOCATION);
10224 g = gimple_build_cond (LT_EXPR, l, num_threadsu, lab9, lab8);
10225 gimple_seq_add_stmt (body_p, g);
10226 g = gimple_build_label (lab9);
10227 gimple_seq_add_stmt (body_p, g);
10228 gimple_seq_add_seq (body_p, reduc_list);
10229 g = gimple_build_label (lab8);
10230 gimple_seq_add_stmt (body_p, g);
10232 tree lab10 = create_artificial_label (UNKNOWN_LOCATION);
10233 tree lab11 = create_artificial_label (UNKNOWN_LOCATION);
10234 tree lab12 = create_artificial_label (UNKNOWN_LOCATION);
10235 g = gimple_build_cond (EQ_EXPR, down, build_zero_cst (unsigned_type_node),
10236 lab10, lab11);
10237 gimple_seq_add_stmt (body_p, g);
10238 g = gimple_build_label (lab10);
10239 gimple_seq_add_stmt (body_p, g);
10240 g = gimple_build_assign (k, LSHIFT_EXPR, k, integer_one_node);
10241 gimple_seq_add_stmt (body_p, g);
10242 g = gimple_build_goto (lab12);
10243 gimple_seq_add_stmt (body_p, g);
10244 g = gimple_build_label (lab11);
10245 gimple_seq_add_stmt (body_p, g);
10246 g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
10247 gimple_seq_add_stmt (body_p, g);
10248 g = gimple_build_label (lab12);
10249 gimple_seq_add_stmt (body_p, g);
10251 g = omp_build_barrier (NULL);
10252 gimple_seq_add_stmt (body_p, g);
10254 g = gimple_build_cond (NE_EXPR, k, build_zero_cst (unsigned_type_node),
10255 lab3, lab2);
10256 gimple_seq_add_stmt (body_p, g);
10258 g = gimple_build_label (lab2);
10259 gimple_seq_add_stmt (body_p, g);
10261 lab1 = create_artificial_label (UNKNOWN_LOCATION);
10262 lab2 = create_artificial_label (UNKNOWN_LOCATION);
10263 lab3 = create_artificial_label (UNKNOWN_LOCATION);
10264 g = gimple_build_cond (EQ_EXPR, thread_num, integer_zero_node, lab1, lab2);
10265 gimple_seq_add_stmt (body_p, g);
10266 g = gimple_build_label (lab1);
10267 gimple_seq_add_stmt (body_p, g);
10268 gimple_seq_add_seq (body_p, thr02_list);
10269 g = gimple_build_goto (lab3);
10270 gimple_seq_add_stmt (body_p, g);
10271 g = gimple_build_label (lab2);
10272 gimple_seq_add_stmt (body_p, g);
10273 gimple_seq_add_seq (body_p, thrn2_list);
10274 g = gimple_build_label (lab3);
10275 gimple_seq_add_stmt (body_p, g);
10277 g = gimple_build_assign (ivar, size_zero_node);
10278 gimple_seq_add_stmt (body_p, g);
10279 gimple_seq_add_stmt (body_p, new_stmt);
10280 gimple_seq_add_seq (body_p, new_body);
10282 gimple_seq new_dlist = NULL;
10283 lab1 = create_artificial_label (UNKNOWN_LOCATION);
10284 lab2 = create_artificial_label (UNKNOWN_LOCATION);
10285 tree num_threadsm1 = create_tmp_var (integer_type_node);
10286 g = gimple_build_assign (num_threadsm1, PLUS_EXPR, num_threads,
10287 integer_minus_one_node);
10288 gimple_seq_add_stmt (&new_dlist, g);
10289 g = gimple_build_cond (EQ_EXPR, thread_num, num_threadsm1, lab1, lab2);
10290 gimple_seq_add_stmt (&new_dlist, g);
10291 g = gimple_build_label (lab1);
10292 gimple_seq_add_stmt (&new_dlist, g);
10293 gimple_seq_add_seq (&new_dlist, last_list);
10294 g = gimple_build_label (lab2);
10295 gimple_seq_add_stmt (&new_dlist, g);
10296 gimple_seq_add_seq (&new_dlist, *dlist);
10297 *dlist = new_dlist;
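/* A minimal sequential C sketch of the work-efficient inclusive scan
   performed above across the per-thread partial sums (P has N elements;
   in the generated code each element is owned by one thread, the two
   sweeps are fused into a single loop controlled by the DOWN flag, and
   GOMP_barrier separates the rounds):

     static void
     inclusive_scan (int *p, int n)
     {
       int k, l;
       for (k = 1; 2 * k <= n; k <<= 1)		// up-sweep
	 for (l = 2 * k - 1; l < n; l += 2 * k)
	   p[l] += p[l - k];
       for (k >>= 1; k >= 1; k >>= 1)		// down-sweep
	 for (l = 3 * k - 1; l < n; l += 2 * k)
	   p[l] += p[l - k];
     }  */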
10300 /* Lower code for an OMP loop directive. */
10302 static void
10303 lower_omp_for (gimple_stmt_iterator *gsi_p, omp_context *ctx)
10305 tree *rhs_p, block;
10306 struct omp_for_data fd, *fdp = NULL;
10307 gomp_for *stmt = as_a <gomp_for *> (gsi_stmt (*gsi_p));
10308 gbind *new_stmt;
10309 gimple_seq omp_for_body, body, dlist, tred_ilist = NULL, tred_dlist = NULL;
10310 gimple_seq cnt_list = NULL, clist = NULL;
10311 gimple_seq oacc_head = NULL, oacc_tail = NULL;
10312 size_t i;
10314 push_gimplify_context ();
10316 lower_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
10318 block = make_node (BLOCK);
10319 new_stmt = gimple_build_bind (NULL, NULL, block);
10320 /* Replace at gsi right away, so that 'stmt' is no longer a member
10321 of a sequence, as we're going to add it to a different
10322 one below. */
10323 gsi_replace (gsi_p, new_stmt, true);
10325 /* Move declaration of temporaries in the loop body before we make
10326 it go away. */
10327 omp_for_body = gimple_omp_body (stmt);
10328 if (!gimple_seq_empty_p (omp_for_body)
10329 && gimple_code (gimple_seq_first_stmt (omp_for_body)) == GIMPLE_BIND)
10331 gbind *inner_bind
10332 = as_a <gbind *> (gimple_seq_first_stmt (omp_for_body));
10333 tree vars = gimple_bind_vars (inner_bind);
10334 gimple_bind_append_vars (new_stmt, vars);
10335 /* bind_vars/BLOCK_VARS are being moved to new_stmt/block, don't
10336 keep them on the inner_bind and its block. */
10337 gimple_bind_set_vars (inner_bind, NULL_TREE);
10338 if (gimple_bind_block (inner_bind))
10339 BLOCK_VARS (gimple_bind_block (inner_bind)) = NULL_TREE;
10342 if (gimple_omp_for_combined_into_p (stmt))
10344 omp_extract_for_data (stmt, &fd, NULL);
10345 fdp = &fd;
10347 /* We need two temporaries with fd.loop.v type (istart/iend)
10348 and then (fd.collapse - 1) temporaries with the same
10349 type for count2 ... countN-1 vars if not constant. */
10350 size_t count = 2;
10351 tree type = fd.iter_type;
10352 if (fd.collapse > 1
10353 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
10354 count += fd.collapse - 1;
10355 bool taskreg_for
10356 = (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR
10357 || gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP);
10358 tree outerc = NULL, *pc = gimple_omp_for_clauses_ptr (stmt);
10359 tree simtc = NULL;
10360 tree clauses = *pc;
10361 if (taskreg_for)
10362 outerc
10363 = omp_find_clause (gimple_omp_taskreg_clauses (ctx->outer->stmt),
10364 OMP_CLAUSE__LOOPTEMP_);
10365 if (ctx->simt_stmt)
10366 simtc = omp_find_clause (gimple_omp_for_clauses (ctx->simt_stmt),
10367 OMP_CLAUSE__LOOPTEMP_);
10368 for (i = 0; i < count; i++)
10370 tree temp;
10371 if (taskreg_for)
10373 gcc_assert (outerc);
10374 temp = lookup_decl (OMP_CLAUSE_DECL (outerc), ctx->outer);
10375 outerc = omp_find_clause (OMP_CLAUSE_CHAIN (outerc),
10376 OMP_CLAUSE__LOOPTEMP_);
10378 else
10380 /* If there are 2 adjacent SIMD stmts, one with _simt_
10381 clause, another without, make sure they have the same
10382 decls in _looptemp_ clauses, because the outer stmt
10383 they are combined into will look up just one inner_stmt. */
10384 if (ctx->simt_stmt)
10385 temp = OMP_CLAUSE_DECL (simtc);
10386 else
10387 temp = create_tmp_var (type);
10388 insert_decl_map (&ctx->outer->cb, temp, temp);
10390 *pc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
10391 OMP_CLAUSE_DECL (*pc) = temp;
10392 pc = &OMP_CLAUSE_CHAIN (*pc);
10393 if (ctx->simt_stmt)
10394 simtc = omp_find_clause (OMP_CLAUSE_CHAIN (simtc),
10395 OMP_CLAUSE__LOOPTEMP_);
10397 *pc = clauses;
10400 /* The pre-body and input clauses go before the lowered GIMPLE_OMP_FOR. */
10401 dlist = NULL;
10402 body = NULL;
10403 tree rclauses
10404 = omp_task_reductions_find_first (gimple_omp_for_clauses (stmt), OMP_FOR,
10405 OMP_CLAUSE_REDUCTION);
10406 tree rtmp = NULL_TREE;
10407 if (rclauses)
10409 tree type = build_pointer_type (pointer_sized_int_node);
10410 tree temp = create_tmp_var (type);
10411 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
10412 OMP_CLAUSE_DECL (c) = temp;
10413 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (stmt);
10414 gimple_omp_for_set_clauses (stmt, c);
10415 lower_omp_task_reductions (ctx, OMP_FOR,
10416 gimple_omp_for_clauses (stmt),
10417 &tred_ilist, &tred_dlist);
10418 rclauses = c;
10419 rtmp = make_ssa_name (type);
10420 gimple_seq_add_stmt (&body, gimple_build_assign (rtmp, temp));
10423 lower_lastprivate_conditional_clauses (gimple_omp_for_clauses_ptr (stmt),
10424 ctx);
10426 lower_rec_input_clauses (gimple_omp_for_clauses (stmt), &body, &dlist, ctx,
10427 fdp);
10428 gimple_seq_add_seq (rclauses ? &tred_ilist : &body,
10429 gimple_omp_for_pre_body (stmt));
10431 lower_omp (gimple_omp_body_ptr (stmt), ctx);
10433 /* Lower the header expressions. At this point, we can assume that
10434 the header is of the form:
10436 #pragma omp for (V = VAL1; V {<|>|<=|>=} VAL2; V = V [+-] VAL3)
10438 We just need to make sure that VAL1, VAL2 and VAL3 are lowered
10439 using the .omp_data_s mapping, if needed. */
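/* E.g., in 'for (V = 0; V < x + y; V++)' the bound 'x + y' is not a
   minimal invariant, so it is evaluated into a formal temporary in
   CNT_LIST before the loop.  */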
10440 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
10442 rhs_p = gimple_omp_for_initial_ptr (stmt, i);
10443 if (!is_gimple_min_invariant (*rhs_p))
10444 *rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
10445 else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
10446 recompute_tree_invariant_for_addr_expr (*rhs_p);
10448 rhs_p = gimple_omp_for_final_ptr (stmt, i);
10449 if (!is_gimple_min_invariant (*rhs_p))
10450 *rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
10451 else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
10452 recompute_tree_invariant_for_addr_expr (*rhs_p);
10454 rhs_p = &TREE_OPERAND (gimple_omp_for_incr (stmt, i), 1);
10455 if (!is_gimple_min_invariant (*rhs_p))
10456 *rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
10458 if (rclauses)
10459 gimple_seq_add_seq (&tred_ilist, cnt_list);
10460 else
10461 gimple_seq_add_seq (&body, cnt_list);
10463 /* Once lowered, extract the bounds and clauses. */
10464 omp_extract_for_data (stmt, &fd, NULL);
10466 if (is_gimple_omp_oacc (ctx->stmt)
10467 && !ctx_in_oacc_kernels_region (ctx))
10468 lower_oacc_head_tail (gimple_location (stmt),
10469 gimple_omp_for_clauses (stmt),
10470 &oacc_head, &oacc_tail, ctx);
10472 /* Add OpenACC partitioning and reduction markers just before the loop. */
10473 if (oacc_head)
10474 gimple_seq_add_seq (&body, oacc_head);
10476 lower_omp_for_lastprivate (&fd, &body, &dlist, &clist, ctx);
10478 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
10479 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
10480 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
10481 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
10483 OMP_CLAUSE_DECL (c) = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
10484 if (DECL_P (OMP_CLAUSE_LINEAR_STEP (c)))
10485 OMP_CLAUSE_LINEAR_STEP (c)
10486 = maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_LINEAR_STEP (c),
10487 ctx);
10490 bool phony_loop = (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_GRID_LOOP
10491 && gimple_omp_for_grid_phony (stmt));
10492 if ((ctx->scan_inclusive || ctx->scan_exclusive)
10493 && gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
10495 gcc_assert (!phony_loop);
10496 lower_omp_for_scan (&body, &dlist, stmt, &fd, ctx);
10498 else
10500 if (!phony_loop)
10501 gimple_seq_add_stmt (&body, stmt);
10502 gimple_seq_add_seq (&body, gimple_omp_body (stmt));
10505 if (!phony_loop)
10506 gimple_seq_add_stmt (&body, gimple_build_omp_continue (fd.loop.v,
10507 fd.loop.v));
10509 /* After the loop, add exit clauses. */
10510 lower_reduction_clauses (gimple_omp_for_clauses (stmt), &body, &clist, ctx);
10512 if (clist)
10514 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START);
10515 gcall *g = gimple_build_call (fndecl, 0);
10516 gimple_seq_add_stmt (&body, g);
10517 gimple_seq_add_seq (&body, clist);
10518 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END);
10519 g = gimple_build_call (fndecl, 0);
10520 gimple_seq_add_stmt (&body, g);
10523 if (ctx->cancellable)
10524 gimple_seq_add_stmt (&body, gimple_build_label (ctx->cancel_label));
10526 gimple_seq_add_seq (&body, dlist);
10528 if (rclauses)
10530 gimple_seq_add_seq (&tred_ilist, body);
10531 body = tred_ilist;
10534 body = maybe_catch_exception (body);
10536 if (!phony_loop)
10538 /* Region exit marker goes at the end of the loop body. */
10539 gimple *g = gimple_build_omp_return (fd.have_nowait);
10540 gimple_seq_add_stmt (&body, g);
10542 gimple_seq_add_seq (&body, tred_dlist);
10544 maybe_add_implicit_barrier_cancel (ctx, g, &body);
10546 if (rclauses)
10547 OMP_CLAUSE_DECL (rclauses) = rtmp;
10550 /* Add OpenACC joining and reduction markers just after the loop. */
10551 if (oacc_tail)
10552 gimple_seq_add_seq (&body, oacc_tail);
10554 pop_gimplify_context (new_stmt);
10556 gimple_bind_append_vars (new_stmt, ctx->block_vars);
10557 maybe_remove_omp_member_access_dummy_vars (new_stmt);
10558 BLOCK_VARS (block) = gimple_bind_vars (new_stmt);
10559 if (BLOCK_VARS (block))
10560 TREE_USED (block) = 1;
10562 gimple_bind_set_body (new_stmt, body);
10563 gimple_omp_set_body (stmt, NULL);
10564 gimple_omp_for_set_pre_body (stmt, NULL);
10567 /* Callback for walk_stmts. Check that the walked statements contain exactly
10568 one GIMPLE_OMP_FOR or GIMPLE_OMP_SECTIONS (then *WI->INFO = 1, else -1). */
10570 static tree
10571 check_combined_parallel (gimple_stmt_iterator *gsi_p,
10572 bool *handled_ops_p,
10573 struct walk_stmt_info *wi)
10575 int *info = (int *) wi->info;
10576 gimple *stmt = gsi_stmt (*gsi_p);
10578 *handled_ops_p = true;
10579 switch (gimple_code (stmt))
10581 WALK_SUBSTMTS;
10583 case GIMPLE_DEBUG:
10584 break;
10585 case GIMPLE_OMP_FOR:
10586 case GIMPLE_OMP_SECTIONS:
10587 *info = *info == 0 ? 1 : -1;
10588 break;
10589 default:
10590 *info = -1;
10591 break;
10593 return NULL;
10596 struct omp_taskcopy_context
10598 /* This field must be at the beginning, as we do "inheritance": Some
10599 callback functions for tree-inline.c (e.g., omp_copy_decl)
10600 receive a copy_body_data pointer that is up-casted to an
10601 omp_context pointer. */
10602 copy_body_data cb;
10603 omp_context *ctx;
10606 static tree
10607 task_copyfn_copy_decl (tree var, copy_body_data *cb)
10609 struct omp_taskcopy_context *tcctx = (struct omp_taskcopy_context *) cb;
10611 if (splay_tree_lookup (tcctx->ctx->sfield_map, (splay_tree_key) var))
10612 return create_tmp_var (TREE_TYPE (var));
10614 return var;
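/* Return a variant of ORIG_TYPE with each field's type, size and offset
   remapped through TCCTX, so that variably modified (VLA) components
   refer to the copy function's own temporaries.  */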
10617 static tree
10618 task_copyfn_remap_type (struct omp_taskcopy_context *tcctx, tree orig_type)
10620 tree name, new_fields = NULL, type, f;
10622 type = lang_hooks.types.make_type (RECORD_TYPE);
10623 name = DECL_NAME (TYPE_NAME (orig_type));
10624 name = build_decl (gimple_location (tcctx->ctx->stmt),
10625 TYPE_DECL, name, type);
10626 TYPE_NAME (type) = name;
10628 for (f = TYPE_FIELDS (orig_type); f ; f = TREE_CHAIN (f))
10630 tree new_f = copy_node (f);
10631 DECL_CONTEXT (new_f) = type;
10632 TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &tcctx->cb);
10633 TREE_CHAIN (new_f) = new_fields;
10634 walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &tcctx->cb, NULL);
10635 walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r, &tcctx->cb, NULL);
10636 walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
10637 &tcctx->cb, NULL);
10638 new_fields = new_f;
10639 tcctx->cb.decl_map->put (f, new_f);
10641 TYPE_FIELDS (type) = nreverse (new_fields);
10642 layout_type (type);
10643 return type;
10646 /* Create task copyfn, which copies data from the sender record (SARG) into the task's own record (ARG). */
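/* A rough sketch (field names are hypothetical) of the function built here:

     void .omp_task_copyfn (struct .omp_data_t *arg,
			    struct .omp_data_s *sarg)
     {
       arg->shared_p = sarg->shared_p;	// shared: copy the pointer
       arg->fp = copy_ctor (sarg->fp);	// firstprivate: copy construct
     }  */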
10648 static void
10649 create_task_copyfn (gomp_task *task_stmt, omp_context *ctx)
10651 struct function *child_cfun;
10652 tree child_fn, t, c, src, dst, f, sf, arg, sarg, decl;
10653 tree record_type, srecord_type, bind, list;
10654 bool record_needs_remap = false, srecord_needs_remap = false;
10655 splay_tree_node n;
10656 struct omp_taskcopy_context tcctx;
10657 location_t loc = gimple_location (task_stmt);
10658 size_t looptempno = 0;
10660 child_fn = gimple_omp_task_copy_fn (task_stmt);
10661 child_cfun = DECL_STRUCT_FUNCTION (child_fn);
10662 gcc_assert (child_cfun->cfg == NULL);
10663 DECL_SAVED_TREE (child_fn) = alloc_stmt_list ();
10665 /* Reset DECL_CONTEXT on function arguments. */
10666 for (t = DECL_ARGUMENTS (child_fn); t; t = DECL_CHAIN (t))
10667 DECL_CONTEXT (t) = child_fn;
10669 /* Populate the function. */
10670 push_gimplify_context ();
10671 push_cfun (child_cfun);
10673 bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
10674 TREE_SIDE_EFFECTS (bind) = 1;
10675 list = NULL;
10676 DECL_SAVED_TREE (child_fn) = bind;
10677 DECL_SOURCE_LOCATION (child_fn) = gimple_location (task_stmt);
10679 /* Remap src and dst argument types if needed. */
10680 record_type = ctx->record_type;
10681 srecord_type = ctx->srecord_type;
10682 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
10683 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
10685 record_needs_remap = true;
10686 break;
10688 for (f = TYPE_FIELDS (srecord_type); f ; f = DECL_CHAIN (f))
10689 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
10691 srecord_needs_remap = true;
10692 break;
10695 if (record_needs_remap || srecord_needs_remap)
10697 memset (&tcctx, '\0', sizeof (tcctx));
10698 tcctx.cb.src_fn = ctx->cb.src_fn;
10699 tcctx.cb.dst_fn = child_fn;
10700 tcctx.cb.src_node = cgraph_node::get (tcctx.cb.src_fn);
10701 gcc_checking_assert (tcctx.cb.src_node);
10702 tcctx.cb.dst_node = tcctx.cb.src_node;
10703 tcctx.cb.src_cfun = ctx->cb.src_cfun;
10704 tcctx.cb.copy_decl = task_copyfn_copy_decl;
10705 tcctx.cb.eh_lp_nr = 0;
10706 tcctx.cb.transform_call_graph_edges = CB_CGE_MOVE;
10707 tcctx.cb.decl_map = new hash_map<tree, tree>;
10708 tcctx.ctx = ctx;
10710 if (record_needs_remap)
10711 record_type = task_copyfn_remap_type (&tcctx, record_type);
10712 if (srecord_needs_remap)
10713 srecord_type = task_copyfn_remap_type (&tcctx, srecord_type);
10715 else
10716 tcctx.cb.decl_map = NULL;
10718 arg = DECL_ARGUMENTS (child_fn);
10719 TREE_TYPE (arg) = build_pointer_type (record_type);
10720 sarg = DECL_CHAIN (arg);
10721 TREE_TYPE (sarg) = build_pointer_type (srecord_type);
10723 /* First pass: initialize temporaries used in record_type and srecord_type
10724 sizes and field offsets. */
10725 if (tcctx.cb.decl_map)
10726 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
10727 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
10729 tree *p;
10731 decl = OMP_CLAUSE_DECL (c);
10732 p = tcctx.cb.decl_map->get (decl);
10733 if (p == NULL)
10734 continue;
10735 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
10736 sf = (tree) n->value;
10737 sf = *tcctx.cb.decl_map->get (sf);
10738 src = build_simple_mem_ref_loc (loc, sarg);
10739 src = omp_build_component_ref (src, sf);
10740 t = build2 (MODIFY_EXPR, TREE_TYPE (*p), *p, src);
10741 append_to_statement_list (t, &list);
10744 /* Second pass: copy shared var pointers and copy construct non-VLA
10745 firstprivate vars. */
10746 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
10747 switch (OMP_CLAUSE_CODE (c))
10749 splay_tree_key key;
10750 case OMP_CLAUSE_SHARED:
10751 decl = OMP_CLAUSE_DECL (c);
10752 key = (splay_tree_key) decl;
10753 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
10754 key = (splay_tree_key) &DECL_UID (decl);
10755 n = splay_tree_lookup (ctx->field_map, key);
10756 if (n == NULL)
10757 break;
10758 f = (tree) n->value;
10759 if (tcctx.cb.decl_map)
10760 f = *tcctx.cb.decl_map->get (f);
10761 n = splay_tree_lookup (ctx->sfield_map, key);
10762 sf = (tree) n->value;
10763 if (tcctx.cb.decl_map)
10764 sf = *tcctx.cb.decl_map->get (sf);
10765 src = build_simple_mem_ref_loc (loc, sarg);
10766 src = omp_build_component_ref (src, sf);
10767 dst = build_simple_mem_ref_loc (loc, arg);
10768 dst = omp_build_component_ref (dst, f);
10769 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
10770 append_to_statement_list (t, &list);
10771 break;
10772 case OMP_CLAUSE_REDUCTION:
10773 case OMP_CLAUSE_IN_REDUCTION:
10774 decl = OMP_CLAUSE_DECL (c);
10775 if (TREE_CODE (decl) == MEM_REF)
10777 decl = TREE_OPERAND (decl, 0);
10778 if (TREE_CODE (decl) == POINTER_PLUS_EXPR)
10779 decl = TREE_OPERAND (decl, 0);
10780 if (TREE_CODE (decl) == INDIRECT_REF
10781 || TREE_CODE (decl) == ADDR_EXPR)
10782 decl = TREE_OPERAND (decl, 0);
10784 key = (splay_tree_key) decl;
10785 n = splay_tree_lookup (ctx->field_map, key);
10786 if (n == NULL)
10787 break;
10788 f = (tree) n->value;
10789 if (tcctx.cb.decl_map)
10790 f = *tcctx.cb.decl_map->get (f);
10791 n = splay_tree_lookup (ctx->sfield_map, key);
10792 sf = (tree) n->value;
10793 if (tcctx.cb.decl_map)
10794 sf = *tcctx.cb.decl_map->get (sf);
10795 src = build_simple_mem_ref_loc (loc, sarg);
10796 src = omp_build_component_ref (src, sf);
10797 if (decl != OMP_CLAUSE_DECL (c)
10798 && TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
10799 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
10800 src = build_simple_mem_ref_loc (loc, src);
10801 dst = build_simple_mem_ref_loc (loc, arg);
10802 dst = omp_build_component_ref (dst, f);
10803 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
10804 append_to_statement_list (t, &list);
10805 break;
10806 case OMP_CLAUSE__LOOPTEMP_:
10807 /* Fields for the first two _looptemp_ clauses are initialized by
10808 GOMP_taskloop*; the rest are handled like firstprivate. */
10809 if (looptempno < 2)
10811 looptempno++;
10812 break;
10814 /* FALLTHRU */
10815 case OMP_CLAUSE__REDUCTEMP_:
10816 case OMP_CLAUSE_FIRSTPRIVATE:
10817 decl = OMP_CLAUSE_DECL (c);
10818 if (is_variable_sized (decl))
10819 break;
10820 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
10821 if (n == NULL)
10822 break;
10823 f = (tree) n->value;
10824 if (tcctx.cb.decl_map)
10825 f = *tcctx.cb.decl_map->get (f);
10826 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
10827 if (n != NULL)
10829 sf = (tree) n->value;
10830 if (tcctx.cb.decl_map)
10831 sf = *tcctx.cb.decl_map->get (sf);
10832 src = build_simple_mem_ref_loc (loc, sarg);
10833 src = omp_build_component_ref (src, sf);
10834 if (use_pointer_for_field (decl, NULL) || omp_is_reference (decl))
10835 src = build_simple_mem_ref_loc (loc, src);
10837 else
10838 src = decl;
10839 dst = build_simple_mem_ref_loc (loc, arg);
10840 dst = omp_build_component_ref (dst, f);
10841 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
10842 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
10843 else
10844 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
10845 append_to_statement_list (t, &list);
10846 break;
10847 case OMP_CLAUSE_PRIVATE:
10848 if (! OMP_CLAUSE_PRIVATE_OUTER_REF (c))
10849 break;
10850 decl = OMP_CLAUSE_DECL (c);
10851 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
10852 f = (tree) n->value;
10853 if (tcctx.cb.decl_map)
10854 f = *tcctx.cb.decl_map->get (f);
10855 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
10856 if (n != NULL)
10858 sf = (tree) n->value;
10859 if (tcctx.cb.decl_map)
10860 sf = *tcctx.cb.decl_map->get (sf);
10861 src = build_simple_mem_ref_loc (loc, sarg);
10862 src = omp_build_component_ref (src, sf);
10863 if (use_pointer_for_field (decl, NULL))
10864 src = build_simple_mem_ref_loc (loc, src);
10866 else
10867 src = decl;
10868 dst = build_simple_mem_ref_loc (loc, arg);
10869 dst = omp_build_component_ref (dst, f);
10870 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
10871 append_to_statement_list (t, &list);
10872 break;
10873 default:
10874 break;
10877 /* Last pass: handle VLA firstprivates. */
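/* A VLA firstprivate travels in two fields: the payload and a pointer
   to it (the VLA's DECL_VALUE_EXPR is an INDIRECT_REF of that pointer).
   Copy the payload with the copy constructor first, then point the
   destination's pointer field at the fresh copy.  */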
10878 if (tcctx.cb.decl_map)
10879 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
10880 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
10882 tree ind, ptr, df;
10884 decl = OMP_CLAUSE_DECL (c);
10885 if (!is_variable_sized (decl))
10886 continue;
10887 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
10888 if (n == NULL)
10889 continue;
10890 f = (tree) n->value;
10891 f = *tcctx.cb.decl_map->get (f);
10892 gcc_assert (DECL_HAS_VALUE_EXPR_P (decl));
10893 ind = DECL_VALUE_EXPR (decl);
10894 gcc_assert (TREE_CODE (ind) == INDIRECT_REF);
10895 gcc_assert (DECL_P (TREE_OPERAND (ind, 0)));
10896 n = splay_tree_lookup (ctx->sfield_map,
10897 (splay_tree_key) TREE_OPERAND (ind, 0));
10898 sf = (tree) n->value;
10899 sf = *tcctx.cb.decl_map->get (sf);
10900 src = build_simple_mem_ref_loc (loc, sarg);
10901 src = omp_build_component_ref (src, sf);
10902 src = build_simple_mem_ref_loc (loc, src);
10903 dst = build_simple_mem_ref_loc (loc, arg);
10904 dst = omp_build_component_ref (dst, f);
10905 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
10906 append_to_statement_list (t, &list);
10907 n = splay_tree_lookup (ctx->field_map,
10908 (splay_tree_key) TREE_OPERAND (ind, 0));
10909 df = (tree) n->value;
10910 df = *tcctx.cb.decl_map->get (df);
10911 ptr = build_simple_mem_ref_loc (loc, arg);
10912 ptr = omp_build_component_ref (ptr, df);
10913 t = build2 (MODIFY_EXPR, TREE_TYPE (ptr), ptr,
10914 build_fold_addr_expr_loc (loc, dst));
10915 append_to_statement_list (t, &list);
10918 t = build1 (RETURN_EXPR, void_type_node, NULL);
10919 append_to_statement_list (t, &list);
10921 if (tcctx.cb.decl_map)
10922 delete tcctx.cb.decl_map;
10923 pop_gimplify_context (NULL);
10924 BIND_EXPR_BODY (bind) = list;
10925 pop_cfun ();
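/* Lower the OMP_CLAUSE_DEPEND clauses found in *PCLAUSES into the flat
   pointer array the runtime expects: a small counts header followed by
   the addresses of the depend objects, grouped by dependence kind.
   Statements initializing the array go into *ISEQ, a clobber ending its
   lifetime into *OSEQ, and an OMP_CLAUSE_DEPEND_LAST clause holding the
   array's address is prepended to *PCLAUSES.  */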
10928 static void
10929 lower_depend_clauses (tree *pclauses, gimple_seq *iseq, gimple_seq *oseq)
10931 tree c, clauses;
10932 gimple *g;
10933 size_t cnt[4] = { 0, 0, 0, 0 }, idx = 2, i;
10935 clauses = omp_find_clause (*pclauses, OMP_CLAUSE_DEPEND);
10936 gcc_assert (clauses);
10937 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
10938 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
10939 switch (OMP_CLAUSE_DEPEND_KIND (c))
10941 case OMP_CLAUSE_DEPEND_LAST:
10942 /* Lowering already done at gimplification. */
10943 return;
10944 case OMP_CLAUSE_DEPEND_IN:
10945 cnt[2]++;
10946 break;
10947 case OMP_CLAUSE_DEPEND_OUT:
10948 case OMP_CLAUSE_DEPEND_INOUT:
10949 cnt[0]++;
10950 break;
10951 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
10952 cnt[1]++;
10953 break;
10954 case OMP_CLAUSE_DEPEND_DEPOBJ:
10955 cnt[3]++;
10956 break;
10957 case OMP_CLAUSE_DEPEND_SOURCE:
10958 case OMP_CLAUSE_DEPEND_SINK:
10959 /* FALLTHRU */
10960 default:
10961 gcc_unreachable ();
10963 if (cnt[1] || cnt[3])
10964 idx = 5;
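/* Illustrative layout: with only in/out/inout dependences the legacy
   format
     { n, n-out, addr-1, ..., addr-n }
   is built (idx == 2); if mutexinoutset or depobj dependences appear,
   the extended format
     { 0, n, n-out, n-mutexinoutset, n-in, addr-1, ..., addr-n }
   is built instead (idx == 5), with the depobj addresses last.  */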
10965 size_t total = cnt[0] + cnt[1] + cnt[2] + cnt[3];
10966 tree type = build_array_type_nelts (ptr_type_node, total + idx);
10967 tree array = create_tmp_var (type);
10968 TREE_ADDRESSABLE (array) = 1;
10969 tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
10970 NULL_TREE);
10971 if (idx == 5)
10973 g = gimple_build_assign (r, build_int_cst (ptr_type_node, 0));
10974 gimple_seq_add_stmt (iseq, g);
10975 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
10976 NULL_TREE);
10978 g = gimple_build_assign (r, build_int_cst (ptr_type_node, total));
10979 gimple_seq_add_stmt (iseq, g);
10980 for (i = 0; i < (idx == 5 ? 3 : 1); i++)
10982 r = build4 (ARRAY_REF, ptr_type_node, array,
10983 size_int (i + 1 + (idx == 5)), NULL_TREE, NULL_TREE);
10984 g = gimple_build_assign (r, build_int_cst (ptr_type_node, cnt[i]));
10985 gimple_seq_add_stmt (iseq, g);
10987 for (i = 0; i < 4; i++)
10989 if (cnt[i] == 0)
10990 continue;
10991 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
10992 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
10993 continue;
10994 else
10996 switch (OMP_CLAUSE_DEPEND_KIND (c))
10998 case OMP_CLAUSE_DEPEND_IN:
10999 if (i != 2)
11000 continue;
11001 break;
11002 case OMP_CLAUSE_DEPEND_OUT:
11003 case OMP_CLAUSE_DEPEND_INOUT:
11004 if (i != 0)
11005 continue;
11006 break;
11007 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
11008 if (i != 1)
11009 continue;
11010 break;
11011 case OMP_CLAUSE_DEPEND_DEPOBJ:
11012 if (i != 3)
11013 continue;
11014 break;
11015 default:
11016 gcc_unreachable ();
11018 tree t = OMP_CLAUSE_DECL (c);
11019 t = fold_convert (ptr_type_node, t);
11020 gimplify_expr (&t, iseq, NULL, is_gimple_val, fb_rvalue);
11021 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (idx++),
11022 NULL_TREE, NULL_TREE);
11023 g = gimple_build_assign (r, t);
11024 gimple_seq_add_stmt (iseq, g);
11027 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
11028 OMP_CLAUSE_DEPEND_KIND (c) = OMP_CLAUSE_DEPEND_LAST;
11029 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
11030 OMP_CLAUSE_CHAIN (c) = *pclauses;
11031 *pclauses = c;
11032 tree clobber = build_constructor (type, NULL);
11033 TREE_THIS_VOLATILE (clobber) = 1;
11034 g = gimple_build_assign (array, clobber);
11035 gimple_seq_add_stmt (oseq, g);
11038 /* Lower the OpenMP parallel or task directive in the current statement
11039 in GSI_P. CTX holds context information for the directive. */
11041 static void
11042 lower_omp_taskreg (gimple_stmt_iterator *gsi_p, omp_context *ctx)
11044 tree clauses;
11045 tree child_fn, t;
11046 gimple *stmt = gsi_stmt (*gsi_p);
11047 gbind *par_bind, *bind, *dep_bind = NULL;
11048 gimple_seq par_body;
11049 location_t loc = gimple_location (stmt);
11051 clauses = gimple_omp_taskreg_clauses (stmt);
11052 if (gimple_code (stmt) == GIMPLE_OMP_TASK
11053 && gimple_omp_task_taskwait_p (stmt))
11055 par_bind = NULL;
11056 par_body = NULL;
11058 else
11060 par_bind
11061 = as_a <gbind *> (gimple_seq_first_stmt (gimple_omp_body (stmt)));
11062 par_body = gimple_bind_body (par_bind);
11064 child_fn = ctx->cb.dst_fn;
11065 if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL
11066 && !gimple_omp_parallel_combined_p (stmt))
11068 struct walk_stmt_info wi;
11069 int ws_num = 0;
11071 memset (&wi, 0, sizeof (wi));
11072 wi.info = &ws_num;
11073 wi.val_only = true;
11074 walk_gimple_seq (par_body, check_combined_parallel, NULL, &wi);
11075 if (ws_num == 1)
11076 gimple_omp_parallel_set_combined_p (stmt, true);
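/* If the body turned out to contain exactly one workshare region, the
   parallel can still be expanded as a combined construct, so mark it
   combined after the fact.  */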
11078 gimple_seq dep_ilist = NULL;
11079 gimple_seq dep_olist = NULL;
11080 if (gimple_code (stmt) == GIMPLE_OMP_TASK
11081 && omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
11083 push_gimplify_context ();
11084 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
11085 lower_depend_clauses (gimple_omp_task_clauses_ptr (stmt),
11086 &dep_ilist, &dep_olist);
11089 if (gimple_code (stmt) == GIMPLE_OMP_TASK
11090 && gimple_omp_task_taskwait_p (stmt))
11092 if (dep_bind)
11094 gsi_replace (gsi_p, dep_bind, true);
11095 gimple_bind_add_seq (dep_bind, dep_ilist);
11096 gimple_bind_add_stmt (dep_bind, stmt);
11097 gimple_bind_add_seq (dep_bind, dep_olist);
11098 pop_gimplify_context (dep_bind);
11100 return;
11103 if (ctx->srecord_type)
11104 create_task_copyfn (as_a <gomp_task *> (stmt), ctx);
11106 gimple_seq tskred_ilist = NULL;
11107 gimple_seq tskred_olist = NULL;
11108 if ((is_task_ctx (ctx)
11109 && gimple_omp_task_taskloop_p (ctx->stmt)
11110 && omp_find_clause (gimple_omp_task_clauses (ctx->stmt),
11111 OMP_CLAUSE_REDUCTION))
11112 || (is_parallel_ctx (ctx)
11113 && omp_find_clause (gimple_omp_parallel_clauses (stmt),
11114 OMP_CLAUSE__REDUCTEMP_)))
11116 if (dep_bind == NULL)
11118 push_gimplify_context ();
11119 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
11121 lower_omp_task_reductions (ctx, is_task_ctx (ctx) ? OMP_TASKLOOP
11122 : OMP_PARALLEL,
11123 gimple_omp_taskreg_clauses (ctx->stmt),
11124 &tskred_ilist, &tskred_olist);
11127 push_gimplify_context ();
11129 gimple_seq par_olist = NULL;
11130 gimple_seq par_ilist = NULL;
11131 gimple_seq par_rlist = NULL;
11132 bool phony_construct = gimple_code (stmt) == GIMPLE_OMP_PARALLEL
11133 && gimple_omp_parallel_grid_phony (as_a <gomp_parallel *> (stmt));
11134 if (phony_construct && ctx->record_type)
11136 gcc_checking_assert (!ctx->receiver_decl);
11137 ctx->receiver_decl = create_tmp_var
11138 (build_reference_type (ctx->record_type), ".omp_rec");
11140 lower_rec_input_clauses (clauses, &par_ilist, &par_olist, ctx, NULL);
11141 lower_omp (&par_body, ctx);
11142 if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL)
11143 lower_reduction_clauses (clauses, &par_rlist, NULL, ctx);
11145 /* Declare all the variables created by mapping and the variables
11146 declared in the scope of the parallel body. */
11147 record_vars_into (ctx->block_vars, child_fn);
11148 maybe_remove_omp_member_access_dummy_vars (par_bind);
11149 record_vars_into (gimple_bind_vars (par_bind), child_fn);
11151 if (ctx->record_type)
11153 ctx->sender_decl
11154 = create_tmp_var (ctx->srecord_type ? ctx->srecord_type
11155 : ctx->record_type, ".omp_data_o");
11156 DECL_NAMELESS (ctx->sender_decl) = 1;
11157 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
11158 gimple_omp_taskreg_set_data_arg (stmt, ctx->sender_decl);
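/* The address of this .omp_data_o instance is what gets passed to the
   child function; the first statement of the new child body below
   rebinds ctx->receiver_decl to it.  */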
11161 gimple_seq olist = NULL;
11162 gimple_seq ilist = NULL;
11163 lower_send_clauses (clauses, &ilist, &olist, ctx);
11164 lower_send_shared_vars (&ilist, &olist, ctx);
11166 if (ctx->record_type)
11168 tree clobber = build_constructor (TREE_TYPE (ctx->sender_decl), NULL);
11169 TREE_THIS_VOLATILE (clobber) = 1;
11170 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
11171 clobber));
11174 /* Once all the expansions are done, sequence all the different
11175 fragments inside gimple_omp_body. */
11177 gimple_seq new_body = NULL;
11179 if (ctx->record_type)
11181 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
11182 /* fixup_child_record_type might have changed receiver_decl's type. */
11183 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
11184 gimple_seq_add_stmt (&new_body,
11185 gimple_build_assign (ctx->receiver_decl, t));
11188 gimple_seq_add_seq (&new_body, par_ilist);
11189 gimple_seq_add_seq (&new_body, par_body);
11190 gimple_seq_add_seq (&new_body, par_rlist);
11191 if (ctx->cancellable)
11192 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
11193 gimple_seq_add_seq (&new_body, par_olist);
11194 new_body = maybe_catch_exception (new_body);
11195 if (gimple_code (stmt) == GIMPLE_OMP_TASK)
11196 gimple_seq_add_stmt (&new_body,
11197 gimple_build_omp_continue (integer_zero_node,
11198 integer_zero_node));
11199 if (!phony_construct)
11201 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
11202 gimple_omp_set_body (stmt, new_body);
11205 if (dep_bind && gimple_bind_block (par_bind) == NULL_TREE)
11206 bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
11207 else
11208 bind = gimple_build_bind (NULL, NULL, gimple_bind_block (par_bind));
11209 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
11210 gimple_bind_add_seq (bind, ilist);
11211 if (!phony_construct)
11212 gimple_bind_add_stmt (bind, stmt);
11213 else
11214 gimple_bind_add_seq (bind, new_body);
11215 gimple_bind_add_seq (bind, olist);
11217 pop_gimplify_context (NULL);
11219 if (dep_bind)
11221 gimple_bind_add_seq (dep_bind, dep_ilist);
11222 gimple_bind_add_seq (dep_bind, tskred_ilist);
11223 gimple_bind_add_stmt (dep_bind, bind);
11224 gimple_bind_add_seq (dep_bind, tskred_olist);
11225 gimple_bind_add_seq (dep_bind, dep_olist);
11226 pop_gimplify_context (dep_bind);
11230 /* Lower the GIMPLE_OMP_TARGET in the current statement
11231 in GSI_P. CTX holds context information for the directive. */
11233 static void
11234 lower_omp_target (gimple_stmt_iterator *gsi_p, omp_context *ctx)
11236 tree clauses;
11237 tree child_fn, t, c;
11238 gomp_target *stmt = as_a <gomp_target *> (gsi_stmt (*gsi_p));
11239 gbind *tgt_bind, *bind, *dep_bind = NULL;
11240 gimple_seq tgt_body, olist, ilist, fplist, new_body;
11241 location_t loc = gimple_location (stmt);
11242 bool offloaded, data_region;
11243 unsigned int map_cnt = 0;
11245 offloaded = is_gimple_omp_offloaded (stmt);
11246 switch (gimple_omp_target_kind (stmt))
11248 case GF_OMP_TARGET_KIND_REGION:
11249 case GF_OMP_TARGET_KIND_UPDATE:
11250 case GF_OMP_TARGET_KIND_ENTER_DATA:
11251 case GF_OMP_TARGET_KIND_EXIT_DATA:
11252 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
11253 case GF_OMP_TARGET_KIND_OACC_KERNELS:
11254 case GF_OMP_TARGET_KIND_OACC_UPDATE:
11255 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
11256 case GF_OMP_TARGET_KIND_OACC_DECLARE:
11257 data_region = false;
11258 break;
11259 case GF_OMP_TARGET_KIND_DATA:
11260 case GF_OMP_TARGET_KIND_OACC_DATA:
11261 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
11262 data_region = true;
11263 break;
11264 default:
11265 gcc_unreachable ();
11268 clauses = gimple_omp_target_clauses (stmt);
11270 gimple_seq dep_ilist = NULL;
11271 gimple_seq dep_olist = NULL;
11272 if (omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
11274 push_gimplify_context ();
11275 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
11276 lower_depend_clauses (gimple_omp_target_clauses_ptr (stmt),
11277 &dep_ilist, &dep_olist);
11280 tgt_bind = NULL;
11281 tgt_body = NULL;
11282 if (offloaded)
11284 tgt_bind = gimple_seq_first_stmt_as_a_bind (gimple_omp_body (stmt));
11285 tgt_body = gimple_bind_body (tgt_bind);
11287 else if (data_region)
11288 tgt_body = gimple_omp_body (stmt);
11289 child_fn = ctx->cb.dst_fn;
11291 push_gimplify_context ();
11292 fplist = NULL;
11294 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
11295 switch (OMP_CLAUSE_CODE (c))
11297 tree var, x;
11299 default:
11300 break;
11301 case OMP_CLAUSE_MAP:
11302 #if CHECKING_P
11303 /* First check what we're prepared to handle in the following. */
11304 switch (OMP_CLAUSE_MAP_KIND (c))
11306 case GOMP_MAP_ALLOC:
11307 case GOMP_MAP_TO:
11308 case GOMP_MAP_FROM:
11309 case GOMP_MAP_TOFROM:
11310 case GOMP_MAP_POINTER:
11311 case GOMP_MAP_TO_PSET:
11312 case GOMP_MAP_DELETE:
11313 case GOMP_MAP_RELEASE:
11314 case GOMP_MAP_ALWAYS_TO:
11315 case GOMP_MAP_ALWAYS_FROM:
11316 case GOMP_MAP_ALWAYS_TOFROM:
11317 case GOMP_MAP_FIRSTPRIVATE_POINTER:
11318 case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
11319 case GOMP_MAP_STRUCT:
11320 case GOMP_MAP_ALWAYS_POINTER:
11321 break;
11322 case GOMP_MAP_FORCE_ALLOC:
11323 case GOMP_MAP_FORCE_TO:
11324 case GOMP_MAP_FORCE_FROM:
11325 case GOMP_MAP_FORCE_TOFROM:
11326 case GOMP_MAP_FORCE_PRESENT:
11327 case GOMP_MAP_FORCE_DEVICEPTR:
11328 case GOMP_MAP_DEVICE_RESIDENT:
11329 case GOMP_MAP_LINK:
11330 gcc_assert (is_gimple_omp_oacc (stmt));
11331 break;
11332 default:
11333 gcc_unreachable ();
11335 #endif
11336 /* FALLTHRU */
11337 case OMP_CLAUSE_TO:
11338 case OMP_CLAUSE_FROM:
11339 oacc_firstprivate:
11340 var = OMP_CLAUSE_DECL (c);
11341 if (!DECL_P (var))
11343 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP
11344 || (!OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
11345 && (OMP_CLAUSE_MAP_KIND (c)
11346 != GOMP_MAP_FIRSTPRIVATE_POINTER)))
11347 map_cnt++;
11348 continue;
11351 if (DECL_SIZE (var)
11352 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
11354 tree var2 = DECL_VALUE_EXPR (var);
11355 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
11356 var2 = TREE_OPERAND (var2, 0);
11357 gcc_assert (DECL_P (var2));
11358 var = var2;
11361 if (offloaded
11362 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
11363 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
11364 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
11366 if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
11368 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx))
11369 && varpool_node::get_create (var)->offloadable)
11370 continue;
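/* Received as a firstprivate pointer/reference: create a fresh pointer
   temporary and give the decl a value expression dereferencing it; the
   temporary itself is filled in by the second clause pass further
   below.  */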
11372 tree type = build_pointer_type (TREE_TYPE (var));
11373 tree new_var = lookup_decl (var, ctx);
11374 x = create_tmp_var_raw (type, get_name (new_var));
11375 gimple_add_tmp_var (x);
11376 x = build_simple_mem_ref (x);
11377 SET_DECL_VALUE_EXPR (new_var, x);
11378 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
11380 continue;
11383 if (!maybe_lookup_field (var, ctx))
11384 continue;
11386 /* Don't remap oacc parallel reduction variables, because the
11387 intermediate result must be local to each gang. */
11388 if (offloaded && !(OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
11389 && OMP_CLAUSE_MAP_IN_REDUCTION (c)))
11391 x = build_receiver_ref (var, true, ctx);
11392 tree new_var = lookup_decl (var, ctx);
11394 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
11395 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
11396 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
11397 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
11398 x = build_simple_mem_ref (x);
11399 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
11401 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
11402 if (omp_is_reference (new_var)
11403 && TREE_CODE (TREE_TYPE (new_var)) != POINTER_TYPE)
11405 /* Create a local object to hold the instance
11406 value. */
11407 tree type = TREE_TYPE (TREE_TYPE (new_var));
11408 const char *id = IDENTIFIER_POINTER (DECL_NAME (new_var));
11409 tree inst = create_tmp_var (type, id);
11410 gimplify_assign (inst, fold_indirect_ref (x), &fplist);
11411 x = build_fold_addr_expr (inst);
11413 gimplify_assign (new_var, x, &fplist);
11415 else if (DECL_P (new_var))
11417 SET_DECL_VALUE_EXPR (new_var, x);
11418 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
11420 else
11421 gcc_unreachable ();
11423 map_cnt++;
11424 break;
11426 case OMP_CLAUSE_FIRSTPRIVATE:
11427 if (is_oacc_parallel (ctx))
11428 goto oacc_firstprivate;
11429 map_cnt++;
11430 var = OMP_CLAUSE_DECL (c);
11431 if (!omp_is_reference (var)
11432 && !is_gimple_reg_type (TREE_TYPE (var)))
11434 tree new_var = lookup_decl (var, ctx);
11435 if (is_variable_sized (var))
11437 tree pvar = DECL_VALUE_EXPR (var);
11438 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
11439 pvar = TREE_OPERAND (pvar, 0);
11440 gcc_assert (DECL_P (pvar));
11441 tree new_pvar = lookup_decl (pvar, ctx);
11442 x = build_fold_indirect_ref (new_pvar);
11443 TREE_THIS_NOTRAP (x) = 1;
11445 else
11446 x = build_receiver_ref (var, true, ctx);
11447 SET_DECL_VALUE_EXPR (new_var, x);
11448 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
11450 break;
11452 case OMP_CLAUSE_PRIVATE:
11453 if (is_gimple_omp_oacc (ctx->stmt))
11454 break;
11455 var = OMP_CLAUSE_DECL (c);
11456 if (is_variable_sized (var))
11458 tree new_var = lookup_decl (var, ctx);
11459 tree pvar = DECL_VALUE_EXPR (var);
11460 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
11461 pvar = TREE_OPERAND (pvar, 0);
11462 gcc_assert (DECL_P (pvar));
11463 tree new_pvar = lookup_decl (pvar, ctx);
11464 x = build_fold_indirect_ref (new_pvar);
11465 TREE_THIS_NOTRAP (x) = 1;
11466 SET_DECL_VALUE_EXPR (new_var, x);
11467 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
11469 break;
11471 case OMP_CLAUSE_USE_DEVICE_PTR:
11472 case OMP_CLAUSE_USE_DEVICE_ADDR:
11473 case OMP_CLAUSE_IS_DEVICE_PTR:
11474 var = OMP_CLAUSE_DECL (c);
11475 map_cnt++;
11476 if (is_variable_sized (var))
11478 tree new_var = lookup_decl (var, ctx);
11479 tree pvar = DECL_VALUE_EXPR (var);
11480 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
11481 pvar = TREE_OPERAND (pvar, 0);
11482 gcc_assert (DECL_P (pvar));
11483 tree new_pvar = lookup_decl (pvar, ctx);
11484 x = build_fold_indirect_ref (new_pvar);
11485 TREE_THIS_NOTRAP (x) = 1;
11486 SET_DECL_VALUE_EXPR (new_var, x);
11487 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
11489 else if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
11490 && !omp_is_reference (var))
11491 || TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
11493 tree new_var = lookup_decl (var, ctx);
11494 tree type = build_pointer_type (TREE_TYPE (var));
11495 x = create_tmp_var_raw (type, get_name (new_var));
11496 gimple_add_tmp_var (x);
11497 x = build_simple_mem_ref (x);
11498 SET_DECL_VALUE_EXPR (new_var, x);
11499 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
11501 else
11503 tree new_var = lookup_decl (var, ctx);
11504 x = create_tmp_var_raw (TREE_TYPE (new_var), get_name (new_var));
11505 gimple_add_tmp_var (x);
11506 SET_DECL_VALUE_EXPR (new_var, x);
11507 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
11509 break;
11512 if (offloaded)
11514 target_nesting_level++;
11515 lower_omp (&tgt_body, ctx);
11516 target_nesting_level--;
11518 else if (data_region)
11519 lower_omp (&tgt_body, ctx);
11521 if (offloaded)
11523 /* Declare all the variables created by mapping and the variables
11524 declared in the scope of the target body. */
11525 record_vars_into (ctx->block_vars, child_fn);
11526 maybe_remove_omp_member_access_dummy_vars (tgt_bind);
11527 record_vars_into (gimple_bind_vars (tgt_bind), child_fn);
11530 olist = NULL;
11531 ilist = NULL;
11532 if (ctx->record_type)
11534 ctx->sender_decl
11535 = create_tmp_var (ctx->record_type, ".omp_data_arr");
11536 DECL_NAMELESS (ctx->sender_decl) = 1;
11537 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
11538 t = make_tree_vec (3);
11539 TREE_VEC_ELT (t, 0) = ctx->sender_decl;
11540 TREE_VEC_ELT (t, 1)
11541 = create_tmp_var (build_array_type_nelts (size_type_node, map_cnt),
11542 ".omp_data_sizes");
11543 DECL_NAMELESS (TREE_VEC_ELT (t, 1)) = 1;
11544 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 1)) = 1;
11545 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 1;
11546 tree tkind_type = short_unsigned_type_node;
11547 int talign_shift = 8;
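/* Each kind entry is a short: the low TALIGN_SHIFT (8) bits carry the
   GOMP_MAP_* kind and the bits above carry ceil_log2 of the required
   alignment, as the checking asserts below enforce.  */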
11548 TREE_VEC_ELT (t, 2)
11549 = create_tmp_var (build_array_type_nelts (tkind_type, map_cnt),
11550 ".omp_data_kinds");
11551 DECL_NAMELESS (TREE_VEC_ELT (t, 2)) = 1;
11552 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 2)) = 1;
11553 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 1;
11554 gimple_omp_target_set_data_arg (stmt, t);
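/* The data_arg of the target stmt is thus a TREE_VEC of the three
   parallel arrays the runtime expects, indexed by map slot: the object
   addresses (.omp_data_arr), their sizes (.omp_data_sizes) and the
   encoded map kinds (.omp_data_kinds).  */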
11556 vec<constructor_elt, va_gc> *vsize;
11557 vec<constructor_elt, va_gc> *vkind;
11558 vec_alloc (vsize, map_cnt);
11559 vec_alloc (vkind, map_cnt);
11560 unsigned int map_idx = 0;
11562 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
11563 switch (OMP_CLAUSE_CODE (c))
11565 tree ovar, nc, s, purpose, var, x, type;
11566 unsigned int talign;
11568 default:
11569 break;
11571 case OMP_CLAUSE_MAP:
11572 case OMP_CLAUSE_TO:
11573 case OMP_CLAUSE_FROM:
11574 oacc_firstprivate_map:
11575 nc = c;
11576 ovar = OMP_CLAUSE_DECL (c);
11577 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
11578 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
11579 || (OMP_CLAUSE_MAP_KIND (c)
11580 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
11581 break;
11582 if (!DECL_P (ovar))
11584 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
11585 && OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
11587 gcc_checking_assert (OMP_CLAUSE_DECL (OMP_CLAUSE_CHAIN (c))
11588 == get_base_address (ovar));
11589 nc = OMP_CLAUSE_CHAIN (c);
11590 ovar = OMP_CLAUSE_DECL (nc);
11592 else
11594 tree x = build_sender_ref (ovar, ctx);
11595 tree v
11596 = build_fold_addr_expr_with_type (ovar, ptr_type_node);
11597 gimplify_assign (x, v, &ilist);
11598 nc = NULL_TREE;
11601 else
11603 if (DECL_SIZE (ovar)
11604 && TREE_CODE (DECL_SIZE (ovar)) != INTEGER_CST)
11606 tree ovar2 = DECL_VALUE_EXPR (ovar);
11607 gcc_assert (TREE_CODE (ovar2) == INDIRECT_REF);
11608 ovar2 = TREE_OPERAND (ovar2, 0);
11609 gcc_assert (DECL_P (ovar2));
11610 ovar = ovar2;
11612 if (!maybe_lookup_field (ovar, ctx))
11613 continue;
11616 talign = TYPE_ALIGN_UNIT (TREE_TYPE (ovar));
11617 if (DECL_P (ovar) && DECL_ALIGN_UNIT (ovar) > talign)
11618 talign = DECL_ALIGN_UNIT (ovar);
11619 if (nc)
11621 var = lookup_decl_in_outer_ctx (ovar, ctx);
11622 x = build_sender_ref (ovar, ctx);
11624 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
11625 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
11626 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
11627 && TREE_CODE (TREE_TYPE (ovar)) == ARRAY_TYPE)
11629 gcc_assert (offloaded);
11630 tree avar
11631 = create_tmp_var (TREE_TYPE (TREE_TYPE (x)));
11632 mark_addressable (avar);
11633 gimplify_assign (avar, build_fold_addr_expr (var), &ilist);
11634 talign = DECL_ALIGN_UNIT (avar);
11635 avar = build_fold_addr_expr (avar);
11636 gimplify_assign (x, avar, &ilist);
11638 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
11640 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
11641 if (!omp_is_reference (var))
11643 if (is_gimple_reg (var)
11644 && OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
11645 TREE_NO_WARNING (var) = 1;
11646 var = build_fold_addr_expr (var);
11648 else
11649 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
11650 gimplify_assign (x, var, &ilist);
11652 else if (is_gimple_reg (var))
11654 gcc_assert (offloaded);
11655 tree avar = create_tmp_var (TREE_TYPE (var));
11656 mark_addressable (avar);
11657 enum gomp_map_kind map_kind = OMP_CLAUSE_MAP_KIND (c);
11658 if (GOMP_MAP_COPY_TO_P (map_kind)
11659 || map_kind == GOMP_MAP_POINTER
11660 || map_kind == GOMP_MAP_TO_PSET
11661 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
11663 /* If we need to initialize a temporary
11664 with VAR because it is not addressable, and
11665 the variable hasn't been initialized yet, then
11666 we'll get a warning for the store to avar.
11667 Don't warn in that case; the mapping might
11668 be implicit. */
11669 TREE_NO_WARNING (var) = 1;
11670 gimplify_assign (avar, var, &ilist);
11672 avar = build_fold_addr_expr (avar);
11673 gimplify_assign (x, avar, &ilist);
11674 if ((GOMP_MAP_COPY_FROM_P (map_kind)
11675 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
11676 && !TYPE_READONLY (TREE_TYPE (var)))
11678 x = unshare_expr (x);
11679 x = build_simple_mem_ref (x);
11680 gimplify_assign (var, x, &olist);
11683 else
11685 var = build_fold_addr_expr (var);
11686 gimplify_assign (x, var, &ilist);
11689 s = NULL_TREE;
11690 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
11692 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
11693 s = TREE_TYPE (ovar);
11694 if (TREE_CODE (s) == REFERENCE_TYPE)
11695 s = TREE_TYPE (s);
11696 s = TYPE_SIZE_UNIT (s);
11698 else
11699 s = OMP_CLAUSE_SIZE (c);
11700 if (s == NULL_TREE)
11701 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
11702 s = fold_convert (size_type_node, s);
11703 purpose = size_int (map_idx++);
11704 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
11705 if (TREE_CODE (s) != INTEGER_CST)
11706 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
11708 unsigned HOST_WIDE_INT tkind, tkind_zero;
11709 switch (OMP_CLAUSE_CODE (c))
11711 case OMP_CLAUSE_MAP:
11712 tkind = OMP_CLAUSE_MAP_KIND (c);
11713 tkind_zero = tkind;
11714 if (OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c))
11715 switch (tkind)
11717 case GOMP_MAP_ALLOC:
11718 case GOMP_MAP_TO:
11719 case GOMP_MAP_FROM:
11720 case GOMP_MAP_TOFROM:
11721 case GOMP_MAP_ALWAYS_TO:
11722 case GOMP_MAP_ALWAYS_FROM:
11723 case GOMP_MAP_ALWAYS_TOFROM:
11724 case GOMP_MAP_RELEASE:
11725 case GOMP_MAP_FORCE_TO:
11726 case GOMP_MAP_FORCE_FROM:
11727 case GOMP_MAP_FORCE_TOFROM:
11728 case GOMP_MAP_FORCE_PRESENT:
11729 tkind_zero = GOMP_MAP_ZERO_LEN_ARRAY_SECTION;
11730 break;
11731 case GOMP_MAP_DELETE:
11732 tkind_zero = GOMP_MAP_DELETE_ZERO_LEN_ARRAY_SECTION;
11733 default:
11734 break;
11736 if (tkind_zero != tkind)
11738 if (integer_zerop (s))
11739 tkind = tkind_zero;
11740 else if (integer_nonzerop (s))
11741 tkind_zero = tkind;
11743 break;
11744 case OMP_CLAUSE_FIRSTPRIVATE:
11745 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
11746 tkind = GOMP_MAP_TO;
11747 tkind_zero = tkind;
11748 break;
11749 case OMP_CLAUSE_TO:
11750 tkind = GOMP_MAP_TO;
11751 tkind_zero = tkind;
11752 break;
11753 case OMP_CLAUSE_FROM:
11754 tkind = GOMP_MAP_FROM;
11755 tkind_zero = tkind;
11756 break;
11757 default:
11758 gcc_unreachable ();
11760 gcc_checking_assert (tkind
11761 < (HOST_WIDE_INT_C (1U) << talign_shift));
11762 gcc_checking_assert (tkind_zero
11763 < (HOST_WIDE_INT_C (1U) << talign_shift));
11764 talign = ceil_log2 (talign);
11765 tkind |= talign << talign_shift;
11766 tkind_zero |= talign << talign_shift;
11767 gcc_checking_assert (tkind
11768 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
11769 gcc_checking_assert (tkind_zero
11770 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
11771 if (tkind == tkind_zero)
11772 x = build_int_cstu (tkind_type, tkind);
11773 else
11775 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 0;
11776 x = build3 (COND_EXPR, tkind_type,
11777 fold_build2 (EQ_EXPR, boolean_type_node,
11778 unshare_expr (s), size_zero_node),
11779 build_int_cstu (tkind_type, tkind_zero),
11780 build_int_cstu (tkind_type, tkind));
11782 CONSTRUCTOR_APPEND_ELT (vkind, purpose, x);
11783 if (nc && nc != c)
11784 c = nc;
11785 break;
11787 case OMP_CLAUSE_FIRSTPRIVATE:
11788 if (is_oacc_parallel (ctx))
11789 goto oacc_firstprivate_map;
11790 ovar = OMP_CLAUSE_DECL (c);
11791 if (omp_is_reference (ovar))
11792 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
11793 else
11794 talign = DECL_ALIGN_UNIT (ovar);
11795 var = lookup_decl_in_outer_ctx (ovar, ctx);
11796 x = build_sender_ref (ovar, ctx);
11797 tkind = GOMP_MAP_FIRSTPRIVATE;
11798 type = TREE_TYPE (ovar);
11799 if (omp_is_reference (ovar))
11800 type = TREE_TYPE (type);
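/* Small integral and pointer firstprivates are passed by value: the
   value itself travels in the address slot with kind
   GOMP_MAP_FIRSTPRIVATE_INT and size 0, and the receiver converts it
   back from the pointer-sized integer.  */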
11801 if ((INTEGRAL_TYPE_P (type)
11802 && TYPE_PRECISION (type) <= POINTER_SIZE)
11803 || TREE_CODE (type) == POINTER_TYPE)
11805 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
11806 tree t = var;
11807 if (omp_is_reference (var))
11808 t = build_simple_mem_ref (var);
11809 else if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
11810 TREE_NO_WARNING (var) = 1;
11811 if (TREE_CODE (type) != POINTER_TYPE)
11812 t = fold_convert (pointer_sized_int_node, t);
11813 t = fold_convert (TREE_TYPE (x), t);
11814 gimplify_assign (x, t, &ilist);
11816 else if (omp_is_reference (var))
11817 gimplify_assign (x, var, &ilist);
11818 else if (is_gimple_reg (var))
11820 tree avar = create_tmp_var (TREE_TYPE (var));
11821 mark_addressable (avar);
11822 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
11823 TREE_NO_WARNING (var) = 1;
11824 gimplify_assign (avar, var, &ilist);
11825 avar = build_fold_addr_expr (avar);
11826 gimplify_assign (x, avar, &ilist);
11828 else
11830 var = build_fold_addr_expr (var);
11831 gimplify_assign (x, var, &ilist);
11833 if (tkind == GOMP_MAP_FIRSTPRIVATE_INT)
11834 s = size_int (0);
11835 else if (omp_is_reference (ovar))
11836 s = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
11837 else
11838 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
11839 s = fold_convert (size_type_node, s);
11840 purpose = size_int (map_idx++);
11841 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
11842 if (TREE_CODE (s) != INTEGER_CST)
11843 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
11845 gcc_checking_assert (tkind
11846 < (HOST_WIDE_INT_C (1U) << talign_shift));
11847 talign = ceil_log2 (talign);
11848 tkind |= talign << talign_shift;
11849 gcc_checking_assert (tkind
11850 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
11851 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
11852 build_int_cstu (tkind_type, tkind));
11853 break;
11855 case OMP_CLAUSE_USE_DEVICE_PTR:
11856 case OMP_CLAUSE_USE_DEVICE_ADDR:
11857 case OMP_CLAUSE_IS_DEVICE_PTR:
11858 ovar = OMP_CLAUSE_DECL (c);
11859 var = lookup_decl_in_outer_ctx (ovar, ctx);
11860 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR)
11862 tkind = GOMP_MAP_USE_DEVICE_PTR;
11863 x = build_sender_ref ((splay_tree_key) &DECL_UID (ovar), ctx);
11865 else
11867 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
11868 x = build_sender_ref (ovar, ctx);
11870 type = TREE_TYPE (ovar);
11871 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
11872 && !omp_is_reference (ovar))
11873 || TREE_CODE (type) == ARRAY_TYPE)
11874 var = build_fold_addr_expr (var);
11875 else
11877 if (omp_is_reference (ovar))
11879 type = TREE_TYPE (type);
11880 if (TREE_CODE (type) != ARRAY_TYPE
11881 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_USE_DEVICE_ADDR)
11882 var = build_simple_mem_ref (var);
11883 var = fold_convert (TREE_TYPE (x), var);
11886 gimplify_assign (x, var, &ilist);
11887 s = size_int (0);
11888 purpose = size_int (map_idx++);
11889 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
11890 gcc_checking_assert (tkind
11891 < (HOST_WIDE_INT_C (1U) << talign_shift));
11892 gcc_checking_assert (tkind
11893 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
11894 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
11895 build_int_cstu (tkind_type, tkind));
11896 break;
11899 gcc_assert (map_idx == map_cnt);
11901 DECL_INITIAL (TREE_VEC_ELT (t, 1))
11902 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 1)), vsize);
11903 DECL_INITIAL (TREE_VEC_ELT (t, 2))
11904 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 2)), vkind);
11905 for (int i = 1; i <= 2; i++)
11906 if (!TREE_STATIC (TREE_VEC_ELT (t, i)))
11908 gimple_seq initlist = NULL;
11909 force_gimple_operand (build1 (DECL_EXPR, void_type_node,
11910 TREE_VEC_ELT (t, i)),
11911 &initlist, true, NULL_TREE);
11912 gimple_seq_add_seq (&ilist, initlist);
11914 tree clobber = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, i)),
11915 NULL);
11916 TREE_THIS_VOLATILE (clobber) = 1;
11917 gimple_seq_add_stmt (&olist,
11918 gimple_build_assign (TREE_VEC_ELT (t, i),
11919 clobber));
11922 tree clobber = build_constructor (ctx->record_type, NULL);
11923 TREE_THIS_VOLATILE (clobber) = 1;
11924 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
11925 clobber));
11928 /* Once all the expansions are done, sequence all the different
11929 fragments inside gimple_omp_body. */
11931 new_body = NULL;
11933 if (offloaded
11934 && ctx->record_type)
11936 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
11937 /* fixup_child_record_type might have changed receiver_decl's type. */
11938 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
11939 gimple_seq_add_stmt (&new_body,
11940 gimple_build_assign (ctx->receiver_decl, t));
11942 gimple_seq_add_seq (&new_body, fplist);
11944 if (offloaded || data_region)
11946 tree prev = NULL_TREE;
11947 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
11948 switch (OMP_CLAUSE_CODE (c))
11950 tree var, x;
11951 default:
11952 break;
11953 case OMP_CLAUSE_FIRSTPRIVATE:
11954 if (is_gimple_omp_oacc (ctx->stmt))
11955 break;
11956 var = OMP_CLAUSE_DECL (c);
11957 if (omp_is_reference (var)
11958 || is_gimple_reg_type (TREE_TYPE (var)))
11960 tree new_var = lookup_decl (var, ctx);
11961 tree type;
11962 type = TREE_TYPE (var);
11963 if (omp_is_reference (var))
11964 type = TREE_TYPE (type);
11965 if ((INTEGRAL_TYPE_P (type)
11966 && TYPE_PRECISION (type) <= POINTER_SIZE)
11967 || TREE_CODE (type) == POINTER_TYPE)
11969 x = build_receiver_ref (var, false, ctx);
11970 if (TREE_CODE (type) != POINTER_TYPE)
11971 x = fold_convert (pointer_sized_int_node, x);
11972 x = fold_convert (type, x);
11973 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
11974 fb_rvalue);
11975 if (omp_is_reference (var))
11977 tree v = create_tmp_var_raw (type, get_name (var));
11978 gimple_add_tmp_var (v);
11979 TREE_ADDRESSABLE (v) = 1;
11980 gimple_seq_add_stmt (&new_body,
11981 gimple_build_assign (v, x));
11982 x = build_fold_addr_expr (v);
11984 gimple_seq_add_stmt (&new_body,
11985 gimple_build_assign (new_var, x));
11987 else
11989 x = build_receiver_ref (var, !omp_is_reference (var), ctx);
11990 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
11991 fb_rvalue);
11992 gimple_seq_add_stmt (&new_body,
11993 gimple_build_assign (new_var, x));
11996 else if (is_variable_sized (var))
11998 tree pvar = DECL_VALUE_EXPR (var);
11999 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
12000 pvar = TREE_OPERAND (pvar, 0);
12001 gcc_assert (DECL_P (pvar));
12002 tree new_var = lookup_decl (pvar, ctx);
12003 x = build_receiver_ref (var, false, ctx);
12004 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
12005 gimple_seq_add_stmt (&new_body,
12006 gimple_build_assign (new_var, x));
12008 break;
12009 case OMP_CLAUSE_PRIVATE:
12010 if (is_gimple_omp_oacc (ctx->stmt))
12011 break;
12012 var = OMP_CLAUSE_DECL (c);
12013 if (omp_is_reference (var))
12015 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
12016 tree new_var = lookup_decl (var, ctx);
12017 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
12018 if (TREE_CONSTANT (x))
12020 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
12021 get_name (var));
12022 gimple_add_tmp_var (x);
12023 TREE_ADDRESSABLE (x) = 1;
12024 x = build_fold_addr_expr_loc (clause_loc, x);
12026 else
12027 break;
12029 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
12030 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
12031 gimple_seq_add_stmt (&new_body,
12032 gimple_build_assign (new_var, x));
12034 break;
12035 case OMP_CLAUSE_USE_DEVICE_PTR:
12036 case OMP_CLAUSE_USE_DEVICE_ADDR:
12037 case OMP_CLAUSE_IS_DEVICE_PTR:
12038 var = OMP_CLAUSE_DECL (c);
12039 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR)
12040 x = build_sender_ref ((splay_tree_key) &DECL_UID (var), ctx);
12041 else
12042 x = build_receiver_ref (var, false, ctx);
12043 if (is_variable_sized (var))
12045 tree pvar = DECL_VALUE_EXPR (var);
12046 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
12047 pvar = TREE_OPERAND (pvar, 0);
12048 gcc_assert (DECL_P (pvar));
12049 tree new_var = lookup_decl (pvar, ctx);
12050 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
12051 gimple_seq_add_stmt (&new_body,
12052 gimple_build_assign (new_var, x));
12054 else if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
12055 && !omp_is_reference (var))
12056 || TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
12058 tree new_var = lookup_decl (var, ctx);
12059 new_var = DECL_VALUE_EXPR (new_var);
12060 gcc_assert (TREE_CODE (new_var) == MEM_REF);
12061 new_var = TREE_OPERAND (new_var, 0);
12062 gcc_assert (DECL_P (new_var));
12063 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
12064 gimple_seq_add_stmt (&new_body,
12065 gimple_build_assign (new_var, x));
12067 else
12069 tree type = TREE_TYPE (var);
12070 tree new_var = lookup_decl (var, ctx);
12071 if (omp_is_reference (var))
12073 type = TREE_TYPE (type);
12074 if (TREE_CODE (type) != ARRAY_TYPE
12075 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_USE_DEVICE_ADDR)
12077 tree v = create_tmp_var_raw (type, get_name (var));
12078 gimple_add_tmp_var (v);
12079 TREE_ADDRESSABLE (v) = 1;
12080 x = fold_convert (type, x);
12081 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
12082 fb_rvalue);
12083 gimple_seq_add_stmt (&new_body,
12084 gimple_build_assign (v, x));
12085 x = build_fold_addr_expr (v);
12088 new_var = DECL_VALUE_EXPR (new_var);
12089 x = fold_convert (TREE_TYPE (new_var), x);
12090 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
12091 gimple_seq_add_stmt (&new_body,
12092 gimple_build_assign (new_var, x));
12094 break;
12096 /* Handle GOMP_MAP_FIRSTPRIVATE_{POINTER,REFERENCE} in a second pass,
12097 so that any firstprivate vars referenced by their OMP_CLAUSE_SIZE have
12098 already been handled above. Similarly OMP_CLAUSE_PRIVATE for VLAs
12099 or references to VLAs. */
12100 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
12101 switch (OMP_CLAUSE_CODE (c))
12103 tree var;
12104 default:
12105 break;
12106 case OMP_CLAUSE_MAP:
12107 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
12108 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
12110 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
12111 poly_int64 offset = 0;
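/* PREV is the preceding map clause whose receiver slot carries the
   pointer this GOMP_MAP_FIRSTPRIVATE_{POINTER,REFERENCE} clause is
   rebased from; it is set at the bottom of this loop.  */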
12112 gcc_assert (prev);
12113 var = OMP_CLAUSE_DECL (c);
12114 if (DECL_P (var)
12115 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
12116 && is_global_var (maybe_lookup_decl_in_outer_ctx (var,
12117 ctx))
12118 && varpool_node::get_create (var)->offloadable)
12119 break;
12120 if (TREE_CODE (var) == INDIRECT_REF
12121 && TREE_CODE (TREE_OPERAND (var, 0)) == COMPONENT_REF)
12122 var = TREE_OPERAND (var, 0);
12123 if (TREE_CODE (var) == COMPONENT_REF)
12125 var = get_addr_base_and_unit_offset (var, &offset);
12126 gcc_assert (var != NULL_TREE && DECL_P (var));
12128 else if (DECL_SIZE (var)
12129 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
12131 tree var2 = DECL_VALUE_EXPR (var);
12132 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
12133 var2 = TREE_OPERAND (var2, 0);
12134 gcc_assert (DECL_P (var2));
12135 var = var2;
12137 tree new_var = lookup_decl (var, ctx), x;
12138 tree type = TREE_TYPE (new_var);
12139 bool is_ref;
12140 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == INDIRECT_REF
12141 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
12142 == COMPONENT_REF))
12144 type = TREE_TYPE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0));
12145 is_ref = true;
12146 new_var = build2 (MEM_REF, type,
12147 build_fold_addr_expr (new_var),
12148 build_int_cst (build_pointer_type (type),
12149 offset));
12151 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPONENT_REF)
12153 type = TREE_TYPE (OMP_CLAUSE_DECL (c));
12154 is_ref = TREE_CODE (type) == REFERENCE_TYPE;
12155 new_var = build2 (MEM_REF, type,
12156 build_fold_addr_expr (new_var),
12157 build_int_cst (build_pointer_type (type),
12158 offset));
12160 else
12161 is_ref = omp_is_reference (var);
12162 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
12163 is_ref = false;
12164 bool ref_to_array = false;
12165 if (is_ref)
12167 type = TREE_TYPE (type);
12168 if (TREE_CODE (type) == ARRAY_TYPE)
12170 type = build_pointer_type (type);
12171 ref_to_array = true;
12174 else if (TREE_CODE (type) == ARRAY_TYPE)
12176 tree decl2 = DECL_VALUE_EXPR (new_var);
12177 gcc_assert (TREE_CODE (decl2) == MEM_REF);
12178 decl2 = TREE_OPERAND (decl2, 0);
12179 gcc_assert (DECL_P (decl2));
12180 new_var = decl2;
12181 type = TREE_TYPE (new_var);
12183 x = build_receiver_ref (OMP_CLAUSE_DECL (prev), false, ctx);
12184 x = fold_convert_loc (clause_loc, type, x);
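/* A non-zero OMP_CLAUSE_SIZE here is not a size but a bias recorded
   for the array section; subtract it from the received pointer to
   recover the expected base address.  */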
12185 if (!integer_zerop (OMP_CLAUSE_SIZE (c)))
12187 tree bias = OMP_CLAUSE_SIZE (c);
12188 if (DECL_P (bias))
12189 bias = lookup_decl (bias, ctx);
12190 bias = fold_convert_loc (clause_loc, sizetype, bias);
12191 bias = fold_build1_loc (clause_loc, NEGATE_EXPR, sizetype,
12192 bias);
12193 x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
12194 TREE_TYPE (x), x, bias);
12196 if (ref_to_array)
12197 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
12198 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
12199 if (is_ref && !ref_to_array)
12201 tree t = create_tmp_var_raw (type, get_name (var));
12202 gimple_add_tmp_var (t);
12203 TREE_ADDRESSABLE (t) = 1;
12204 gimple_seq_add_stmt (&new_body,
12205 gimple_build_assign (t, x));
12206 x = build_fold_addr_expr_loc (clause_loc, t);
12208 gimple_seq_add_stmt (&new_body,
12209 gimple_build_assign (new_var, x));
12210 prev = NULL_TREE;
12212 else if (OMP_CLAUSE_CHAIN (c)
12213 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c))
12214 == OMP_CLAUSE_MAP
12215 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
12216 == GOMP_MAP_FIRSTPRIVATE_POINTER
12217 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
12218 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
12219 prev = c;
12220 break;
12221 case OMP_CLAUSE_PRIVATE:
12222 var = OMP_CLAUSE_DECL (c);
12223 if (is_variable_sized (var))
12225 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
12226 tree new_var = lookup_decl (var, ctx);
12227 tree pvar = DECL_VALUE_EXPR (var);
12228 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
12229 pvar = TREE_OPERAND (pvar, 0);
12230 gcc_assert (DECL_P (pvar));
12231 tree new_pvar = lookup_decl (pvar, ctx);
12232 tree atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
12233 tree al = size_int (DECL_ALIGN (var));
12234 tree x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
12235 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
12236 x = fold_convert_loc (clause_loc, TREE_TYPE (new_pvar), x);
12237 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
12238 gimple_seq_add_stmt (&new_body,
12239 gimple_build_assign (new_pvar, x));
12241 else if (omp_is_reference (var) && !is_gimple_omp_oacc (ctx->stmt))
12243 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
12244 tree new_var = lookup_decl (var, ctx);
12245 tree x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
12246 if (TREE_CONSTANT (x))
12247 break;
12248 else
12250 tree atmp
12251 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
12252 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
12253 tree al = size_int (TYPE_ALIGN (rtype));
12254 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
12257 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
12258 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
12259 gimple_seq_add_stmt (&new_body,
12260 gimple_build_assign (new_var, x));
12262 break;
12265 gimple_seq fork_seq = NULL;
12266 gimple_seq join_seq = NULL;
12268 if (is_oacc_parallel (ctx))
12270 /* If there are reductions on the offloaded region itself, treat
12271 them as a dummy GANG loop. */
12272 tree level = build_int_cst (integer_type_node, GOMP_DIM_GANG);
12274 lower_oacc_reductions (gimple_location (ctx->stmt), clauses, level,
12275 false, NULL, NULL, &fork_seq, &join_seq, ctx);
12278 gimple_seq_add_seq (&new_body, fork_seq);
12279 gimple_seq_add_seq (&new_body, tgt_body);
12280 gimple_seq_add_seq (&new_body, join_seq);
12282 if (offloaded)
12283 new_body = maybe_catch_exception (new_body);
12285 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
12286 gimple_omp_set_body (stmt, new_body);
12289 bind = gimple_build_bind (NULL, NULL,
12290 tgt_bind ? gimple_bind_block (tgt_bind)
12291 : NULL_TREE);
12292 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
12293 gimple_bind_add_seq (bind, ilist);
12294 gimple_bind_add_stmt (bind, stmt);
12295 gimple_bind_add_seq (bind, olist);
12297 pop_gimplify_context (NULL);
12299 if (dep_bind)
12301 gimple_bind_add_seq (dep_bind, dep_ilist);
12302 gimple_bind_add_stmt (dep_bind, bind);
12303 gimple_bind_add_seq (dep_bind, dep_olist);
12304 pop_gimplify_context (dep_bind);
12308 /* Expand code for an OpenMP teams directive. */
12310 static void
12311 lower_omp_teams (gimple_stmt_iterator *gsi_p, omp_context *ctx)
12313 gomp_teams *teams_stmt = as_a <gomp_teams *> (gsi_stmt (*gsi_p));
12314 push_gimplify_context ();
12316 tree block = make_node (BLOCK);
12317 gbind *bind = gimple_build_bind (NULL, NULL, block);
12318 gsi_replace (gsi_p, bind, true);
12319 gimple_seq bind_body = NULL;
12320 gimple_seq dlist = NULL;
12321 gimple_seq olist = NULL;
12323 tree num_teams = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
12324 OMP_CLAUSE_NUM_TEAMS);
12325 if (num_teams == NULL_TREE)
12326 num_teams = build_int_cst (unsigned_type_node, 0);
12327 else
12329 num_teams = OMP_CLAUSE_NUM_TEAMS_EXPR (num_teams);
12330 num_teams = fold_convert (unsigned_type_node, num_teams);
12331 gimplify_expr (&num_teams, &bind_body, NULL, is_gimple_val, fb_rvalue);
12333 tree thread_limit = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
12334 OMP_CLAUSE_THREAD_LIMIT);
12335 if (thread_limit == NULL_TREE)
12336 thread_limit = build_int_cst (unsigned_type_node, 0);
12337 else
12339 thread_limit = OMP_CLAUSE_THREAD_LIMIT_EXPR (thread_limit);
12340 thread_limit = fold_convert (unsigned_type_node, thread_limit);
12341 gimplify_expr (&thread_limit, &bind_body, NULL, is_gimple_val,
12342 fb_rvalue);
12345 lower_rec_input_clauses (gimple_omp_teams_clauses (teams_stmt),
12346 &bind_body, &dlist, ctx, NULL);
12347 lower_omp (gimple_omp_body_ptr (teams_stmt), ctx);
12348 lower_reduction_clauses (gimple_omp_teams_clauses (teams_stmt), &olist,
12349 NULL, ctx);
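/* Unless this is a phony (gridified) teams construct, emit the runtime
   call that sets up the league; e.g. (illustrative)
     #pragma omp teams num_teams(4) thread_limit(64)
   becomes a GOMP_teams (4, 64) call ahead of the body.  */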
12350 if (!gimple_omp_teams_grid_phony (teams_stmt))
12352 gimple_seq_add_stmt (&bind_body, teams_stmt);
12353 location_t loc = gimple_location (teams_stmt);
12354 tree decl = builtin_decl_explicit (BUILT_IN_GOMP_TEAMS);
12355 gimple *call = gimple_build_call (decl, 2, num_teams, thread_limit);
12356 gimple_set_location (call, loc);
12357 gimple_seq_add_stmt (&bind_body, call);
12360 gimple_seq_add_seq (&bind_body, gimple_omp_body (teams_stmt));
12361 gimple_omp_set_body (teams_stmt, NULL);
12362 gimple_seq_add_seq (&bind_body, olist);
12363 gimple_seq_add_seq (&bind_body, dlist);
12364 if (!gimple_omp_teams_grid_phony (teams_stmt))
12365 gimple_seq_add_stmt (&bind_body, gimple_build_omp_return (true));
12366 gimple_bind_set_body (bind, bind_body);
12368 pop_gimplify_context (bind);
12370 gimple_bind_append_vars (bind, ctx->block_vars);
12371 BLOCK_VARS (block) = ctx->block_vars;
12372 if (BLOCK_VARS (block))
12373 TREE_USED (block) = 1;
12376 /* Expand code within an artificial GIMPLE_OMP_GRID_BODY OMP construct. */
12378 static void
12379 lower_omp_grid_body (gimple_stmt_iterator *gsi_p, omp_context *ctx)
12381 gimple *stmt = gsi_stmt (*gsi_p);
12382 lower_omp (gimple_omp_body_ptr (stmt), ctx);
12383 gimple_seq_add_stmt (gimple_omp_body_ptr (stmt),
12384 gimple_build_omp_return (false));
12388 /* Callback for lower_omp_1. Return non-NULL if *tp needs to be
12389 regimplified. If DATA is non-NULL, lower_omp_1 is outside
12390 of OMP context, but with task_shared_vars set. */
12392 static tree
12393 lower_omp_regimplify_p (tree *tp, int *walk_subtrees,
12394 void *data)
12396 tree t = *tp;
12398 /* Any variable with DECL_VALUE_EXPR needs to be regimplified. */
12399 if (VAR_P (t) && data == NULL && DECL_HAS_VALUE_EXPR_P (t))
12400 return t;
12402 if (task_shared_vars
12403 && DECL_P (t)
12404 && bitmap_bit_p (task_shared_vars, DECL_UID (t)))
12405 return t;
12407 /* If a global variable has been privatized, TREE_CONSTANT on
12408 ADDR_EXPR might be wrong. */
12409 if (data == NULL && TREE_CODE (t) == ADDR_EXPR)
12410 recompute_tree_invariant_for_addr_expr (t);
12412 *walk_subtrees = !IS_TYPE_OR_DECL_P (t);
12413 return NULL_TREE;
12416 /* Data to be communicated between lower_omp_regimplify_operands and
12417 lower_omp_regimplify_operands_p. */
12419 struct lower_omp_regimplify_operands_data
12421 omp_context *ctx;
12422 vec<tree> *decls;
12425 /* Helper function for lower_omp_regimplify_operands. Find
12426 omp_member_access_dummy_var vars and adjust temporarily their
12427 DECL_VALUE_EXPRs if needed. */
12429 static tree
12430 lower_omp_regimplify_operands_p (tree *tp, int *walk_subtrees,
12431 void *data)
12433 tree t = omp_member_access_dummy_var (*tp);
12434 if (t)
12436 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
12437 lower_omp_regimplify_operands_data *ldata
12438 = (lower_omp_regimplify_operands_data *) wi->info;
12439 tree o = maybe_lookup_decl (t, ldata->ctx);
12440 if (o != t)
12442 ldata->decls->safe_push (DECL_VALUE_EXPR (*tp));
12443 ldata->decls->safe_push (*tp);
12444 tree v = unshare_and_remap (DECL_VALUE_EXPR (*tp), t, o);
12445 SET_DECL_VALUE_EXPR (*tp, v);
12448 *walk_subtrees = !IS_TYPE_OR_DECL_P (*tp);
12449 return NULL_TREE;
12452 /* Wrapper around gimple_regimplify_operands that adjusts DECL_VALUE_EXPRs
12453 of omp_member_access_dummy_var vars during regimplification. */
12455 static void
12456 lower_omp_regimplify_operands (omp_context *ctx, gimple *stmt,
12457 gimple_stmt_iterator *gsi_p)
12459 auto_vec<tree, 10> decls;
12460 if (ctx)
12462 struct walk_stmt_info wi;
12463 memset (&wi, '\0', sizeof (wi));
12464 struct lower_omp_regimplify_operands_data data;
12465 data.ctx = ctx;
12466 data.decls = &decls;
12467 wi.info = &data;
12468 walk_gimple_op (stmt, lower_omp_regimplify_operands_p, &wi);
12470 gimple_regimplify_operands (stmt, gsi_p);
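/* Undo the temporary remapping done by lower_omp_regimplify_operands_p:
   DECLS holds (value-expr, decl) pairs, popped here in reverse.  */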
12471 while (!decls.is_empty ())
12473 tree t = decls.pop ();
12474 tree v = decls.pop ();
12475 SET_DECL_VALUE_EXPR (t, v);
12479 static void
12480 lower_omp_1 (gimple_stmt_iterator *gsi_p, omp_context *ctx)
12482 gimple *stmt = gsi_stmt (*gsi_p);
12483 struct walk_stmt_info wi;
12484 gcall *call_stmt;
12486 if (gimple_has_location (stmt))
12487 input_location = gimple_location (stmt);
12489 if (task_shared_vars)
12490 memset (&wi, '\0', sizeof (wi));
12492 /* If we have issued syntax errors, avoid doing any heavy lifting.
12493 Just replace the OMP directives with a NOP to avoid
12494 confusing RTL expansion. */
12495 if (seen_error () && is_gimple_omp (stmt))
12497 gsi_replace (gsi_p, gimple_build_nop (), true);
12498 return;
12501 switch (gimple_code (stmt))
12503 case GIMPLE_COND:
12505 gcond *cond_stmt = as_a <gcond *> (stmt);
12506 if ((ctx || task_shared_vars)
12507 && (walk_tree (gimple_cond_lhs_ptr (cond_stmt),
12508 lower_omp_regimplify_p,
12509 ctx ? NULL : &wi, NULL)
12510 || walk_tree (gimple_cond_rhs_ptr (cond_stmt),
12511 lower_omp_regimplify_p,
12512 ctx ? NULL : &wi, NULL)))
12513 lower_omp_regimplify_operands (ctx, cond_stmt, gsi_p);
12515 break;
12516 case GIMPLE_CATCH:
12517 lower_omp (gimple_catch_handler_ptr (as_a <gcatch *> (stmt)), ctx);
12518 break;
12519 case GIMPLE_EH_FILTER:
12520 lower_omp (gimple_eh_filter_failure_ptr (stmt), ctx);
12521 break;
12522 case GIMPLE_TRY:
12523 lower_omp (gimple_try_eval_ptr (stmt), ctx);
12524 lower_omp (gimple_try_cleanup_ptr (stmt), ctx);
12525 break;
12526 case GIMPLE_TRANSACTION:
12527 lower_omp (gimple_transaction_body_ptr (as_a <gtransaction *> (stmt)),
12528 ctx);
12529 break;
12530 case GIMPLE_BIND:
12531 lower_omp (gimple_bind_body_ptr (as_a <gbind *> (stmt)), ctx);
12532 maybe_remove_omp_member_access_dummy_vars (as_a <gbind *> (stmt));
12533 break;
12534 case GIMPLE_OMP_PARALLEL:
12535 case GIMPLE_OMP_TASK:
12536 ctx = maybe_lookup_ctx (stmt);
12537 gcc_assert (ctx);
12538 if (ctx->cancellable)
12539 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
12540 lower_omp_taskreg (gsi_p, ctx);
12541 break;
12542 case GIMPLE_OMP_FOR:
12543 ctx = maybe_lookup_ctx (stmt);
12544 gcc_assert (ctx);
12545 if (ctx->cancellable)
12546 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
12547 lower_omp_for (gsi_p, ctx);
12548 break;
12549 case GIMPLE_OMP_SECTIONS:
12550 ctx = maybe_lookup_ctx (stmt);
12551 gcc_assert (ctx);
12552 if (ctx->cancellable)
12553 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
12554 lower_omp_sections (gsi_p, ctx);
12555 break;
12556 case GIMPLE_OMP_SINGLE:
12557 ctx = maybe_lookup_ctx (stmt);
12558 gcc_assert (ctx);
12559 lower_omp_single (gsi_p, ctx);
12560 break;
12561 case GIMPLE_OMP_MASTER:
12562 ctx = maybe_lookup_ctx (stmt);
12563 gcc_assert (ctx);
12564 lower_omp_master (gsi_p, ctx);
12565 break;
12566 case GIMPLE_OMP_TASKGROUP:
12567 ctx = maybe_lookup_ctx (stmt);
12568 gcc_assert (ctx);
12569 lower_omp_taskgroup (gsi_p, ctx);
12570 break;
12571 case GIMPLE_OMP_ORDERED:
12572 ctx = maybe_lookup_ctx (stmt);
12573 gcc_assert (ctx);
12574 lower_omp_ordered (gsi_p, ctx);
12575 break;
12576 case GIMPLE_OMP_SCAN:
12577 ctx = maybe_lookup_ctx (stmt);
12578 gcc_assert (ctx);
12579 lower_omp_scan (gsi_p, ctx);
12580 break;
12581 case GIMPLE_OMP_CRITICAL:
12582 ctx = maybe_lookup_ctx (stmt);
12583 gcc_assert (ctx);
12584 lower_omp_critical (gsi_p, ctx);
12585 break;
12586 case GIMPLE_OMP_ATOMIC_LOAD:
12587 if ((ctx || task_shared_vars)
12588 && walk_tree (gimple_omp_atomic_load_rhs_ptr (
12589 as_a <gomp_atomic_load *> (stmt)),
12590 lower_omp_regimplify_p, ctx ? NULL : &wi, NULL))
12591 lower_omp_regimplify_operands (ctx, stmt, gsi_p);
12592 break;
12593 case GIMPLE_OMP_TARGET:
12594 ctx = maybe_lookup_ctx (stmt);
12595 gcc_assert (ctx);
12596 lower_omp_target (gsi_p, ctx);
12597 break;
12598 case GIMPLE_OMP_TEAMS:
12599 ctx = maybe_lookup_ctx (stmt);
12600 gcc_assert (ctx);
12601 if (gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
12602 lower_omp_taskreg (gsi_p, ctx);
12603 else
12604 lower_omp_teams (gsi_p, ctx);
12605 break;
12606 case GIMPLE_OMP_GRID_BODY:
12607 ctx = maybe_lookup_ctx (stmt);
12608 gcc_assert (ctx);
12609 lower_omp_grid_body (gsi_p, ctx);
12610 break;
12611 case GIMPLE_CALL:
12612 tree fndecl;
12613 call_stmt = as_a <gcall *> (stmt);
12614 fndecl = gimple_call_fndecl (call_stmt);
12615 if (fndecl
12616 && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
12617 switch (DECL_FUNCTION_CODE (fndecl))
12619 case BUILT_IN_GOMP_BARRIER:
12620 if (ctx == NULL)
12621 break;
12622 /* FALLTHRU */
12623 case BUILT_IN_GOMP_CANCEL:
12624 case BUILT_IN_GOMP_CANCELLATION_POINT:
12625 omp_context *cctx;
12626 cctx = ctx;
12627 if (gimple_code (cctx->stmt) == GIMPLE_OMP_SECTION)
12628 cctx = cctx->outer;
12629 gcc_assert (gimple_call_lhs (call_stmt) == NULL_TREE);
12630 if (!cctx->cancellable)
12632 if (DECL_FUNCTION_CODE (fndecl)
12633 == BUILT_IN_GOMP_CANCELLATION_POINT)
12635 stmt = gimple_build_nop ();
12636 gsi_replace (gsi_p, stmt, false);
12638 break;
12640 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_GOMP_BARRIER)
12642 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_BARRIER_CANCEL);
12643 gimple_call_set_fndecl (call_stmt, fndecl);
12644 gimple_call_set_fntype (call_stmt, TREE_TYPE (fndecl));
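/* Branch on the call's result: a nonzero return value means
   cancellation is in effect, so jump to the enclosing region's cancel
   label instead of falling through.  */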
12646 tree lhs;
12647 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (fndecl)));
12648 gimple_call_set_lhs (call_stmt, lhs);
12649 tree fallthru_label;
12650 fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
12651 gimple *g;
12652 g = gimple_build_label (fallthru_label);
12653 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
12654 g = gimple_build_cond (NE_EXPR, lhs,
12655 fold_convert (TREE_TYPE (lhs),
12656 boolean_false_node),
12657 cctx->cancel_label, fallthru_label);
12658 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
12659 break;
12660 default:
12661 break;
12663 goto regimplify;
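      /* The GIMPLE_ASSIGN case below implements the bookkeeping for
	 lastprivate (conditional:).  For a store whose LHS is such a
	 variable, e.g. in the (hypothetical) source

	   #pragma omp for lastprivate (conditional: x)
	   for (i = 0; i < n; i++)
	     if (cond[i])
	       x = i;

	 an assignment of the construct's _condtemp_ iteration counter to
	 the per-variable temporary is inserted right after the store, so
	 that the final copy-out can determine which iteration performed
	 the lexically last conditional store.  */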
    case GIMPLE_ASSIGN:
      for (omp_context *up = ctx; up; up = up->outer)
	{
	  if (gimple_code (up->stmt) == GIMPLE_OMP_ORDERED
	      || gimple_code (up->stmt) == GIMPLE_OMP_CRITICAL
	      || gimple_code (up->stmt) == GIMPLE_OMP_TASKGROUP
	      || gimple_code (up->stmt) == GIMPLE_OMP_SECTION
	      || gimple_code (up->stmt) == GIMPLE_OMP_SCAN
	      || (gimple_code (up->stmt) == GIMPLE_OMP_TARGET
		  && (gimple_omp_target_kind (up->stmt)
		      == GF_OMP_TARGET_KIND_DATA)))
	    continue;
	  else if (!up->lastprivate_conditional_map)
	    break;
	  tree lhs = get_base_address (gimple_assign_lhs (stmt));
	  if (TREE_CODE (lhs) == MEM_REF
	      && DECL_P (TREE_OPERAND (lhs, 0))
	      && TREE_CODE (TREE_TYPE (TREE_OPERAND (lhs, 0)))
		 == REFERENCE_TYPE)
	    lhs = TREE_OPERAND (lhs, 0);
	  if (DECL_P (lhs))
	    if (tree *v = up->lastprivate_conditional_map->get (lhs))
	      {
		tree clauses;
		if (up->combined_into_simd_safelen1)
		  {
		    up = up->outer;
		    if (gimple_code (up->stmt) == GIMPLE_OMP_SCAN)
		      up = up->outer;
		  }
		if (gimple_code (up->stmt) == GIMPLE_OMP_FOR)
		  clauses = gimple_omp_for_clauses (up->stmt);
		else
		  clauses = gimple_omp_sections_clauses (up->stmt);
		tree c = omp_find_clause (clauses, OMP_CLAUSE__CONDTEMP_);
		if (!OMP_CLAUSE__CONDTEMP__ITER (c))
		  c = omp_find_clause (OMP_CLAUSE_CHAIN (c),
				       OMP_CLAUSE__CONDTEMP_);
		gcc_assert (OMP_CLAUSE__CONDTEMP__ITER (c));
		gimple *g = gimple_build_assign (*v, OMP_CLAUSE_DECL (c));
		gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
	      }
	}
      /* FALLTHRU */

    default:
    regimplify:
      if ((ctx || task_shared_vars)
	  && walk_gimple_op (stmt, lower_omp_regimplify_p,
			     ctx ? NULL : &wi))
	{
	  /* Just remove clobbers; this should happen only if we have
	     "privatized" local addressable variables in SIMD regions.
	     The clobber isn't needed in that case, and gimplifying the
	     address of the ARRAY_REF into a pointer and creating a
	     MEM_REF based clobber would create worse code than we get
	     with the clobber dropped.  */
	  if (gimple_clobber_p (stmt))
	    {
	      gsi_replace (gsi_p, gimple_build_nop (), true);
	      break;
	    }
	  lower_omp_regimplify_operands (ctx, stmt, gsi_p);
	}
      break;
    }
}
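
/* Lower each statement in the sequence *BODY via lower_omp_1, within
   context CTX (NULL outside of any construct), then fold statements
   inside offloading or taskreg regions that gimplification deliberately
   left unfolded.  */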
static void
lower_omp (gimple_seq *body, omp_context *ctx)
{
  location_t saved_location = input_location;
  gimple_stmt_iterator gsi;
  for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
    lower_omp_1 (&gsi, ctx);
  /* During gimplification, we haven't folded statements inside offloading
     or taskreg regions (gimplify.c:maybe_fold_stmt); do that now.  */
  if (target_nesting_level || taskreg_nesting_level)
    for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
      fold_stmt (&gsi);
  input_location = saved_location;
}

/* Main entry point.  */
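/* A sketch of the flow: if HSA code generation was requested, first
   gridify suitable target constructs; scan the whole function creating
   an omp_context per construct (scan_omp); finalize the layout of the
   records used by parallel/task constructs (finish_taskreg_scan); and
   only if any context was created, rewrite the IL via lower_omp.  */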
static unsigned int
execute_lower_omp (void)
{
  gimple_seq body;
  int i;
  omp_context *ctx;

  /* This pass always runs, to provide PROP_gimple_lomp.
     But often, there is nothing to do.  */
  if (flag_openacc == 0 && flag_openmp == 0
      && flag_openmp_simd == 0)
    return 0;

  all_contexts = splay_tree_new (splay_tree_compare_pointers, 0,
				 delete_omp_context);

  body = gimple_body (current_function_decl);

  if (hsa_gen_requested_p ())
    omp_grid_gridify_all_targets (&body);

  scan_omp (&body, NULL);
  gcc_assert (taskreg_nesting_level == 0);
  FOR_EACH_VEC_ELT (taskreg_contexts, i, ctx)
    finish_taskreg_scan (ctx);
  taskreg_contexts.release ();

  if (all_contexts->root)
    {
      if (task_shared_vars)
	push_gimplify_context ();
      lower_omp (&body, NULL);
      if (task_shared_vars)
	pop_gimplify_context (NULL);
    }

  if (all_contexts)
    {
      splay_tree_delete (all_contexts);
      all_contexts = NULL;
    }
  BITMAP_FREE (task_shared_vars);
  BITMAP_FREE (global_nonaddressable_vars);

  /* If the current function is a method, remove the artificial dummy
     VAR_DECLs created for non-static data member privatization; they
     aren't needed for debuginfo or anything else, have already been
     replaced everywhere in the IL, and cause problems with LTO.  */
  if (DECL_ARGUMENTS (current_function_decl)
      && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
      && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
	  == POINTER_TYPE))
    remove_member_access_dummy_vars (DECL_INITIAL (current_function_decl));
  return 0;
}

namespace {

const pass_data pass_data_lower_omp =
{
  GIMPLE_PASS, /* type */
  "omplower", /* name */
  OPTGROUP_OMP, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  PROP_gimple_lomp | PROP_gimple_lomp_dev, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_lower_omp : public gimple_opt_pass
{
public:
  pass_lower_omp (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_lower_omp, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return execute_lower_omp (); }

}; // class pass_lower_omp

} // anon namespace

gimple_opt_pass *
make_pass_lower_omp (gcc::context *ctxt)
{
  return new pass_lower_omp (ctxt);
}

/* The following is a utility to diagnose structured block violations.
   It is not part of the "omplower" pass, as that's invoked too late.  It
   should be invoked by the respective front ends after gimplification.  */
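
/* For instance, on a (hypothetical) testcase such as

     #pragma omp parallel
     {
       goto bad;
     }
   bad:;

   the branch and the label are recorded with different contexts, and
   diagnose_sb_0 below reports "invalid branch to/from OpenMP structured
   block".  */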

static splay_tree all_labels;

/* Check for mismatched contexts and generate an error if needed.  Return
   true if an error is detected.  */

static bool
diagnose_sb_0 (gimple_stmt_iterator *gsi_p,
	       gimple *branch_ctx, gimple *label_ctx)
{
  gcc_checking_assert (!branch_ctx || is_gimple_omp (branch_ctx));
  gcc_checking_assert (!label_ctx || is_gimple_omp (label_ctx));

  if (label_ctx == branch_ctx)
    return false;

  const char* kind = NULL;

  if (flag_openacc)
    {
      if ((branch_ctx && is_gimple_omp_oacc (branch_ctx))
	  || (label_ctx && is_gimple_omp_oacc (label_ctx)))
	{
	  gcc_checking_assert (kind == NULL);
	  kind = "OpenACC";
	}
    }
  if (kind == NULL)
    {
      gcc_checking_assert (flag_openmp || flag_openmp_simd);
      kind = "OpenMP";
    }

  /* Previously we kept track of the label's entire context in
     diagnose_sb_[12] so we could traverse it and issue a correct "exit" or
     "enter" error message upon a structured block violation.

     We built the context by building a list with tree_cons'ing, but there
     is no easy counterpart in gimple tuples.  It seems like far too much
     work for issuing exit/enter error messages.  If someone really misses
     the distinct error message... patches welcome.  */

#if 0
  /* Try to avoid confusing the user by producing an error message
     with correct "exit" or "enter" verbiage.  We prefer "exit"
     unless we can show that LABEL_CTX is nested within BRANCH_CTX.  */
  if (branch_ctx == NULL)
    exit_p = false;
  else
    {
      while (label_ctx)
	{
	  if (TREE_VALUE (label_ctx) == branch_ctx)
	    {
	      exit_p = false;
	      break;
	    }
	  label_ctx = TREE_CHAIN (label_ctx);
	}
    }

  if (exit_p)
    error ("invalid exit from %s structured block", kind);
  else
    error ("invalid entry to %s structured block", kind);
#endif

  /* If it's obvious we have an invalid entry, be specific about the
     error.  */
  if (branch_ctx == NULL)
    error ("invalid entry to %s structured block", kind);
  else
    {
      /* Otherwise, be vague and lazy, but efficient.  */
      error ("invalid branch to/from %s structured block", kind);
    }

  gsi_replace (gsi_p, gimple_build_nop (), false);
  return true;
}

/* Pass 1: Create a minimal tree of structured blocks, and record
   where each label is found.  */

static tree
diagnose_sb_1 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
	       struct walk_stmt_info *wi)
{
  gimple *context = (gimple *) wi->info;
  gimple *inner_context;
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;

  switch (gimple_code (stmt))
    {
    WALK_SUBSTMTS;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_SCAN:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_TARGET:
    case GIMPLE_OMP_TEAMS:
    case GIMPLE_OMP_TASKGROUP:
      /* The minimal context here is just the current OMP construct.  */
      inner_context = stmt;
      wi->info = inner_context;
      walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_OMP_FOR:
      inner_context = stmt;
      wi->info = inner_context;
      /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
	 walk them.  */
      walk_gimple_seq (gimple_omp_for_pre_body (stmt),
		       diagnose_sb_1, NULL, wi);
      walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_LABEL:
      splay_tree_insert (all_labels,
			 (splay_tree_key) gimple_label_label (
					    as_a <glabel *> (stmt)),
			 (splay_tree_value) context);
      break;

    default:
      break;
    }

  return NULL_TREE;
}
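
/* After pass 1, all_labels maps each LABEL_DECL to the innermost
   enclosing OMP construct (or to NULL when the label lies outside of any
   construct); pass 2 compares that against the context of every branch
   targeting the label.  */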

/* Pass 2: Check each branch and see if its context differs from that of
   the destination label's context.  */

static tree
diagnose_sb_2 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
	       struct walk_stmt_info *wi)
{
  gimple *context = (gimple *) wi->info;
  splay_tree_node n;
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;

  switch (gimple_code (stmt))
    {
    WALK_SUBSTMTS;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_SCAN:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_TARGET:
    case GIMPLE_OMP_TEAMS:
    case GIMPLE_OMP_TASKGROUP:
      wi->info = stmt;
      walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_OMP_FOR:
      wi->info = stmt;
      /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
	 walk them.  */
      walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt),
			   diagnose_sb_2, NULL, wi);
      walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_COND:
      {
	gcond *cond_stmt = as_a <gcond *> (stmt);
	tree lab = gimple_cond_true_label (cond_stmt);
	if (lab)
	  {
	    n = splay_tree_lookup (all_labels,
				   (splay_tree_key) lab);
	    diagnose_sb_0 (gsi_p, context,
			   n ? (gimple *) n->value : NULL);
	  }
	lab = gimple_cond_false_label (cond_stmt);
	if (lab)
	  {
	    n = splay_tree_lookup (all_labels,
				   (splay_tree_key) lab);
	    diagnose_sb_0 (gsi_p, context,
			   n ? (gimple *) n->value : NULL);
	  }
      }
      break;

    case GIMPLE_GOTO:
      {
	tree lab = gimple_goto_dest (stmt);
	if (TREE_CODE (lab) != LABEL_DECL)
	  break;

	n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
	diagnose_sb_0 (gsi_p, context, n ? (gimple *) n->value : NULL);
      }
      break;

    case GIMPLE_SWITCH:
      {
	gswitch *switch_stmt = as_a <gswitch *> (stmt);
	unsigned int i;
	for (i = 0; i < gimple_switch_num_labels (switch_stmt); ++i)
	  {
	    tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
	    n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
	    if (n && diagnose_sb_0 (gsi_p, context, (gimple *) n->value))
	      break;
	  }
      }
      break;

    case GIMPLE_RETURN:
      diagnose_sb_0 (gsi_p, context, NULL);
      break;

    default:
      break;
    }

  return NULL_TREE;
}

static unsigned int
diagnose_omp_structured_block_errors (void)
{
  struct walk_stmt_info wi;
  gimple_seq body = gimple_body (current_function_decl);

  all_labels = splay_tree_new (splay_tree_compare_pointers, 0, 0);

  memset (&wi, 0, sizeof (wi));
  walk_gimple_seq (body, diagnose_sb_1, NULL, &wi);

  memset (&wi, 0, sizeof (wi));
  wi.want_locations = true;
  walk_gimple_seq_mod (&body, diagnose_sb_2, NULL, &wi);

  gimple_set_body (current_function_decl, body);

  splay_tree_delete (all_labels);
  all_labels = NULL;

  return 0;
}

namespace {

const pass_data pass_data_diagnose_omp_blocks =
{
  GIMPLE_PASS, /* type */
  "*diagnose_omp_blocks", /* name */
  OPTGROUP_OMP, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_diagnose_omp_blocks : public gimple_opt_pass
{
public:
  pass_diagnose_omp_blocks (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_diagnose_omp_blocks, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
  {
    return flag_openacc || flag_openmp || flag_openmp_simd;
  }
  virtual unsigned int execute (function *)
  {
    return diagnose_omp_structured_block_errors ();
  }

}; // class pass_diagnose_omp_blocks

} // anon namespace

gimple_opt_pass *
make_pass_diagnose_omp_blocks (gcc::context *ctxt)
{
  return new pass_diagnose_omp_blocks (ctxt);
}

#include "gt-omp-low.h"