/* Lowering pass for OMP directives.  Converts OMP directives into explicit
   calls to the runtime library (libgomp), data marshalling to implement data
   sharing and copying clauses, offloading to accelerators, and more.

   Contributed by Diego Novillo <dnovillo@redhat.com>

   Copyright (C) 2005-2019 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "ssa.h"
#include "cgraph.h"
#include "pretty-print.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "tree-iterator.h"
#include "tree-inline.h"
#include "langhooks.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "splay-tree.h"
#include "omp-general.h"
#include "omp-low.h"
#include "omp-grid.h"
#include "gimple-low.h"
#include "symbol-summary.h"
#include "tree-nested.h"
#include "context.h"
#include "gomp-constants.h"
#include "gimple-pretty-print.h"
#include "hsa-common.h"
#include "stringpool.h"
#include "attribs.h"
/* Lowering of OMP parallel and workshare constructs proceeds in two
   phases.  The first phase scans the function looking for OMP statements
   and then for variables that must be replaced to satisfy data sharing
   clauses.  The second phase expands code for the constructs, as well as
   re-gimplifying things when variables have been replaced with complex
   expressions.

   Final code generation is done by pass_expand_omp.  The flowgraph is
   scanned for regions which are then moved to a new
   function, to be invoked by the thread library, or offloaded.  */
/* Context structure.  Used to store information about each parallel
   directive in the code.  */

struct omp_context
{
  /* This field must be at the beginning, as we do "inheritance":  Some
     callback functions for tree-inline.c (e.g., omp_copy_decl)
     receive a copy_body_data pointer that is up-casted to an
     omp_context pointer.  */
  copy_body_data cb;

  /* The tree of contexts corresponding to the encountered constructs.  */
  struct omp_context *outer;
  gimple *stmt;

  /* Map variables to fields in a structure that allows communication
     between sending and receiving threads.  */
  splay_tree field_map;
  tree record_type;
  tree sender_decl;
  tree receiver_decl;

  /* These are used just by task contexts, if task firstprivate fn is
     needed.  srecord_type is used to communicate from the thread
     that encountered the task construct to task firstprivate fn,
     record_type is allocated by GOMP_task, initialized by task firstprivate
     fn and passed to the task body fn.  */
  splay_tree sfield_map;
  tree srecord_type;

  /* A chain of variables to add to the top-level block surrounding the
     construct.  In the case of a parallel, this is in the child function.  */
  tree block_vars;

  /* Label to which GOMP_cancel{,llation_point} and explicit and implicit
     barriers should jump during the omplower pass.  */
  tree cancel_label;

  /* The sibling GIMPLE_OMP_FOR simd with _simt_ clause or NULL
     otherwise.  */
  gimple *simt_stmt;

  /* For task reductions registered in this context, a vector containing
     the length of the private copies block (if constant, otherwise NULL)
     and then offsets (if constant, otherwise NULL) for each entry.  */
  vec<tree> task_reductions;

  /* A hash map from the reduction clauses to the registered array
     elts.  */
  hash_map<tree, unsigned> *task_reduction_map;

  /* And a hash map from the lastprivate(conditional:) variables to their
     corresponding tracking loop iteration variables.  */
  hash_map<tree, tree> *lastprivate_conditional_map;

  /* Nesting depth of this context.  Used to beautify error messages re
     invalid gotos.  The outermost ctx is depth 1, with depth 0 being
     reserved for the main body of the function.  */
  int depth;

  /* True if this parallel directive is nested within another.  */
  bool is_nested;

  /* True if this construct can be cancelled.  */
  bool cancellable;

  /* True if lower_omp_1 should look up lastprivate conditional in parent
     context.  */
  bool combined_into_simd_safelen1;

  /* True if there is nested scan context with inclusive clause.  */
  bool scan_inclusive;

  /* True if there is nested scan context with exclusive clause.  */
  bool scan_exclusive;

  /* True in the second simd loop of for simd with inscan reductions.  */
  bool for_simd_scan_phase;

  /* True if there is order(concurrent) clause on the construct.  */
  bool order_concurrent;

  /* True if there is bind clause on the construct (i.e. a loop construct).  */
  bool loop_p;
};
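
/* Example (hedged; the actual field layout is decided in install_var_field
   below): for "#pragma omp parallel shared(a) firstprivate(b)" the pass
   might build

     struct .omp_data_s { int *a; int b; };

   SENDER_DECL (.omp_data_o) is the instance filled in by the encountering
   thread; RECEIVER_DECL (.omp_data_i) is the parameter through which the
   child function reads it.  */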
static splay_tree all_contexts;
static int taskreg_nesting_level;
static int target_nesting_level;
static bitmap task_shared_vars;
static bitmap global_nonaddressable_vars;
static vec<omp_context *> taskreg_contexts;

static void scan_omp (gimple_seq *, omp_context *);
static tree scan_omp_1_op (tree *, int *, void *);
#define WALK_SUBSTMTS  \
    case GIMPLE_BIND: \
    case GIMPLE_TRY: \
    case GIMPLE_CATCH: \
    case GIMPLE_EH_FILTER: \
    case GIMPLE_TRANSACTION: \
      /* The sub-statements for these should be walked.  */ \
      *handled_ops_p = false; \
      break;
/* Return true if CTX corresponds to an oacc parallel region.  */

static bool
is_oacc_parallel (omp_context *ctx)
{
  enum gimple_code outer_type = gimple_code (ctx->stmt);
  return ((outer_type == GIMPLE_OMP_TARGET)
	  && (gimple_omp_target_kind (ctx->stmt)
	      == GF_OMP_TARGET_KIND_OACC_PARALLEL));
}

/* Return true if CTX corresponds to an oacc kernels region.  */

static bool
is_oacc_kernels (omp_context *ctx)
{
  enum gimple_code outer_type = gimple_code (ctx->stmt);
  return ((outer_type == GIMPLE_OMP_TARGET)
	  && (gimple_omp_target_kind (ctx->stmt)
	      == GF_OMP_TARGET_KIND_OACC_KERNELS));
}
/* If DECL is the artificial dummy VAR_DECL created for non-static
   data member privatization, return the underlying "this" parameter,
   otherwise return NULL.  */

tree
omp_member_access_dummy_var (tree decl)
{
  if (!VAR_P (decl)
      || !DECL_ARTIFICIAL (decl)
      || !DECL_IGNORED_P (decl)
      || !DECL_HAS_VALUE_EXPR_P (decl)
      || !lang_hooks.decls.omp_disregard_value_expr (decl, false))
    return NULL_TREE;

  tree v = DECL_VALUE_EXPR (decl);
  if (TREE_CODE (v) != COMPONENT_REF)
    return NULL_TREE;

  while (1)
    switch (TREE_CODE (v))
      {
      case COMPONENT_REF:
      case MEM_REF:
      case INDIRECT_REF:
      CASE_CONVERT:
      case POINTER_PLUS_EXPR:
	v = TREE_OPERAND (v, 0);
	continue;
      case PARM_DECL:
	if (DECL_CONTEXT (v) == current_function_decl
	    && DECL_ARTIFICIAL (v)
	    && TREE_CODE (TREE_TYPE (v)) == POINTER_TYPE)
	  return v;
	return NULL_TREE;
      default:
	return NULL_TREE;
      }
}
/* Helper for unshare_and_remap, called through walk_tree.  */

static tree
unshare_and_remap_1 (tree *tp, int *walk_subtrees, void *data)
{
  tree *pair = (tree *) data;
  if (*tp == pair[0])
    {
      *tp = unshare_expr (pair[1]);
      *walk_subtrees = 0;
    }
  else if (IS_TYPE_OR_DECL_P (*tp))
    *walk_subtrees = 0;
  return NULL_TREE;
}

/* Return unshare_expr (X) with all occurrences of FROM
   replaced with TO.  */

static tree
unshare_and_remap (tree x, tree from, tree to)
{
  tree pair[2] = { from, to };
  x = unshare_expr (x);
  walk_tree (&x, unshare_and_remap_1, pair, NULL);
  return x;
}
/* Convenience function for calling scan_omp_1_op on tree operands.  */

static inline tree
scan_omp_op (tree *tp, omp_context *ctx)
{
  struct walk_stmt_info wi;

  memset (&wi, 0, sizeof (wi));
  wi.info = ctx;
  wi.want_locations = true;

  return walk_tree (tp, scan_omp_1_op, &wi, NULL);
}
static void lower_omp (gimple_seq *, omp_context *);
static tree lookup_decl_in_outer_ctx (tree, omp_context *);
static tree maybe_lookup_decl_in_outer_ctx (tree, omp_context *);

/* Return true if CTX is for an omp parallel.  */

static inline bool
is_parallel_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL;
}

/* Return true if CTX is for an omp task.  */

static inline bool
is_task_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_TASK;
}

/* Return true if CTX is for an omp taskloop.  */

static inline bool
is_taskloop_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
	 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP;
}

/* Return true if CTX is for a host omp teams.  */

static inline bool
is_host_teams_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
	 && gimple_omp_teams_host (as_a <gomp_teams *> (ctx->stmt));
}

/* Return true if CTX is for an omp parallel or omp task or host omp teams
   (the last one is strictly not a task region in OpenMP speak, but we
   need to treat it similarly).  */

static inline bool
is_taskreg_ctx (omp_context *ctx)
{
  return is_parallel_ctx (ctx) || is_task_ctx (ctx) || is_host_teams_ctx (ctx);
}

/* Return true if EXPR is variable sized.  */

static inline bool
is_variable_sized (const_tree expr)
{
  return !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
}
/* Lookup variables.  The "maybe" forms allow the variable not to have
   been entered; the plain forms assert that it must have been.  */

static inline tree
lookup_decl (tree var, omp_context *ctx)
{
  tree *n = ctx->cb.decl_map->get (var);
  return *n;
}

static inline tree
maybe_lookup_decl (const_tree var, omp_context *ctx)
{
  tree *n = ctx->cb.decl_map->get (const_cast<tree> (var));
  return n ? *n : NULL_TREE;
}

static inline tree
lookup_field (tree var, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->field_map, (splay_tree_key) var);
  return (tree) n->value;
}

static inline tree
lookup_sfield (splay_tree_key key, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->sfield_map
			 ? ctx->sfield_map : ctx->field_map, key);
  return (tree) n->value;
}

static inline tree
lookup_sfield (tree var, omp_context *ctx)
{
  return lookup_sfield ((splay_tree_key) var, ctx);
}

static inline tree
maybe_lookup_field (splay_tree_key key, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->field_map, key);
  return n ? (tree) n->value : NULL_TREE;
}

static inline tree
maybe_lookup_field (tree var, omp_context *ctx)
{
  return maybe_lookup_field ((splay_tree_key) var, ctx);
}
/* Return true if DECL should be copied by pointer.  SHARED_CTX is
   the parallel context if DECL is to be shared.  */

static bool
use_pointer_for_field (tree decl, omp_context *shared_ctx)
{
  if (AGGREGATE_TYPE_P (TREE_TYPE (decl))
      || TYPE_ATOMIC (TREE_TYPE (decl)))
    return true;

  /* We can only use copy-in/copy-out semantics for shared variables
     when we know the value is not accessible from an outer scope.  */
  if (shared_ctx)
    {
      gcc_assert (!is_gimple_omp_oacc (shared_ctx->stmt));

      /* ??? Trivially accessible from anywhere.  But why would we even
	 be passing an address in this case?  Should we simply assert
	 this to be false, or should we have a cleanup pass that removes
	 these from the list of mappings?  */
      if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, shared_ctx)))
	return true;

      /* For variables with DECL_HAS_VALUE_EXPR_P set, we cannot tell
	 without analyzing the expression whether or not its location
	 is accessible to anyone else.  In the case of nested parallel
	 regions it certainly may be.  */
      if (TREE_CODE (decl) != RESULT_DECL && DECL_HAS_VALUE_EXPR_P (decl))
	return true;

      /* Do not use copy-in/copy-out for variables that have their
	 address taken.  */
      if (is_global_var (decl))
	{
	  /* For file scope vars, track whether we've seen them as
	     non-addressable initially and in that case, keep the same
	     answer for the duration of the pass, even when they are made
	     addressable later on e.g. through reduction expansion.  Global
	     variables which weren't addressable before the pass will not
	     have their privatized copies address taken.  See PR91216.  */
	  if (!TREE_ADDRESSABLE (decl))
	    {
	      if (!global_nonaddressable_vars)
		global_nonaddressable_vars = BITMAP_ALLOC (NULL);
	      bitmap_set_bit (global_nonaddressable_vars, DECL_UID (decl));
	    }
	  else if (!global_nonaddressable_vars
		   || !bitmap_bit_p (global_nonaddressable_vars,
				     DECL_UID (decl)))
	    return true;
	}
      else if (TREE_ADDRESSABLE (decl))
	return true;

      /* lower_send_shared_vars only uses copy-in, but not copy-out
	 for these.  */
      if (TREE_READONLY (decl)
	  || ((TREE_CODE (decl) == RESULT_DECL
	       || TREE_CODE (decl) == PARM_DECL)
	      && DECL_BY_REFERENCE (decl)))
	return false;

      /* Disallow copy-in/out in nested parallel if
	 decl is shared in outer parallel, otherwise
	 each thread could store the shared variable
	 in its own copy-in location, making the
	 variable no longer really shared.  */
      if (shared_ctx->is_nested)
	{
	  omp_context *up;

	  for (up = shared_ctx->outer; up; up = up->outer)
	    if (is_taskreg_ctx (up) && maybe_lookup_decl (decl, up))
	      break;

	  if (up)
	    {
	      tree c;

	      for (c = gimple_omp_taskreg_clauses (up->stmt);
		   c; c = OMP_CLAUSE_CHAIN (c))
		if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
		    && OMP_CLAUSE_DECL (c) == decl)
		  break;

	      if (c)
		goto maybe_mark_addressable_and_ret;
	    }
	}

      /* For tasks avoid using copy-in/out.  As tasks can be
	 deferred or executed in a different thread, when GOMP_task
	 returns, the task hasn't necessarily terminated.  */
      if (is_task_ctx (shared_ctx))
	{
	  tree outer;
	maybe_mark_addressable_and_ret:
	  outer = maybe_lookup_decl_in_outer_ctx (decl, shared_ctx);
	  if (is_gimple_reg (outer) && !omp_member_access_dummy_var (outer))
	    {
	      /* Taking address of OUTER in lower_send_shared_vars
		 might need regimplification of everything that uses the
		 variable.  */
	      if (!task_shared_vars)
		task_shared_vars = BITMAP_ALLOC (NULL);
	      bitmap_set_bit (task_shared_vars, DECL_UID (outer));
	      TREE_ADDRESSABLE (outer) = 1;
	    }
	  return true;
	}
    }

  return false;
}
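
/* Example of the policy above (illustrative): a small non-addressable
   scalar shared on a parallel can use copy-in/copy-out, i.e. be copied
   into the record by value and copied back at the end of the region;
   an addressable variable, or one shared with a task that may still be
   running after GOMP_task returns, must instead be passed by pointer so
   all threads operate on the single original object.  */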
/* Construct a new automatic decl similar to VAR.  */

static tree
omp_copy_decl_2 (tree var, tree name, tree type, omp_context *ctx)
{
  tree copy = copy_var_decl (var, name, type);

  DECL_CONTEXT (copy) = current_function_decl;
  DECL_CHAIN (copy) = ctx->block_vars;
  /* If VAR is listed in task_shared_vars, it means it wasn't
     originally addressable and is addressable only because the task
     needs to take its address.  But we don't need to take the address
     of privatizations from that var.  */
  if (TREE_ADDRESSABLE (var)
      && ((task_shared_vars
	   && bitmap_bit_p (task_shared_vars, DECL_UID (var)))
	  || (global_nonaddressable_vars
	      && bitmap_bit_p (global_nonaddressable_vars, DECL_UID (var)))))
    TREE_ADDRESSABLE (copy) = 0;
  ctx->block_vars = copy;

  return copy;
}

static tree
omp_copy_decl_1 (tree var, omp_context *ctx)
{
  return omp_copy_decl_2 (var, DECL_NAME (var), TREE_TYPE (var), ctx);
}

/* Build COMPONENT_REF and set TREE_THIS_VOLATILE and TREE_READONLY on it
   as appropriate.  */

static tree
omp_build_component_ref (tree obj, tree field)
{
  tree ret = build3 (COMPONENT_REF, TREE_TYPE (field), obj, field, NULL);
  if (TREE_THIS_VOLATILE (field))
    TREE_THIS_VOLATILE (ret) |= 1;
  if (TREE_READONLY (field))
    TREE_READONLY (ret) |= 1;
  return ret;
}
/* Build tree nodes to access the field for VAR on the receiver side.  */

static tree
build_receiver_ref (tree var, bool by_ref, omp_context *ctx)
{
  tree x, field = lookup_field (var, ctx);

  /* If the receiver record type was remapped in the child function,
     remap the field into the new record type.  */
  x = maybe_lookup_field (field, ctx);
  if (x != NULL)
    field = x;

  x = build_simple_mem_ref (ctx->receiver_decl);
  TREE_THIS_NOTRAP (x) = 1;
  x = omp_build_component_ref (x, field);
  if (by_ref)
    {
      x = build_simple_mem_ref (x);
      TREE_THIS_NOTRAP (x) = 1;
    }

  return x;
}
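
/* Illustration (assuming the usual receiver naming): the tree built here
   corresponds to .omp_data_i->var when BY_REF is false and to
   *.omp_data_i->var when BY_REF is true.  */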
/* Build tree nodes to access VAR in the scope outer to CTX.  In the case
   of a parallel, this is a component reference; for workshare constructs
   this is some variable.  */

static tree
build_outer_var_ref (tree var, omp_context *ctx,
		     enum omp_clause_code code = OMP_CLAUSE_ERROR)
{
  tree x;
  omp_context *outer = ctx->outer;
  while (outer && gimple_code (outer->stmt) == GIMPLE_OMP_TASKGROUP)
    outer = outer->outer;

  if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
    x = var;
  else if (is_variable_sized (var))
    {
      x = TREE_OPERAND (DECL_VALUE_EXPR (var), 0);
      x = build_outer_var_ref (x, ctx, code);
      x = build_simple_mem_ref (x);
    }
  else if (is_taskreg_ctx (ctx))
    {
      bool by_ref = use_pointer_for_field (var, NULL);
      x = build_receiver_ref (var, by_ref, ctx);
    }
  else if ((gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
	    && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
	   || ctx->loop_p
	   || (code == OMP_CLAUSE_PRIVATE
	       && (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
		   || gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS
		   || gimple_code (ctx->stmt) == GIMPLE_OMP_SINGLE)))
    {
      /* #pragma omp simd isn't a worksharing construct, and can reference
	 even private vars in its linear etc. clauses.
	 Similarly for OMP_CLAUSE_PRIVATE with outer ref, that can refer
	 to private vars in all worksharing constructs.  */
      x = NULL_TREE;
      if (outer && is_taskreg_ctx (outer))
	x = lookup_decl (var, outer);
      else if (outer)
	x = maybe_lookup_decl_in_outer_ctx (var, ctx);
      if (x == NULL_TREE)
	x = var;
    }
  else if (code == OMP_CLAUSE_LASTPRIVATE && is_taskloop_ctx (ctx))
    {
      gcc_assert (outer);
      splay_tree_node n
	= splay_tree_lookup (outer->field_map,
			     (splay_tree_key) &DECL_UID (var));
      if (n == NULL)
	{
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, outer)))
	    x = var;
	  else
	    x = lookup_decl (var, outer);
	}
      else
	{
	  tree field = (tree) n->value;
	  /* If the receiver record type was remapped in the child function,
	     remap the field into the new record type.  */
	  x = maybe_lookup_field (field, outer);
	  if (x != NULL)
	    field = x;

	  x = build_simple_mem_ref (outer->receiver_decl);
	  x = omp_build_component_ref (x, field);
	  if (use_pointer_for_field (var, outer))
	    x = build_simple_mem_ref (x);
	}
    }
  else if (outer)
    {
      if (gimple_code (outer->stmt) == GIMPLE_OMP_GRID_BODY)
	{
	  outer = outer->outer;
	  gcc_assert (outer
		      && gimple_code (outer->stmt) != GIMPLE_OMP_GRID_BODY);
	}
      x = lookup_decl (var, outer);
    }
  else if (omp_is_reference (var))
    /* This can happen with orphaned constructs.  If var is reference, it is
       possible it is shared and as such valid.  */
    x = var;
  else if (omp_member_access_dummy_var (var))
    x = var;
  else
    gcc_unreachable ();

  if (x == var)
    {
      tree t = omp_member_access_dummy_var (var);
      if (t)
	{
	  x = DECL_VALUE_EXPR (var);
	  tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
	  if (o != t)
	    x = unshare_and_remap (x, t, o);
	  else
	    x = unshare_expr (x);
	}
    }

  if (omp_is_reference (var))
    x = build_simple_mem_ref (x);

  return x;
}
/* Build tree nodes to access the field for VAR on the sender side.  */

static tree
build_sender_ref (splay_tree_key key, omp_context *ctx)
{
  tree field = lookup_sfield (key, ctx);
  return omp_build_component_ref (ctx->sender_decl, field);
}

static tree
build_sender_ref (tree var, omp_context *ctx)
{
  return build_sender_ref ((splay_tree_key) var, ctx);
}
/* Add a new field for VAR inside the structure CTX->SENDER_DECL.  MASK
   is a bit mask: bit 1 installs the field in CTX->RECORD_TYPE/FIELD_MAP,
   bit 2 in CTX->SRECORD_TYPE/SFIELD_MAP, bit 4 requests a pointer to a
   pointer for array types, and bit 8 keys the mapping off &DECL_UID (VAR)
   rather than VAR itself.  */

static void
install_var_field (tree var, bool by_ref, int mask, omp_context *ctx)
{
  tree field, type, sfield = NULL_TREE;
  splay_tree_key key = (splay_tree_key) var;

  if ((mask & 8) != 0)
    {
      key = (splay_tree_key) &DECL_UID (var);
      gcc_checking_assert (key != (splay_tree_key) var);
    }
  gcc_assert ((mask & 1) == 0
	      || !splay_tree_lookup (ctx->field_map, key));
  gcc_assert ((mask & 2) == 0 || !ctx->sfield_map
	      || !splay_tree_lookup (ctx->sfield_map, key));
  gcc_assert ((mask & 3) == 3
	      || !is_gimple_omp_oacc (ctx->stmt));

  type = TREE_TYPE (var);
  /* Prevent redeclaring the var in the split-off function with a restrict
     pointer type.  Note that we only clear type itself, restrict qualifiers in
     the pointed-to type will be ignored by points-to analysis.  */
  if (POINTER_TYPE_P (type)
      && TYPE_RESTRICT (type))
    type = build_qualified_type (type, TYPE_QUALS (type) & ~TYPE_QUAL_RESTRICT);

  if (mask & 4)
    {
      gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
      type = build_pointer_type (build_pointer_type (type));
    }
  else if (by_ref)
    type = build_pointer_type (type);
  else if ((mask & 3) == 1 && omp_is_reference (var))
    type = TREE_TYPE (type);

  field = build_decl (DECL_SOURCE_LOCATION (var),
		      FIELD_DECL, DECL_NAME (var), type);

  /* Remember what variable this field was created for.  This does have a
     side effect of making dwarf2out ignore this member, so for helpful
     debugging we clear it later in delete_omp_context.  */
  DECL_ABSTRACT_ORIGIN (field) = var;
  if (type == TREE_TYPE (var))
    {
      SET_DECL_ALIGN (field, DECL_ALIGN (var));
      DECL_USER_ALIGN (field) = DECL_USER_ALIGN (var);
      TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (var);
    }
  else
    SET_DECL_ALIGN (field, TYPE_ALIGN (type));

  if ((mask & 3) == 3)
    {
      insert_field_into_struct (ctx->record_type, field);
      if (ctx->srecord_type)
	{
	  sfield = build_decl (DECL_SOURCE_LOCATION (var),
			       FIELD_DECL, DECL_NAME (var), type);
	  DECL_ABSTRACT_ORIGIN (sfield) = var;
	  SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
	  DECL_USER_ALIGN (sfield) = DECL_USER_ALIGN (field);
	  TREE_THIS_VOLATILE (sfield) = TREE_THIS_VOLATILE (field);
	  insert_field_into_struct (ctx->srecord_type, sfield);
	}
    }
  else
    {
      if (ctx->srecord_type == NULL_TREE)
	{
	  tree t;

	  ctx->srecord_type = lang_hooks.types.make_type (RECORD_TYPE);
	  ctx->sfield_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
	  for (t = TYPE_FIELDS (ctx->record_type); t ; t = TREE_CHAIN (t))
	    {
	      sfield = build_decl (DECL_SOURCE_LOCATION (t),
				   FIELD_DECL, DECL_NAME (t), TREE_TYPE (t));
	      DECL_ABSTRACT_ORIGIN (sfield) = DECL_ABSTRACT_ORIGIN (t);
	      insert_field_into_struct (ctx->srecord_type, sfield);
	      splay_tree_insert (ctx->sfield_map,
				 (splay_tree_key) DECL_ABSTRACT_ORIGIN (t),
				 (splay_tree_value) sfield);
	    }
	}
      sfield = field;
      insert_field_into_struct ((mask & 1) ? ctx->record_type
				: ctx->srecord_type, field);
    }

  if (mask & 1)
    splay_tree_insert (ctx->field_map, key, (splay_tree_value) field);
  if ((mask & 2) && ctx->sfield_map)
    splay_tree_insert (ctx->sfield_map, key, (splay_tree_value) sfield);
}
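
/* Usage sketch (mask values as used by the callers in this file):
   install_var_field (decl, by_ref, 3, ctx) creates the field in both
   maps; masks 1 and 2 are used separately by task contexts, and
   mask 11 (8|2|1) additionally keys the entry off &DECL_UID (decl).  */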
static tree
install_var_local (tree var, omp_context *ctx)
{
  tree new_var = omp_copy_decl_1 (var, ctx);
  insert_decl_map (&ctx->cb, var, new_var);
  return new_var;
}

/* Adjust the replacement for DECL in CTX for the new context.  This means
   copying the DECL_VALUE_EXPR, and fixing up the type.  */

static void
fixup_remapped_decl (tree decl, omp_context *ctx, bool private_debug)
{
  tree new_decl, size;

  new_decl = lookup_decl (decl, ctx);

  TREE_TYPE (new_decl) = remap_type (TREE_TYPE (decl), &ctx->cb);

  if ((!TREE_CONSTANT (DECL_SIZE (new_decl)) || private_debug)
      && DECL_HAS_VALUE_EXPR_P (decl))
    {
      tree ve = DECL_VALUE_EXPR (decl);
      walk_tree (&ve, copy_tree_body_r, &ctx->cb, NULL);
      SET_DECL_VALUE_EXPR (new_decl, ve);
      DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
    }

  if (!TREE_CONSTANT (DECL_SIZE (new_decl)))
    {
      size = remap_decl (DECL_SIZE (decl), &ctx->cb);
      if (size == error_mark_node)
	size = TYPE_SIZE (TREE_TYPE (new_decl));
      DECL_SIZE (new_decl) = size;

      size = remap_decl (DECL_SIZE_UNIT (decl), &ctx->cb);
      if (size == error_mark_node)
	size = TYPE_SIZE_UNIT (TREE_TYPE (new_decl));
      DECL_SIZE_UNIT (new_decl) = size;
    }
}
/* The callback for remap_decl.  Search all containing contexts for a
   mapping of the variable; this avoids having to duplicate the splay
   tree ahead of time.  We know a mapping doesn't already exist in the
   given context.  Create new mappings to implement default semantics.  */

static tree
omp_copy_decl (tree var, copy_body_data *cb)
{
  omp_context *ctx = (omp_context *) cb;
  tree new_var;

  if (TREE_CODE (var) == LABEL_DECL)
    {
      if (FORCED_LABEL (var) || DECL_NONLOCAL (var))
	return var;
      new_var = create_artificial_label (DECL_SOURCE_LOCATION (var));
      DECL_CONTEXT (new_var) = current_function_decl;
      insert_decl_map (&ctx->cb, var, new_var);
      return new_var;
    }

  while (!is_taskreg_ctx (ctx))
    {
      ctx = ctx->outer;
      if (ctx == NULL)
	return var;
      new_var = maybe_lookup_decl (var, ctx);
      if (new_var)
	return new_var;
    }

  if (is_global_var (var) || decl_function_context (var) != ctx->cb.src_fn)
    return var;

  return error_mark_node;
}
/* Create a new context, with OUTER_CTX being the surrounding context.  */

static omp_context *
new_omp_context (gimple *stmt, omp_context *outer_ctx)
{
  omp_context *ctx = XCNEW (omp_context);

  splay_tree_insert (all_contexts, (splay_tree_key) stmt,
		     (splay_tree_value) ctx);
  ctx->stmt = stmt;

  if (outer_ctx)
    {
      ctx->outer = outer_ctx;
      ctx->cb = outer_ctx->cb;
      ctx->cb.block = NULL;
      ctx->depth = outer_ctx->depth + 1;
    }
  else
    {
      ctx->cb.src_fn = current_function_decl;
      ctx->cb.dst_fn = current_function_decl;
      ctx->cb.src_node = cgraph_node::get (current_function_decl);
      gcc_checking_assert (ctx->cb.src_node);
      ctx->cb.dst_node = ctx->cb.src_node;
      ctx->cb.src_cfun = cfun;
      ctx->cb.copy_decl = omp_copy_decl;
      ctx->cb.eh_lp_nr = 0;
      ctx->cb.transform_call_graph_edges = CB_CGE_MOVE;
      ctx->cb.adjust_array_error_bounds = true;
      ctx->cb.dont_remap_vla_if_no_change = true;
      ctx->depth = 1;
    }

  ctx->cb.decl_map = new hash_map<tree, tree>;

  return ctx;
}
static gimple_seq maybe_catch_exception (gimple_seq);

/* Finalize task copyfn.  */

static void
finalize_task_copyfn (gomp_task *task_stmt)
{
  struct function *child_cfun;
  tree child_fn;
  gimple_seq seq = NULL, new_seq;
  gbind *bind;

  child_fn = gimple_omp_task_copy_fn (task_stmt);
  if (child_fn == NULL_TREE)
    return;

  child_cfun = DECL_STRUCT_FUNCTION (child_fn);
  DECL_STRUCT_FUNCTION (child_fn)->curr_properties = cfun->curr_properties;

  push_cfun (child_cfun);
  bind = gimplify_body (child_fn, false);
  gimple_seq_add_stmt (&seq, bind);
  new_seq = maybe_catch_exception (seq);
  if (new_seq != seq)
    {
      bind = gimple_build_bind (NULL, new_seq, NULL);
      seq = NULL;
      gimple_seq_add_stmt (&seq, bind);
    }
  gimple_set_body (child_fn, seq);
  pop_cfun ();

  /* Inform the callgraph about the new function.  */
  cgraph_node *node = cgraph_node::get_create (child_fn);
  node->parallelized_function = 1;
  cgraph_node::add_new_function (child_fn, false);
}
/* Destroy an omp_context data structure.  Called through the splay tree
   value delete callback.  */

static void
delete_omp_context (splay_tree_value value)
{
  omp_context *ctx = (omp_context *) value;

  delete ctx->cb.decl_map;

  if (ctx->field_map)
    splay_tree_delete (ctx->field_map);
  if (ctx->sfield_map)
    splay_tree_delete (ctx->sfield_map);

  /* We hijacked DECL_ABSTRACT_ORIGIN earlier.  We need to clear it before
     it produces corrupt debug information.  */
  if (ctx->record_type)
    {
      tree t;
      for (t = TYPE_FIELDS (ctx->record_type); t ; t = DECL_CHAIN (t))
	DECL_ABSTRACT_ORIGIN (t) = NULL;
    }
  if (ctx->srecord_type)
    {
      tree t;
      for (t = TYPE_FIELDS (ctx->srecord_type); t ; t = DECL_CHAIN (t))
	DECL_ABSTRACT_ORIGIN (t) = NULL;
    }

  if (is_task_ctx (ctx))
    finalize_task_copyfn (as_a <gomp_task *> (ctx->stmt));

  if (ctx->task_reduction_map)
    {
      ctx->task_reductions.release ();
      delete ctx->task_reduction_map;
    }

  delete ctx->lastprivate_conditional_map;

  XDELETE (ctx);
}
/* Fix up RECEIVER_DECL with a type that has been remapped to the child
   context.  */

static void
fixup_child_record_type (omp_context *ctx)
{
  tree f, type = ctx->record_type;

  if (!ctx->receiver_decl)
    return;
  /* ??? It isn't sufficient to just call remap_type here, because
     variably_modified_type_p doesn't work the way we expect for
     record types.  Testing each field for whether it needs remapping
     and creating a new record by hand works, however.  */
  for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
    if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
      break;
  if (f)
    {
      tree name, new_fields = NULL;

      type = lang_hooks.types.make_type (RECORD_TYPE);
      name = DECL_NAME (TYPE_NAME (ctx->record_type));
      name = build_decl (DECL_SOURCE_LOCATION (ctx->receiver_decl),
			 TYPE_DECL, name, type);
      TYPE_NAME (type) = name;

      for (f = TYPE_FIELDS (ctx->record_type); f ; f = DECL_CHAIN (f))
	{
	  tree new_f = copy_node (f);
	  DECL_CONTEXT (new_f) = type;
	  TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &ctx->cb);
	  DECL_CHAIN (new_f) = new_fields;
	  walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &ctx->cb, NULL);
	  walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r,
		     &ctx->cb, NULL);
	  walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
		     &ctx->cb, NULL);
	  new_fields = new_f;

	  /* Arrange to be able to look up the receiver field
	     given the sender field.  */
	  splay_tree_insert (ctx->field_map, (splay_tree_key) f,
			     (splay_tree_value) new_f);
	}
      TYPE_FIELDS (type) = nreverse (new_fields);
      layout_type (type);
    }

  /* In a target region we never modify any of the pointers in *.omp_data_i,
     so attempt to help the optimizers.  */
  if (is_gimple_omp_offloaded (ctx->stmt))
    type = build_qualified_type (type, TYPE_QUAL_CONST);

  TREE_TYPE (ctx->receiver_decl)
    = build_qualified_type (build_reference_type (type), TYPE_QUAL_RESTRICT);
}
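
/* Net effect (sketch, assuming the usual .omp_data_s naming): the receiver
   parameter ends up with a restrict-qualified reference type to the
   (possibly remapped) record, additionally const-qualified for offloaded
   regions, so the optimizers may assume the incoming data block is neither
   aliased nor modified through it.  */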
/* Instantiate decls as necessary in CTX to satisfy the data sharing
   specified by CLAUSES.  */

static void
scan_sharing_clauses (tree clauses, omp_context *ctx)
{
  tree c, decl;
  bool scan_array_reductions = false;

  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      bool by_ref;

      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_PRIVATE:
	  decl = OMP_CLAUSE_DECL (c);
	  if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
	    goto do_private;
	  else if (!is_variable_sized (decl))
	    install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_SHARED:
	  decl = OMP_CLAUSE_DECL (c);
	  /* Ignore shared directives in teams construct inside of
	     target construct.  */
	  if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
	      && !is_host_teams_ctx (ctx))
	    {
	      /* Global variables don't need to be copied,
		 the receiver side will use them directly.  */
	      tree odecl = maybe_lookup_decl_in_outer_ctx (decl, ctx);
	      if (is_global_var (odecl))
		break;
	      insert_decl_map (&ctx->cb, decl, odecl);
	      break;
	    }
	  gcc_assert (is_taskreg_ctx (ctx));
	  gcc_assert (!COMPLETE_TYPE_P (TREE_TYPE (decl))
		      || !is_variable_sized (decl));
	  /* Global variables don't need to be copied,
	     the receiver side will use them directly.  */
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
	    break;
	  if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	    {
	      use_pointer_for_field (decl, ctx);
	      break;
	    }
	  by_ref = use_pointer_for_field (decl, NULL);
	  if ((! TREE_READONLY (decl) && !OMP_CLAUSE_SHARED_READONLY (c))
	      || TREE_ADDRESSABLE (decl)
	      || by_ref
	      || omp_is_reference (decl))
	    {
	      by_ref = use_pointer_for_field (decl, ctx);
	      install_var_field (decl, by_ref, 3, ctx);
	      install_var_local (decl, ctx);
	      break;
	    }
	  /* We don't need to copy const scalar vars back.  */
	  OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_FIRSTPRIVATE);
	  goto do_private;

	case OMP_CLAUSE_REDUCTION:
	case OMP_CLAUSE_IN_REDUCTION:
	  decl = OMP_CLAUSE_DECL (c);
	  if (TREE_CODE (decl) == MEM_REF)
	    {
	      tree t = TREE_OPERAND (decl, 0);
	      if (TREE_CODE (t) == POINTER_PLUS_EXPR)
		t = TREE_OPERAND (t, 0);
	      if (TREE_CODE (t) == INDIRECT_REF
		  || TREE_CODE (t) == ADDR_EXPR)
		t = TREE_OPERAND (t, 0);
	      install_var_local (t, ctx);
	      if (is_taskreg_ctx (ctx)
		  && (!is_global_var (maybe_lookup_decl_in_outer_ctx (t, ctx))
		      || (is_task_ctx (ctx)
			  && (TREE_CODE (TREE_TYPE (t)) == POINTER_TYPE
			      || (TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE
				  && (TREE_CODE (TREE_TYPE (TREE_TYPE (t)))
				      == POINTER_TYPE)))))
		  && !is_variable_sized (t)
		  && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
		      || (!OMP_CLAUSE_REDUCTION_TASK (c)
			  && !is_task_ctx (ctx))))
		{
		  by_ref = use_pointer_for_field (t, NULL);
		  if (is_task_ctx (ctx)
		      && TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE
		      && TREE_CODE (TREE_TYPE (TREE_TYPE (t))) == POINTER_TYPE)
		    {
		      install_var_field (t, false, 1, ctx);
		      install_var_field (t, by_ref, 2, ctx);
		    }
		  else
		    install_var_field (t, by_ref, 3, ctx);
		}
	      break;
	    }
	  if (is_task_ctx (ctx)
	      || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
		  && OMP_CLAUSE_REDUCTION_TASK (c)
		  && is_parallel_ctx (ctx)))
	    {
	      /* Global variables don't need to be copied,
		 the receiver side will use them directly.  */
	      if (!is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
		{
		  by_ref = use_pointer_for_field (decl, ctx);
		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION)
		    install_var_field (decl, by_ref, 3, ctx);
		}
	      install_var_local (decl, ctx);
	      break;
	    }
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	      && OMP_CLAUSE_REDUCTION_TASK (c))
	    {
	      install_var_local (decl, ctx);
	      break;
	    }
	  goto do_private;

	case OMP_CLAUSE_LASTPRIVATE:
	  /* Let the corresponding firstprivate clause create
	     the variable.  */
	  if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
	    break;
	  /* FALLTHRU */

	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_LINEAR:
	  decl = OMP_CLAUSE_DECL (c);
	do_private:
	  if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
	       || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
	      && is_gimple_omp_offloaded (ctx->stmt))
	    {
	      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
		install_var_field (decl, !omp_is_reference (decl), 3, ctx);
	      else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
		install_var_field (decl, true, 3, ctx);
	      else
		install_var_field (decl, false, 3, ctx);
	    }
	  if (is_variable_sized (decl))
	    {
	      if (is_task_ctx (ctx))
		install_var_field (decl, false, 1, ctx);
	      break;
	    }
	  else if (is_taskreg_ctx (ctx))
	    {
	      bool global
		= is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx));
	      by_ref = use_pointer_for_field (decl, NULL);

	      if (is_task_ctx (ctx)
		  && (global || by_ref || omp_is_reference (decl)))
		{
		  install_var_field (decl, false, 1, ctx);
		  if (!global)
		    install_var_field (decl, by_ref, 2, ctx);
		}
	      else if (!global)
		install_var_field (decl, by_ref, 3, ctx);
	    }
	  install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_USE_DEVICE_PTR:
	case OMP_CLAUSE_USE_DEVICE_ADDR:
	  decl = OMP_CLAUSE_DECL (c);
	  if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
	       && !omp_is_reference (decl)
	       && !omp_is_allocatable_or_ptr (decl))
	      || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	    install_var_field (decl, true, 11, ctx);
	  else
	    install_var_field (decl, false, 11, ctx);
	  if (DECL_SIZE (decl)
	      && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
	    {
	      tree decl2 = DECL_VALUE_EXPR (decl);
	      gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
	      decl2 = TREE_OPERAND (decl2, 0);
	      gcc_assert (DECL_P (decl2));
	      install_var_local (decl2, ctx);
	    }
	  install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_IS_DEVICE_PTR:
	  decl = OMP_CLAUSE_DECL (c);
	  goto do_private;

	case OMP_CLAUSE__LOOPTEMP_:
	case OMP_CLAUSE__REDUCTEMP_:
	  gcc_assert (is_taskreg_ctx (ctx));
	  decl = OMP_CLAUSE_DECL (c);
	  install_var_field (decl, false, 3, ctx);
	  install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_COPYIN:
	  decl = OMP_CLAUSE_DECL (c);
	  by_ref = use_pointer_for_field (decl, NULL);
	  install_var_field (decl, by_ref, 3, ctx);
	  break;

	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_DIST_SCHEDULE:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE_PRIORITY:
	case OMP_CLAUSE_GRAINSIZE:
	case OMP_CLAUSE_NUM_TASKS:
	case OMP_CLAUSE_NUM_GANGS:
	case OMP_CLAUSE_NUM_WORKERS:
	case OMP_CLAUSE_VECTOR_LENGTH:
	  if (ctx->outer)
	    scan_omp_op (&OMP_CLAUSE_OPERAND (c, 0), ctx->outer);
	  break;

	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	case OMP_CLAUSE_MAP:
	  if (ctx->outer)
	    scan_omp_op (&OMP_CLAUSE_SIZE (c), ctx->outer);
	  decl = OMP_CLAUSE_DECL (c);
	  /* Global variables with "omp declare target" attribute
	     don't need to be copied, the receiver side will use them
	     directly.  However, global variables with "omp declare target link"
	     attribute need to be copied, as do maps with the ALWAYS
	     modifier.  */
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && DECL_P (decl)
	      && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
		   && (OMP_CLAUSE_MAP_KIND (c)
		       != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
		  || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	      && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TO
	      && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_FROM
	      && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TOFROM
	      && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
	      && varpool_node::get_create (decl)->offloadable
	      && !lookup_attribute ("omp declare target link",
				    DECL_ATTRIBUTES (decl)))
	    break;
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER)
	    {
	      /* Ignore GOMP_MAP_POINTER kind for arrays in regions that are
		 not offloaded; there is nothing to map for those.  */
	      if (!is_gimple_omp_offloaded (ctx->stmt)
		  && !POINTER_TYPE_P (TREE_TYPE (decl))
		  && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
		break;
	    }
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
		  || (OMP_CLAUSE_MAP_KIND (c)
		      == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
	    {
	      if (TREE_CODE (decl) == COMPONENT_REF
		  || (TREE_CODE (decl) == INDIRECT_REF
		      && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
		      && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
			  == REFERENCE_TYPE)))
		break;
	      if (DECL_SIZE (decl)
		  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  install_var_local (decl2, ctx);
		}
	      install_var_local (decl, ctx);
	      break;
	    }
	  if (DECL_P (decl))
	    {
	      if (DECL_SIZE (decl)
		  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  install_var_field (decl2, true, 3, ctx);
		  install_var_local (decl2, ctx);
		  install_var_local (decl, ctx);
		}
	      else
		{
		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
		      && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
		      && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
		      && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
		    install_var_field (decl, true, 7, ctx);
		  else
		    install_var_field (decl, true, 3, ctx);
		  if (is_gimple_omp_offloaded (ctx->stmt)
		      && !OMP_CLAUSE_MAP_IN_REDUCTION (c))
		    install_var_local (decl, ctx);
		}
	    }
	  else
	    {
	      tree base = get_base_address (decl);
	      tree nc = OMP_CLAUSE_CHAIN (c);
	      if (DECL_P (base)
		  && nc != NULL_TREE
		  && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
		  && OMP_CLAUSE_DECL (nc) == base
		  && OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_POINTER
		  && integer_zerop (OMP_CLAUSE_SIZE (nc)))
		{
		  OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c) = 1;
		  OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (nc) = 1;
		}
	      else
		{
		  if (ctx->outer)
		    {
		      scan_omp_op (&OMP_CLAUSE_DECL (c), ctx->outer);
		      decl = OMP_CLAUSE_DECL (c);
		    }
		  gcc_assert (!splay_tree_lookup (ctx->field_map,
						  (splay_tree_key) decl));
		  tree field
		    = build_decl (OMP_CLAUSE_LOCATION (c),
				  FIELD_DECL, NULL_TREE, ptr_type_node);
		  SET_DECL_ALIGN (field, TYPE_ALIGN (ptr_type_node));
		  insert_field_into_struct (ctx->record_type, field);
		  splay_tree_insert (ctx->field_map, (splay_tree_key) decl,
				     (splay_tree_value) field);
		}
	    }
	  break;

	case OMP_CLAUSE__GRIDDIM_:
	  if (ctx->outer)
	    {
	      scan_omp_op (&OMP_CLAUSE__GRIDDIM__SIZE (c), ctx->outer);
	      scan_omp_op (&OMP_CLAUSE__GRIDDIM__GROUP (c), ctx->outer);
	    }
	  break;

	case OMP_CLAUSE_ORDER:
	  ctx->order_concurrent = true;
	  break;

	case OMP_CLAUSE_BIND:
	  ctx->loop_p = true;
	  break;

	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE_SIMDLEN:
	case OMP_CLAUSE_THREADS:
	case OMP_CLAUSE_SIMD:
	case OMP_CLAUSE_NOGROUP:
	case OMP_CLAUSE_DEFAULTMAP:
	case OMP_CLAUSE_ASYNC:
	case OMP_CLAUSE_WAIT:
	case OMP_CLAUSE_GANG:
	case OMP_CLAUSE_WORKER:
	case OMP_CLAUSE_VECTOR:
	case OMP_CLAUSE_INDEPENDENT:
	case OMP_CLAUSE_AUTO:
	case OMP_CLAUSE_SEQ:
	case OMP_CLAUSE_TILE:
	case OMP_CLAUSE__SIMT_:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_NONTEMPORAL:
	case OMP_CLAUSE_IF_PRESENT:
	case OMP_CLAUSE_FINALIZE:
	case OMP_CLAUSE_TASK_REDUCTION:
	  break;

	case OMP_CLAUSE_ALIGNED:
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_global_var (decl)
	      && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	    install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE__CONDTEMP_:
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_parallel_ctx (ctx))
	    {
	      install_var_field (decl, false, 3, ctx);
	      install_var_local (decl, ctx);
	    }
	  else if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
		   && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD
		   && !OMP_CLAUSE__CONDTEMP__ITER (c))
	    install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE__CACHE_:
	default:
	  gcc_unreachable ();
	}
    }
  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_LASTPRIVATE:
	  /* Let the corresponding firstprivate clause create
	     the variable.  */
	  if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
	    scan_array_reductions = true;
	  if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
	    break;
	  /* FALLTHRU */

	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_LINEAR:
	case OMP_CLAUSE_IS_DEVICE_PTR:
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_variable_sized (decl))
	    {
	      if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
		   || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
		  && is_gimple_omp_offloaded (ctx->stmt))
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  install_var_local (decl2, ctx);
		  fixup_remapped_decl (decl2, ctx, false);
		}
	      install_var_local (decl, ctx);
	    }
	  fixup_remapped_decl (decl, ctx,
			       OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
			       && OMP_CLAUSE_PRIVATE_DEBUG (c));
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
	      && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
	    scan_array_reductions = true;
	  break;

	case OMP_CLAUSE_REDUCTION:
	case OMP_CLAUSE_IN_REDUCTION:
	  decl = OMP_CLAUSE_DECL (c);
	  if (TREE_CODE (decl) != MEM_REF)
	    {
	      if (is_variable_sized (decl))
		install_var_local (decl, ctx);
	      fixup_remapped_decl (decl, ctx, false);
	    }
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	    scan_array_reductions = true;
	  break;

	case OMP_CLAUSE_TASK_REDUCTION:
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	    scan_array_reductions = true;
	  break;

	case OMP_CLAUSE_SHARED:
	  /* Ignore shared directives in teams construct inside of
	     target construct.  */
	  if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
	      && !is_host_teams_ctx (ctx))
	    break;
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
	    break;
	  if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	    {
	      if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl,
								 ctx->outer)))
		break;
	      bool by_ref = use_pointer_for_field (decl, ctx);
	      install_var_field (decl, by_ref, 11, ctx);
	      break;
	    }
	  fixup_remapped_decl (decl, ctx, false);
	  break;

	case OMP_CLAUSE_MAP:
	  if (!is_gimple_omp_offloaded (ctx->stmt))
	    break;
	  decl = OMP_CLAUSE_DECL (c);
	  if (DECL_P (decl)
	      && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
		   && (OMP_CLAUSE_MAP_KIND (c)
		       != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
		  || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	      && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
	      && varpool_node::get_create (decl)->offloadable)
	    break;
	  if (DECL_P (decl))
	    {
	      if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
		   || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
		  && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
		  && !COMPLETE_TYPE_P (TREE_TYPE (decl)))
		{
		  tree new_decl = lookup_decl (decl, ctx);
		  TREE_TYPE (new_decl)
		    = remap_type (TREE_TYPE (decl), &ctx->cb);
		}
	      else if (DECL_SIZE (decl)
		       && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  fixup_remapped_decl (decl2, ctx, false);
		  fixup_remapped_decl (decl, ctx, true);
		}
	      else
		fixup_remapped_decl (decl, ctx, false);
	    }
	  break;

	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_DIST_SCHEDULE:
	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE_SIMDLEN:
	case OMP_CLAUSE_ALIGNED:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE__LOOPTEMP_:
	case OMP_CLAUSE__REDUCTEMP_:
	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	case OMP_CLAUSE_PRIORITY:
	case OMP_CLAUSE_GRAINSIZE:
	case OMP_CLAUSE_NUM_TASKS:
	case OMP_CLAUSE_THREADS:
	case OMP_CLAUSE_SIMD:
	case OMP_CLAUSE_NOGROUP:
	case OMP_CLAUSE_DEFAULTMAP:
	case OMP_CLAUSE_ORDER:
	case OMP_CLAUSE_BIND:
	case OMP_CLAUSE_USE_DEVICE_PTR:
	case OMP_CLAUSE_USE_DEVICE_ADDR:
	case OMP_CLAUSE_NONTEMPORAL:
	case OMP_CLAUSE_ASYNC:
	case OMP_CLAUSE_WAIT:
	case OMP_CLAUSE_NUM_GANGS:
	case OMP_CLAUSE_NUM_WORKERS:
	case OMP_CLAUSE_VECTOR_LENGTH:
	case OMP_CLAUSE_GANG:
	case OMP_CLAUSE_WORKER:
	case OMP_CLAUSE_VECTOR:
	case OMP_CLAUSE_INDEPENDENT:
	case OMP_CLAUSE_AUTO:
	case OMP_CLAUSE_SEQ:
	case OMP_CLAUSE_TILE:
	case OMP_CLAUSE__GRIDDIM_:
	case OMP_CLAUSE__SIMT_:
	case OMP_CLAUSE_IF_PRESENT:
	case OMP_CLAUSE_FINALIZE:
	case OMP_CLAUSE__CONDTEMP_:
	  break;

	case OMP_CLAUSE__CACHE_:
	default:
	  gcc_unreachable ();
	}
    }
  gcc_checking_assert (!scan_array_reductions
		       || !is_gimple_omp_oacc (ctx->stmt));
  if (scan_array_reductions)
    {
      for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
	if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	     || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
	     || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
	    && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	  {
	    scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c), ctx);
	    scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
	  }
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
		 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
	  scan_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
		 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
	  scan_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
    }
}
/* Create a new name for omp child function.  Returns an identifier.  */

static tree
create_omp_child_function_name (bool task_copy)
{
  return clone_function_name_numbered (current_function_decl,
				       task_copy ? "_omp_cpyfn" : "_omp_fn");
}
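
/* For example, outlining from function "foo" produces identifiers such as
   foo._omp_fn.0, and task copy functions such as foo._omp_cpyfn.1 (the
   numeric suffix is assigned by clone_function_name_numbered).  */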
/* Return true if CTX may belong to offloaded code: either if current function
   is offloaded, or any enclosing context corresponds to a target region.  */

static bool
omp_maybe_offloaded_ctx (omp_context *ctx)
{
  if (cgraph_node::get (current_function_decl)->offloadable)
    return true;
  for (; ctx; ctx = ctx->outer)
    if (is_gimple_omp_offloaded (ctx->stmt))
      return true;
  return false;
}
/* Build a decl for the omp child function.  It will not contain a body
   yet, just the bare decl.  */

static void
create_omp_child_function (omp_context *ctx, bool task_copy)
{
  tree decl, type, name, t;

  name = create_omp_child_function_name (task_copy);
  if (task_copy)
    type = build_function_type_list (void_type_node, ptr_type_node,
				     ptr_type_node, NULL_TREE);
  else
    type = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);

  decl = build_decl (gimple_location (ctx->stmt), FUNCTION_DECL, name, type);

  gcc_checking_assert (!is_gimple_omp_oacc (ctx->stmt)
		       || !task_copy);
  if (!task_copy)
    ctx->cb.dst_fn = decl;
  else
    gimple_omp_task_set_copy_fn (ctx->stmt, decl);

  TREE_STATIC (decl) = 1;
  TREE_USED (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  DECL_IGNORED_P (decl) = 0;
  TREE_PUBLIC (decl) = 0;
  DECL_UNINLINABLE (decl) = 1;
  DECL_EXTERNAL (decl) = 0;
  DECL_CONTEXT (decl) = NULL_TREE;
  DECL_INITIAL (decl) = make_node (BLOCK);
  BLOCK_SUPERCONTEXT (DECL_INITIAL (decl)) = decl;
  DECL_ATTRIBUTES (decl) = DECL_ATTRIBUTES (current_function_decl);
  /* Remove omp declare simd attribute from the new attributes.  */
  if (tree a = lookup_attribute ("omp declare simd", DECL_ATTRIBUTES (decl)))
    {
      while (tree a2 = lookup_attribute ("omp declare simd", TREE_CHAIN (a)))
	a = a2;
      a = TREE_CHAIN (a);
      for (tree *p = &DECL_ATTRIBUTES (decl); *p != a;)
	if (is_attribute_p ("omp declare simd", get_attribute_name (*p)))
	  *p = TREE_CHAIN (*p);
	else
	  {
	    tree chain = TREE_CHAIN (*p);
	    *p = copy_node (*p);
	    p = &TREE_CHAIN (*p);
	    *p = chain;
	  }
    }
  DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
    = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (current_function_decl);
  DECL_FUNCTION_SPECIFIC_TARGET (decl)
    = DECL_FUNCTION_SPECIFIC_TARGET (current_function_decl);
  DECL_FUNCTION_VERSIONED (decl)
    = DECL_FUNCTION_VERSIONED (current_function_decl);

  if (omp_maybe_offloaded_ctx (ctx))
    {
      cgraph_node::get_create (decl)->offloadable = 1;
      if (ENABLE_OFFLOADING)
	g->have_offload = true;
    }

  if (cgraph_node::get_create (decl)->offloadable
      && !lookup_attribute ("omp declare target",
			    DECL_ATTRIBUTES (current_function_decl)))
    {
      const char *target_attr = (is_gimple_omp_offloaded (ctx->stmt)
				 ? "omp target entrypoint"
				 : "omp declare target");
      DECL_ATTRIBUTES (decl)
	= tree_cons (get_identifier (target_attr),
		     NULL_TREE, DECL_ATTRIBUTES (decl));
    }

  t = build_decl (DECL_SOURCE_LOCATION (decl),
		  RESULT_DECL, NULL_TREE, void_type_node);
  DECL_ARTIFICIAL (t) = 1;
  DECL_IGNORED_P (t) = 1;
  DECL_CONTEXT (t) = decl;
  DECL_RESULT (decl) = t;

  tree data_name = get_identifier (".omp_data_i");
  t = build_decl (DECL_SOURCE_LOCATION (decl), PARM_DECL, data_name,
		  ptr_type_node);
  DECL_ARTIFICIAL (t) = 1;
  DECL_NAMELESS (t) = 1;
  DECL_ARG_TYPE (t) = ptr_type_node;
  DECL_CONTEXT (t) = current_function_decl;
  TREE_USED (t) = 1;
  TREE_READONLY (t) = 1;
  DECL_ARGUMENTS (decl) = t;
  if (!task_copy)
    ctx->receiver_decl = t;
  else
    {
      t = build_decl (DECL_SOURCE_LOCATION (decl),
		      PARM_DECL, get_identifier (".omp_data_o"),
		      ptr_type_node);
      DECL_ARTIFICIAL (t) = 1;
      DECL_NAMELESS (t) = 1;
      DECL_ARG_TYPE (t) = ptr_type_node;
      DECL_CONTEXT (t) = current_function_decl;
      TREE_USED (t) = 1;
      TREE_ADDRESSABLE (t) = 1;
      DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
      DECL_ARGUMENTS (decl) = t;
    }

  /* Allocate memory for the function structure.  The call to
     allocate_struct_function clobbers CFUN, so we need to restore
     it afterward.  */
  push_struct_function (decl);
  cfun->function_end_locus = gimple_location (ctx->stmt);
  init_tree_ssa (cfun);
  pop_cfun ();
}
/* Callback for walk_gimple_seq.  Check if combined parallel
   contains gimple_omp_for_combined_into_p OMP_FOR.  */

tree
omp_find_combined_for (gimple_stmt_iterator *gsi_p,
		       bool *handled_ops_p,
		       struct walk_stmt_info *wi)
{
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;
  switch (gimple_code (stmt))
    {
    WALK_SUBSTMTS;

    case GIMPLE_OMP_FOR:
      if (gimple_omp_for_combined_into_p (stmt)
	  && gimple_omp_for_kind (stmt)
	     == *(const enum gf_mask *) (wi->info))
	{
	  wi->info = stmt;
	  return integer_zero_node;
	}
      break;
    default:
      break;
    }
  return NULL;
}
1865 /* Add _LOOPTEMP_/_REDUCTEMP_ clauses on OpenMP parallel or task. */
1867 static void
1868 add_taskreg_looptemp_clauses (enum gf_mask msk, gimple *stmt,
1869 omp_context *outer_ctx)
1871 struct walk_stmt_info wi;
1873 memset (&wi, 0, sizeof (wi));
1874 wi.val_only = true;
1875 wi.info = (void *) &msk;
1876 walk_gimple_seq (gimple_omp_body (stmt), omp_find_combined_for, NULL, &wi);
1877 if (wi.info != (void *) &msk)
1879 gomp_for *for_stmt = as_a <gomp_for *> ((gimple *) wi.info);
1880 struct omp_for_data fd;
1881 omp_extract_for_data (for_stmt, &fd, NULL);
1882 /* We need two temporaries with fd.loop.v type (istart/iend)
1883 and then (fd.collapse - 1) temporaries with the same
1884 type for count2 ... countN-1 vars if not constant. */
1885 size_t count = 2, i;
1886 tree type = fd.iter_type;
1887 if (fd.collapse > 1
1888 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
1890 count += fd.collapse - 1;
1891 /* If there are lastprivate clauses on the inner
1892 GIMPLE_OMP_FOR, add one more temporaries for the total number
1893 of iterations (product of count1 ... countN-1). */
1894 if (omp_find_clause (gimple_omp_for_clauses (for_stmt),
1895 OMP_CLAUSE_LASTPRIVATE))
1896 count++;
1897 else if (msk == GF_OMP_FOR_KIND_FOR
1898 && omp_find_clause (gimple_omp_parallel_clauses (stmt),
1899 OMP_CLAUSE_LASTPRIVATE))
1900 count++;
1902 for (i = 0; i < count; i++)
1904 tree temp = create_tmp_var (type);
1905 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
1906 insert_decl_map (&outer_ctx->cb, temp, temp);
1907 OMP_CLAUSE_DECL (c) = temp;
1908 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
1909 gimple_omp_taskreg_set_clauses (stmt, c);
1912 if (msk == GF_OMP_FOR_KIND_TASKLOOP
1913 && omp_find_clause (gimple_omp_task_clauses (stmt),
1914 OMP_CLAUSE_REDUCTION))
1916 tree type = build_pointer_type (pointer_sized_int_node);
1917 tree temp = create_tmp_var (type);
1918 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
1919 insert_decl_map (&outer_ctx->cb, temp, temp);
1920 OMP_CLAUSE_DECL (c) = temp;
1921 OMP_CLAUSE_CHAIN (c) = gimple_omp_task_clauses (stmt);
1922 gimple_omp_task_set_clauses (stmt, c);
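/* For illustration, a combined construct such as

     #pragma omp parallel for
     for (i = 0; i < n; i++)
       body;

   gets two _LOOPTEMP_ clauses through which the precomputed istart/iend
   bounds can be passed from the enclosing construct down to the inner
   GIMPLE_OMP_FOR; non-constant collapsed loop nests and lastprivate
   add the further temporaries counted above.  */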
1926 /* Scan an OpenMP parallel directive. */
1928 static void
1929 scan_omp_parallel (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
1931 omp_context *ctx;
1932 tree name;
1933 gomp_parallel *stmt = as_a <gomp_parallel *> (gsi_stmt (*gsi));
1935 /* Ignore parallel directives with empty bodies, unless there
1936 are copyin clauses. */
1937 if (optimize > 0
1938 && empty_body_p (gimple_omp_body (stmt))
1939 && omp_find_clause (gimple_omp_parallel_clauses (stmt),
1940 OMP_CLAUSE_COPYIN) == NULL)
1942 gsi_replace (gsi, gimple_build_nop (), false);
1943 return;
1946 if (gimple_omp_parallel_combined_p (stmt))
1947 add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_FOR, stmt, outer_ctx);
1948 for (tree c = omp_find_clause (gimple_omp_parallel_clauses (stmt),
1949 OMP_CLAUSE_REDUCTION);
1950 c; c = omp_find_clause (OMP_CLAUSE_CHAIN (c), OMP_CLAUSE_REDUCTION))
1951 if (OMP_CLAUSE_REDUCTION_TASK (c))
1953 tree type = build_pointer_type (pointer_sized_int_node);
1954 tree temp = create_tmp_var (type);
1955 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
1956 if (outer_ctx)
1957 insert_decl_map (&outer_ctx->cb, temp, temp);
1958 OMP_CLAUSE_DECL (c) = temp;
1959 OMP_CLAUSE_CHAIN (c) = gimple_omp_parallel_clauses (stmt);
1960 gimple_omp_parallel_set_clauses (stmt, c);
1961 break;
1963 else if (OMP_CLAUSE_CHAIN (c) == NULL_TREE)
1964 break;
1966 ctx = new_omp_context (stmt, outer_ctx);
1967 taskreg_contexts.safe_push (ctx);
1968 if (taskreg_nesting_level > 1)
1969 ctx->is_nested = true;
1970 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
1971 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
1972 name = create_tmp_var_name (".omp_data_s");
1973 name = build_decl (gimple_location (stmt),
1974 TYPE_DECL, name, ctx->record_type);
1975 DECL_ARTIFICIAL (name) = 1;
1976 DECL_NAMELESS (name) = 1;
1977 TYPE_NAME (ctx->record_type) = name;
1978 TYPE_ARTIFICIAL (ctx->record_type) = 1;
1979 if (!gimple_omp_parallel_grid_phony (stmt))
1981 create_omp_child_function (ctx, false);
1982 gimple_omp_parallel_set_child_fn (stmt, ctx->cb.dst_fn);
1985 scan_sharing_clauses (gimple_omp_parallel_clauses (stmt), ctx);
1986 scan_omp (gimple_omp_body_ptr (stmt), ctx);
1988 if (TYPE_FIELDS (ctx->record_type) == NULL)
1989 ctx->record_type = ctx->receiver_decl = NULL;
1992 /* Scan an OpenMP task directive. */
1994 static void
1995 scan_omp_task (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
1997 omp_context *ctx;
1998 tree name, t;
1999 gomp_task *stmt = as_a <gomp_task *> (gsi_stmt (*gsi));
2001 /* Ignore task directives with empty bodies, unless they have a depend
2002 clause. */
2003 if (optimize > 0
2004 && gimple_omp_body (stmt)
2005 && empty_body_p (gimple_omp_body (stmt))
2006 && !omp_find_clause (gimple_omp_task_clauses (stmt), OMP_CLAUSE_DEPEND))
2008 gsi_replace (gsi, gimple_build_nop (), false);
2009 return;
2012 if (gimple_omp_task_taskloop_p (stmt))
2013 add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_TASKLOOP, stmt, outer_ctx);
2015 ctx = new_omp_context (stmt, outer_ctx);
2017 if (gimple_omp_task_taskwait_p (stmt))
2019 scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);
2020 return;
2023 taskreg_contexts.safe_push (ctx);
2024 if (taskreg_nesting_level > 1)
2025 ctx->is_nested = true;
2026 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2027 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2028 name = create_tmp_var_name (".omp_data_s");
2029 name = build_decl (gimple_location (stmt),
2030 TYPE_DECL, name, ctx->record_type);
2031 DECL_ARTIFICIAL (name) = 1;
2032 DECL_NAMELESS (name) = 1;
2033 TYPE_NAME (ctx->record_type) = name;
2034 TYPE_ARTIFICIAL (ctx->record_type) = 1;
2035 create_omp_child_function (ctx, false);
2036 gimple_omp_task_set_child_fn (stmt, ctx->cb.dst_fn);
2038 scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);
2040 if (ctx->srecord_type)
2042 name = create_tmp_var_name (".omp_data_a");
2043 name = build_decl (gimple_location (stmt),
2044 TYPE_DECL, name, ctx->srecord_type);
2045 DECL_ARTIFICIAL (name) = 1;
2046 DECL_NAMELESS (name) = 1;
2047 TYPE_NAME (ctx->srecord_type) = name;
2048 TYPE_ARTIFICIAL (ctx->srecord_type) = 1;
2049 create_omp_child_function (ctx, true);
2052 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2054 if (TYPE_FIELDS (ctx->record_type) == NULL)
2056 ctx->record_type = ctx->receiver_decl = NULL;
2057 t = build_int_cst (long_integer_type_node, 0);
2058 gimple_omp_task_set_arg_size (stmt, t);
2059 t = build_int_cst (long_integer_type_node, 1);
2060 gimple_omp_task_set_arg_align (stmt, t);
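/* For example, a task that references no shared data, such as

     #pragma omp task
     bar ();

   ends up with no fields in its .omp_data_s record, so the record is
   dropped and GOMP_task is passed size 0 and alignment 1 as set
   above.  */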
2064 /* Helper function for finish_taskreg_scan, called through walk_tree.
2065 If maybe_lookup_decl_in_outer_ctx returns a different tree for
2066 some decl, replace it in the expression. */
2068 static tree
2069 finish_taskreg_remap (tree *tp, int *walk_subtrees, void *data)
2071 if (VAR_P (*tp))
2073 omp_context *ctx = (omp_context *) data;
2074 tree t = maybe_lookup_decl_in_outer_ctx (*tp, ctx);
2075 if (t != *tp)
2077 if (DECL_HAS_VALUE_EXPR_P (t))
2078 t = unshare_expr (DECL_VALUE_EXPR (t));
2079 *tp = t;
2081 *walk_subtrees = 0;
2083 else if (IS_TYPE_OR_DECL_P (*tp))
2084 *walk_subtrees = 0;
2085 return NULL_TREE;
2088 /* If any decls have been made addressable during scan_omp,
2089 adjust their fields if needed, and lay out the record types
2090 of parallel/task constructs. */
2092 static void
2093 finish_taskreg_scan (omp_context *ctx)
2095 if (ctx->record_type == NULL_TREE)
2096 return;
2098 /* If any task_shared_vars were needed, verify for all
2099 OMP_CLAUSE_SHARED clauses on GIMPLE_OMP_{PARALLEL,TASK,TEAMS}
2100 statements whether use_pointer_for_field has changed
2101 because of that. If it has, update the field types now. */
2102 if (task_shared_vars)
2104 tree c;
2106 for (c = gimple_omp_taskreg_clauses (ctx->stmt);
2107 c; c = OMP_CLAUSE_CHAIN (c))
2108 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
2109 && !OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
2111 tree decl = OMP_CLAUSE_DECL (c);
2113 /* Global variables don't need to be copied;
2114 the receiver side will use them directly. */
2115 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
2116 continue;
2117 if (!bitmap_bit_p (task_shared_vars, DECL_UID (decl))
2118 || !use_pointer_for_field (decl, ctx))
2119 continue;
2120 tree field = lookup_field (decl, ctx);
2121 if (TREE_CODE (TREE_TYPE (field)) == POINTER_TYPE
2122 && TREE_TYPE (TREE_TYPE (field)) == TREE_TYPE (decl))
2123 continue;
2124 TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
2125 TREE_THIS_VOLATILE (field) = 0;
2126 DECL_USER_ALIGN (field) = 0;
2127 SET_DECL_ALIGN (field, TYPE_ALIGN (TREE_TYPE (field)));
2128 if (TYPE_ALIGN (ctx->record_type) < DECL_ALIGN (field))
2129 SET_TYPE_ALIGN (ctx->record_type, DECL_ALIGN (field));
2130 if (ctx->srecord_type)
2132 tree sfield = lookup_sfield (decl, ctx);
2133 TREE_TYPE (sfield) = TREE_TYPE (field);
2134 TREE_THIS_VOLATILE (sfield) = 0;
2135 DECL_USER_ALIGN (sfield) = 0;
2136 SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
2137 if (TYPE_ALIGN (ctx->srecord_type) < DECL_ALIGN (sfield))
2138 SET_TYPE_ALIGN (ctx->srecord_type, DECL_ALIGN (sfield));
2143 if (gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL)
2145 tree clauses = gimple_omp_parallel_clauses (ctx->stmt);
2146 tree c = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
2147 if (c)
2149 /* Move the _reductemp_ clause first. GOMP_parallel_reductions
2150 expects to find it at the start of data. */
2151 tree f = lookup_field (OMP_CLAUSE_DECL (c), ctx);
2152 tree *p = &TYPE_FIELDS (ctx->record_type);
2153 while (*p)
2154 if (*p == f)
2156 *p = DECL_CHAIN (*p);
2157 break;
2159 else
2160 p = &DECL_CHAIN (*p);
2161 DECL_CHAIN (f) = TYPE_FIELDS (ctx->record_type);
2162 TYPE_FIELDS (ctx->record_type) = f;
2164 layout_type (ctx->record_type);
2165 fixup_child_record_type (ctx);
2167 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
2169 layout_type (ctx->record_type);
2170 fixup_child_record_type (ctx);
2172 else
2174 location_t loc = gimple_location (ctx->stmt);
2175 tree *p, vla_fields = NULL_TREE, *q = &vla_fields;
2176 /* Move VLA fields to the end. */
2177 p = &TYPE_FIELDS (ctx->record_type);
2178 while (*p)
2179 if (!TYPE_SIZE_UNIT (TREE_TYPE (*p))
2180 || ! TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (*p))))
2182 *q = *p;
2183 *p = TREE_CHAIN (*p);
2184 TREE_CHAIN (*q) = NULL_TREE;
2185 q = &TREE_CHAIN (*q);
2187 else
2188 p = &DECL_CHAIN (*p);
2189 *p = vla_fields;
2190 if (gimple_omp_task_taskloop_p (ctx->stmt))
2192 /* Move the fields corresponding to the first and second _looptemp_
2193 clauses first. They are filled by GOMP_taskloop
2194 and thus need to be in specific positions. */
2195 tree clauses = gimple_omp_task_clauses (ctx->stmt);
2196 tree c1 = omp_find_clause (clauses, OMP_CLAUSE__LOOPTEMP_);
2197 tree c2 = omp_find_clause (OMP_CLAUSE_CHAIN (c1),
2198 OMP_CLAUSE__LOOPTEMP_);
2199 tree c3 = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
2200 tree f1 = lookup_field (OMP_CLAUSE_DECL (c1), ctx);
2201 tree f2 = lookup_field (OMP_CLAUSE_DECL (c2), ctx);
2202 tree f3 = c3 ? lookup_field (OMP_CLAUSE_DECL (c3), ctx) : NULL_TREE;
2203 p = &TYPE_FIELDS (ctx->record_type);
2204 while (*p)
2205 if (*p == f1 || *p == f2 || *p == f3)
2206 *p = DECL_CHAIN (*p);
2207 else
2208 p = &DECL_CHAIN (*p);
2209 DECL_CHAIN (f1) = f2;
2210 if (c3)
2212 DECL_CHAIN (f2) = f3;
2213 DECL_CHAIN (f3) = TYPE_FIELDS (ctx->record_type);
2215 else
2216 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->record_type);
2217 TYPE_FIELDS (ctx->record_type) = f1;
2218 if (ctx->srecord_type)
2220 f1 = lookup_sfield (OMP_CLAUSE_DECL (c1), ctx);
2221 f2 = lookup_sfield (OMP_CLAUSE_DECL (c2), ctx);
2222 if (c3)
2223 f3 = lookup_sfield (OMP_CLAUSE_DECL (c3), ctx);
2224 p = &TYPE_FIELDS (ctx->srecord_type);
2225 while (*p)
2226 if (*p == f1 || *p == f2 || *p == f3)
2227 *p = DECL_CHAIN (*p);
2228 else
2229 p = &DECL_CHAIN (*p);
2230 DECL_CHAIN (f1) = f2;
2231 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
2232 if (c3)
2234 DECL_CHAIN (f2) = f3;
2235 DECL_CHAIN (f3) = TYPE_FIELDS (ctx->srecord_type);
2237 else
2238 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
2239 TYPE_FIELDS (ctx->srecord_type) = f1;
2242 layout_type (ctx->record_type);
2243 fixup_child_record_type (ctx);
2244 if (ctx->srecord_type)
2245 layout_type (ctx->srecord_type);
2246 tree t = fold_convert_loc (loc, long_integer_type_node,
2247 TYPE_SIZE_UNIT (ctx->record_type));
2248 if (TREE_CODE (t) != INTEGER_CST)
2250 t = unshare_expr (t);
2251 walk_tree (&t, finish_taskreg_remap, ctx, NULL);
2253 gimple_omp_task_set_arg_size (ctx->stmt, t);
2254 t = build_int_cst (long_integer_type_node,
2255 TYPE_ALIGN_UNIT (ctx->record_type));
2256 gimple_omp_task_set_arg_align (ctx->stmt, t);
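/* Illustrative layout: for

     #pragma omp taskloop
     for (i = a; i < b; i++)
       body;

   the reordering above guarantees that the two _looptemp_ fields come
   first in .omp_data_s (followed by the optional _reductemp_ slot),
   so GOMP_taskloop can store each chunk's start and end iteration at
   fixed positions in the record.  */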
2260 /* Find the enclosing offload context. */
2262 static omp_context *
2263 enclosing_target_ctx (omp_context *ctx)
2265 for (; ctx; ctx = ctx->outer)
2266 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET)
2267 break;
2269 return ctx;
2272 /* Return true if ctx is part of an oacc kernels region. */
2274 static bool
2275 ctx_in_oacc_kernels_region (omp_context *ctx)
2277 for (;ctx != NULL; ctx = ctx->outer)
2279 gimple *stmt = ctx->stmt;
2280 if (gimple_code (stmt) == GIMPLE_OMP_TARGET
2281 && gimple_omp_target_kind (stmt) == GF_OMP_TARGET_KIND_OACC_KERNELS)
2282 return true;
2285 return false;
2288 /* Check the parallelism clauses inside a kernels region.
2289 Until kernels handling moves to use the same loop indirection
2290 scheme as parallel, we need to do this checking early. */
2292 static unsigned
2293 check_oacc_kernel_gwv (gomp_for *stmt, omp_context *ctx)
2295 bool checking = true;
2296 unsigned outer_mask = 0;
2297 unsigned this_mask = 0;
2298 bool has_seq = false, has_auto = false;
2300 if (ctx->outer)
2301 outer_mask = check_oacc_kernel_gwv (NULL, ctx->outer);
2302 if (!stmt)
2304 checking = false;
2305 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR)
2306 return outer_mask;
2307 stmt = as_a <gomp_for *> (ctx->stmt);
2310 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2312 switch (OMP_CLAUSE_CODE (c))
2314 case OMP_CLAUSE_GANG:
2315 this_mask |= GOMP_DIM_MASK (GOMP_DIM_GANG);
2316 break;
2317 case OMP_CLAUSE_WORKER:
2318 this_mask |= GOMP_DIM_MASK (GOMP_DIM_WORKER);
2319 break;
2320 case OMP_CLAUSE_VECTOR:
2321 this_mask |= GOMP_DIM_MASK (GOMP_DIM_VECTOR);
2322 break;
2323 case OMP_CLAUSE_SEQ:
2324 has_seq = true;
2325 break;
2326 case OMP_CLAUSE_AUTO:
2327 has_auto = true;
2328 break;
2329 default:
2330 break;
2334 if (checking)
2336 if (has_seq && (this_mask || has_auto))
2337 error_at (gimple_location (stmt), "%<seq%> overrides other"
2338 " OpenACC loop specifiers");
2339 else if (has_auto && this_mask)
2340 error_at (gimple_location (stmt), "%<auto%> conflicts with other"
2341 " OpenACC loop specifiers");
2343 if (this_mask & outer_mask)
2344 error_at (gimple_location (stmt), "inner loop uses same"
2345 " OpenACC parallelism as containing loop");
2348 return outer_mask | this_mask;
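/* For instance, inside an OpenACC kernels region

     #pragma acc loop gang
     for (...)
       #pragma acc loop gang
       for (...)

   the inner loop repeats the gang parallelism of its containing loop
   and is diagnosed above, as are seq or auto combined with explicit
   gang/worker/vector clauses on the same loop.  */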
2351 /* Scan a GIMPLE_OMP_FOR. */
2353 static omp_context *
2354 scan_omp_for (gomp_for *stmt, omp_context *outer_ctx)
2356 omp_context *ctx;
2357 size_t i;
2358 tree clauses = gimple_omp_for_clauses (stmt);
2360 ctx = new_omp_context (stmt, outer_ctx);
2362 if (is_gimple_omp_oacc (stmt))
2364 omp_context *tgt = enclosing_target_ctx (outer_ctx);
2366 if (!tgt || is_oacc_parallel (tgt))
2367 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
2369 char const *check = NULL;
2371 switch (OMP_CLAUSE_CODE (c))
2373 case OMP_CLAUSE_GANG:
2374 check = "gang";
2375 break;
2377 case OMP_CLAUSE_WORKER:
2378 check = "worker";
2379 break;
2381 case OMP_CLAUSE_VECTOR:
2382 check = "vector";
2383 break;
2385 default:
2386 break;
2389 if (check && OMP_CLAUSE_OPERAND (c, 0))
2390 error_at (gimple_location (stmt),
2391 "argument not permitted on %qs clause in"
2392 " OpenACC %<parallel%>", check);
2395 if (tgt && is_oacc_kernels (tgt))
2397 /* Strip out reductions, as they are not handled yet. */
2398 tree *prev_ptr = &clauses;
2400 while (tree probe = *prev_ptr)
2402 tree *next_ptr = &OMP_CLAUSE_CHAIN (probe);
2404 if (OMP_CLAUSE_CODE (probe) == OMP_CLAUSE_REDUCTION)
2405 *prev_ptr = *next_ptr;
2406 else
2407 prev_ptr = next_ptr;
2410 gimple_omp_for_set_clauses (stmt, clauses);
2411 check_oacc_kernel_gwv (stmt, ctx);
2415 scan_sharing_clauses (clauses, ctx);
2417 scan_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
2418 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
2420 scan_omp_op (gimple_omp_for_index_ptr (stmt, i), ctx);
2421 scan_omp_op (gimple_omp_for_initial_ptr (stmt, i), ctx);
2422 scan_omp_op (gimple_omp_for_final_ptr (stmt, i), ctx);
2423 scan_omp_op (gimple_omp_for_incr_ptr (stmt, i), ctx);
2425 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2426 return ctx;
2429 /* Duplicate #pragma omp simd, one for SIMT, another one for SIMD. */
2431 static void
2432 scan_omp_simd (gimple_stmt_iterator *gsi, gomp_for *stmt,
2433 omp_context *outer_ctx)
2435 gbind *bind = gimple_build_bind (NULL, NULL, NULL);
2436 gsi_replace (gsi, bind, false);
2437 gimple_seq seq = NULL;
2438 gimple *g = gimple_build_call_internal (IFN_GOMP_USE_SIMT, 0);
2439 tree cond = create_tmp_var_raw (integer_type_node);
2440 DECL_CONTEXT (cond) = current_function_decl;
2441 DECL_SEEN_IN_BIND_EXPR_P (cond) = 1;
2442 gimple_bind_set_vars (bind, cond);
2443 gimple_call_set_lhs (g, cond);
2444 gimple_seq_add_stmt (&seq, g);
2445 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
2446 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
2447 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
2448 g = gimple_build_cond (NE_EXPR, cond, integer_zero_node, lab1, lab2);
2449 gimple_seq_add_stmt (&seq, g);
2450 g = gimple_build_label (lab1);
2451 gimple_seq_add_stmt (&seq, g);
2452 gimple_seq new_seq = copy_gimple_seq_and_replace_locals (stmt);
2453 gomp_for *new_stmt = as_a <gomp_for *> (new_seq);
2454 tree clause = build_omp_clause (gimple_location (stmt), OMP_CLAUSE__SIMT_);
2455 OMP_CLAUSE_CHAIN (clause) = gimple_omp_for_clauses (new_stmt);
2456 gimple_omp_for_set_clauses (new_stmt, clause);
2457 gimple_seq_add_stmt (&seq, new_stmt);
2458 g = gimple_build_goto (lab3);
2459 gimple_seq_add_stmt (&seq, g);
2460 g = gimple_build_label (lab2);
2461 gimple_seq_add_stmt (&seq, g);
2462 gimple_seq_add_stmt (&seq, stmt);
2463 g = gimple_build_label (lab3);
2464 gimple_seq_add_stmt (&seq, g);
2465 gimple_bind_set_body (bind, seq);
2466 update_stmt (bind);
2467 scan_omp_for (new_stmt, outer_ctx);
2468 scan_omp_for (stmt, outer_ctx)->simt_stmt = new_stmt;
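/* Schematically, the bind built above has this shape:

     cond = .GOMP_USE_SIMT ();
     if (cond != 0) goto lab1; else goto lab2;
     lab1:
       <copy of the simd loop with an added _simt_ clause>
       goto lab3;
     lab2:
       <original simd loop>
     lab3:

   so the SIMT/SIMD choice is resolved later, once it is known whether
   the loop runs on a SIMT device.  */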
2471 static tree omp_find_scan (gimple_stmt_iterator *, bool *,
2472 struct walk_stmt_info *);
2473 static omp_context *maybe_lookup_ctx (gimple *);
2475 /* Duplicate #pragma omp simd, one for the scan input phase loop and one
2476 for the scan phase loop. */
2478 static void
2479 scan_omp_simd_scan (gimple_stmt_iterator *gsi, gomp_for *stmt,
2480 omp_context *outer_ctx)
2482 /* The only change between inclusive and exclusive scan will be
2483 within the first simd loop, so just use inclusive in the
2484 worksharing loop. */
2485 outer_ctx->scan_inclusive = true;
2486 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_INCLUSIVE);
2487 OMP_CLAUSE_DECL (c) = integer_zero_node;
2489 gomp_scan *input_stmt = gimple_build_omp_scan (NULL, NULL_TREE);
2490 gomp_scan *scan_stmt = gimple_build_omp_scan (NULL, c);
2491 gsi_replace (gsi, input_stmt, false);
2492 gimple_seq input_body = NULL;
2493 gimple_seq_add_stmt (&input_body, stmt);
2494 gsi_insert_after (gsi, scan_stmt, GSI_NEW_STMT);
2496 gimple_stmt_iterator input1_gsi = gsi_none ();
2497 struct walk_stmt_info wi;
2498 memset (&wi, 0, sizeof (wi));
2499 wi.val_only = true;
2500 wi.info = (void *) &input1_gsi;
2501 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), omp_find_scan, NULL, &wi);
2502 gcc_assert (!gsi_end_p (input1_gsi));
2504 gimple *input_stmt1 = gsi_stmt (input1_gsi);
2505 gsi_next (&input1_gsi);
2506 gimple *scan_stmt1 = gsi_stmt (input1_gsi);
2507 gcc_assert (scan_stmt1 && gimple_code (scan_stmt1) == GIMPLE_OMP_SCAN);
2508 c = gimple_omp_scan_clauses (as_a <gomp_scan *> (scan_stmt1));
2509 if (c && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_EXCLUSIVE)
2510 std::swap (input_stmt1, scan_stmt1);
2512 gimple_seq input_body1 = gimple_omp_body (input_stmt1);
2513 gimple_omp_set_body (input_stmt1, NULL);
2515 gimple_seq scan_body = copy_gimple_seq_and_replace_locals (stmt);
2516 gomp_for *new_stmt = as_a <gomp_for *> (scan_body);
2518 gimple_omp_set_body (input_stmt1, input_body1);
2519 gimple_omp_set_body (scan_stmt1, NULL);
2521 gimple_stmt_iterator input2_gsi = gsi_none ();
2522 memset (&wi, 0, sizeof (wi));
2523 wi.val_only = true;
2524 wi.info = (void *) &input2_gsi;
2525 walk_gimple_seq_mod (gimple_omp_body_ptr (new_stmt), omp_find_scan,
2526 NULL, &wi);
2527 gcc_assert (!gsi_end_p (input2_gsi));
2529 gimple *input_stmt2 = gsi_stmt (input2_gsi);
2530 gsi_next (&input2_gsi);
2531 gimple *scan_stmt2 = gsi_stmt (input2_gsi);
2532 gcc_assert (scan_stmt2 && gimple_code (scan_stmt2) == GIMPLE_OMP_SCAN);
2533 if (c && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_EXCLUSIVE)
2534 std::swap (input_stmt2, scan_stmt2);
2536 gimple_omp_set_body (input_stmt2, NULL);
2538 gimple_omp_set_body (input_stmt, input_body);
2539 gimple_omp_set_body (scan_stmt, scan_body);
2541 omp_context *ctx = new_omp_context (input_stmt, outer_ctx);
2542 scan_omp (gimple_omp_body_ptr (input_stmt), ctx);
2544 ctx = new_omp_context (scan_stmt, outer_ctx);
2545 scan_omp (gimple_omp_body_ptr (scan_stmt), ctx);
2547 maybe_lookup_ctx (new_stmt)->for_simd_scan_phase = true;
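/* For example, an inscan reduction loop such as

     #pragma omp for simd reduction (inscan, +:r)
     for (i = 0; i < n; i++)
       {
         r += a[i];
         #pragma omp scan inclusive (r)
         b[i] = r;
       }

   is duplicated above into an input phase loop and a scan phase loop,
   each wrapped in its own GIMPLE_OMP_SCAN region.  */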
2550 /* Scan an OpenMP sections directive. */
2552 static void
2553 scan_omp_sections (gomp_sections *stmt, omp_context *outer_ctx)
2555 omp_context *ctx;
2557 ctx = new_omp_context (stmt, outer_ctx);
2558 scan_sharing_clauses (gimple_omp_sections_clauses (stmt), ctx);
2559 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2562 /* Scan an OpenMP single directive. */
2564 static void
2565 scan_omp_single (gomp_single *stmt, omp_context *outer_ctx)
2567 omp_context *ctx;
2568 tree name;
2570 ctx = new_omp_context (stmt, outer_ctx);
2571 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2572 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2573 name = create_tmp_var_name (".omp_copy_s");
2574 name = build_decl (gimple_location (stmt),
2575 TYPE_DECL, name, ctx->record_type);
2576 TYPE_NAME (ctx->record_type) = name;
2578 scan_sharing_clauses (gimple_omp_single_clauses (stmt), ctx);
2579 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2581 if (TYPE_FIELDS (ctx->record_type) == NULL)
2582 ctx->record_type = NULL;
2583 else
2584 layout_type (ctx->record_type);
2587 /* Scan a GIMPLE_OMP_TARGET. */
2589 static void
2590 scan_omp_target (gomp_target *stmt, omp_context *outer_ctx)
2592 omp_context *ctx;
2593 tree name;
2594 bool offloaded = is_gimple_omp_offloaded (stmt);
2595 tree clauses = gimple_omp_target_clauses (stmt);
2597 ctx = new_omp_context (stmt, outer_ctx);
2598 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2599 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2600 name = create_tmp_var_name (".omp_data_t");
2601 name = build_decl (gimple_location (stmt),
2602 TYPE_DECL, name, ctx->record_type);
2603 DECL_ARTIFICIAL (name) = 1;
2604 DECL_NAMELESS (name) = 1;
2605 TYPE_NAME (ctx->record_type) = name;
2606 TYPE_ARTIFICIAL (ctx->record_type) = 1;
2608 if (offloaded)
2610 create_omp_child_function (ctx, false);
2611 gimple_omp_target_set_child_fn (stmt, ctx->cb.dst_fn);
2614 scan_sharing_clauses (clauses, ctx);
2615 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2617 if (TYPE_FIELDS (ctx->record_type) == NULL)
2618 ctx->record_type = ctx->receiver_decl = NULL;
2619 else
2621 TYPE_FIELDS (ctx->record_type)
2622 = nreverse (TYPE_FIELDS (ctx->record_type));
2623 if (flag_checking)
2625 unsigned int align = DECL_ALIGN (TYPE_FIELDS (ctx->record_type));
2626 for (tree field = TYPE_FIELDS (ctx->record_type);
2627 field;
2628 field = DECL_CHAIN (field))
2629 gcc_assert (DECL_ALIGN (field) == align);
2631 layout_type (ctx->record_type);
2632 if (offloaded)
2633 fixup_child_record_type (ctx);
2637 /* Scan an OpenMP teams directive. */
2639 static void
2640 scan_omp_teams (gomp_teams *stmt, omp_context *outer_ctx)
2642 omp_context *ctx = new_omp_context (stmt, outer_ctx);
2644 if (!gimple_omp_teams_host (stmt))
2646 scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
2647 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2648 return;
2650 taskreg_contexts.safe_push (ctx);
2651 gcc_assert (taskreg_nesting_level == 1);
2652 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2653 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2654 tree name = create_tmp_var_name (".omp_data_s");
2655 name = build_decl (gimple_location (stmt),
2656 TYPE_DECL, name, ctx->record_type);
2657 DECL_ARTIFICIAL (name) = 1;
2658 DECL_NAMELESS (name) = 1;
2659 TYPE_NAME (ctx->record_type) = name;
2660 TYPE_ARTIFICIAL (ctx->record_type) = 1;
2661 create_omp_child_function (ctx, false);
2662 gimple_omp_teams_set_child_fn (stmt, ctx->cb.dst_fn);
2664 scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
2665 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2667 if (TYPE_FIELDS (ctx->record_type) == NULL)
2668 ctx->record_type = ctx->receiver_decl = NULL;
2671 /* Check nesting restrictions. */
2672 static bool
2673 check_omp_nesting_restrictions (gimple *stmt, omp_context *ctx)
2675 tree c;
2677 if (ctx && gimple_code (ctx->stmt) == GIMPLE_OMP_GRID_BODY)
2678 /* GRID_BODY is an artificial construct; nesting rules will be checked in
2679 the original copy of its contents. */
2680 return true;
2682 /* No nesting of non-OpenACC STMT (that is, an OpenMP one, or a GOMP builtin)
2683 inside an OpenACC CTX. */
2684 if (!(is_gimple_omp (stmt)
2685 && is_gimple_omp_oacc (stmt))
2686 /* Except for atomic codes that we share with OpenMP. */
2687 && !(gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
2688 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE))
2690 if (oacc_get_fn_attrib (cfun->decl) != NULL)
2692 error_at (gimple_location (stmt),
2693 "non-OpenACC construct inside of OpenACC routine");
2694 return false;
2696 else
2697 for (omp_context *octx = ctx; octx != NULL; octx = octx->outer)
2698 if (is_gimple_omp (octx->stmt)
2699 && is_gimple_omp_oacc (octx->stmt))
2701 error_at (gimple_location (stmt),
2702 "non-OpenACC construct inside of OpenACC region");
2703 return false;
2707 if (ctx != NULL)
2709 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SCAN
2710 && ctx->outer
2711 && gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
2712 ctx = ctx->outer;
2713 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
2714 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD
2715 && !ctx->loop_p)
2717 c = NULL_TREE;
2718 if (ctx->order_concurrent
2719 && (gimple_code (stmt) == GIMPLE_OMP_ORDERED
2720 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
2721 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE))
2723 error_at (gimple_location (stmt),
2724 "OpenMP constructs other than %<parallel%>, %<loop%>"
2725 " or %<simd%> may not be nested inside a region with"
2726 " the %<order(concurrent)%> clause");
2727 return false;
2729 if (gimple_code (stmt) == GIMPLE_OMP_ORDERED)
2731 c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
2732 if (omp_find_clause (c, OMP_CLAUSE_SIMD))
2734 if (omp_find_clause (c, OMP_CLAUSE_THREADS)
2735 && (ctx->outer == NULL
2736 || !gimple_omp_for_combined_into_p (ctx->stmt)
2737 || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR
2738 || (gimple_omp_for_kind (ctx->outer->stmt)
2739 != GF_OMP_FOR_KIND_FOR)
2740 || !gimple_omp_for_combined_p (ctx->outer->stmt)))
2742 error_at (gimple_location (stmt),
2743 "%<ordered simd threads%> must be closely "
2744 "nested inside of %<for simd%> region");
2745 return false;
2747 return true;
2750 else if (gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
2751 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE
2752 || gimple_code (stmt) == GIMPLE_OMP_SCAN)
2753 return true;
2754 else if (gimple_code (stmt) == GIMPLE_OMP_FOR
2755 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
2756 return true;
2757 error_at (gimple_location (stmt),
2758 "OpenMP constructs other than "
2759 "%<ordered simd%>, %<simd%>, %<loop%> or %<atomic%> may "
2760 "not be nested inside %<simd%> region");
2761 return false;
2763 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
2765 if ((gimple_code (stmt) != GIMPLE_OMP_FOR
2766 || (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_DISTRIBUTE
2767 && gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_GRID_LOOP
2768 && omp_find_clause (gimple_omp_for_clauses (stmt),
2769 OMP_CLAUSE_BIND) == NULL_TREE))
2770 && gimple_code (stmt) != GIMPLE_OMP_PARALLEL)
2772 error_at (gimple_location (stmt),
2773 "only %<distribute%>, %<parallel%> or %<loop%> "
2774 "regions are allowed to be strictly nested inside "
2775 "%<teams%> region");
2776 return false;
2779 else if (ctx->order_concurrent
2780 && gimple_code (stmt) != GIMPLE_OMP_PARALLEL
2781 && (gimple_code (stmt) != GIMPLE_OMP_FOR
2782 || gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_SIMD)
2783 && gimple_code (stmt) != GIMPLE_OMP_SCAN)
2785 if (ctx->loop_p)
2786 error_at (gimple_location (stmt),
2787 "OpenMP constructs other than %<parallel%>, %<loop%> or "
2788 "%<simd%> may not be nested inside a %<loop%> region");
2789 else
2790 error_at (gimple_location (stmt),
2791 "OpenMP constructs other than %<parallel%>, %<loop%> or "
2792 "%<simd%> may not be nested inside a region with "
2793 "the %<order(concurrent)%> clause");
2794 return false;
2797 switch (gimple_code (stmt))
2799 case GIMPLE_OMP_FOR:
2800 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_SIMD)
2801 return true;
2802 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_DISTRIBUTE)
2804 if (ctx != NULL && gimple_code (ctx->stmt) != GIMPLE_OMP_TEAMS)
2806 error_at (gimple_location (stmt),
2807 "%<distribute%> region must be strictly nested "
2808 "inside %<teams%> construct");
2809 return false;
2811 return true;
2813 /* We split taskloop into a task with a nested taskloop in it. */
2814 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP)
2815 return true;
2816 /* For now, hope this will change and loop bind(parallel) will not
2817 be allowed in lots of contexts. */
2818 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR
2819 && omp_find_clause (gimple_omp_for_clauses (stmt), OMP_CLAUSE_BIND))
2820 return true;
2821 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_OACC_LOOP)
2823 bool ok = false;
2825 if (ctx)
2826 switch (gimple_code (ctx->stmt))
2828 case GIMPLE_OMP_FOR:
2829 ok = (gimple_omp_for_kind (ctx->stmt)
2830 == GF_OMP_FOR_KIND_OACC_LOOP);
2831 break;
2833 case GIMPLE_OMP_TARGET:
2834 switch (gimple_omp_target_kind (ctx->stmt))
2836 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
2837 case GF_OMP_TARGET_KIND_OACC_KERNELS:
2838 ok = true;
2839 break;
2841 default:
2842 break;
2845 default:
2846 break;
2848 else if (oacc_get_fn_attrib (current_function_decl))
2849 ok = true;
2850 if (!ok)
2852 error_at (gimple_location (stmt),
2853 "OpenACC loop directive must be associated with"
2854 " an OpenACC compute region");
2855 return false;
2858 /* FALLTHRU */
2859 case GIMPLE_CALL:
2860 if (is_gimple_call (stmt)
2861 && (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2862 == BUILT_IN_GOMP_CANCEL
2863 || DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2864 == BUILT_IN_GOMP_CANCELLATION_POINT))
2866 const char *bad = NULL;
2867 const char *kind = NULL;
2868 const char *construct
2869 = (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2870 == BUILT_IN_GOMP_CANCEL)
2871 ? "cancel"
2872 : "cancellation point";
2873 if (ctx == NULL)
2875 error_at (gimple_location (stmt), "orphaned %qs construct",
2876 construct);
2877 return false;
2879 switch (tree_fits_shwi_p (gimple_call_arg (stmt, 0))
2880 ? tree_to_shwi (gimple_call_arg (stmt, 0))
2881 : 0)
2883 case 1:
2884 if (gimple_code (ctx->stmt) != GIMPLE_OMP_PARALLEL)
2885 bad = "parallel";
2886 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2887 == BUILT_IN_GOMP_CANCEL
2888 && !integer_zerop (gimple_call_arg (stmt, 1)))
2889 ctx->cancellable = true;
2890 kind = "parallel";
2891 break;
2892 case 2:
2893 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
2894 || gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR)
2895 bad = "for";
2896 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2897 == BUILT_IN_GOMP_CANCEL
2898 && !integer_zerop (gimple_call_arg (stmt, 1)))
2900 ctx->cancellable = true;
2901 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2902 OMP_CLAUSE_NOWAIT))
2903 warning_at (gimple_location (stmt), 0,
2904 "%<cancel for%> inside "
2905 "%<nowait%> for construct");
2906 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2907 OMP_CLAUSE_ORDERED))
2908 warning_at (gimple_location (stmt), 0,
2909 "%<cancel for%> inside "
2910 "%<ordered%> for construct");
2912 kind = "for";
2913 break;
2914 case 4:
2915 if (gimple_code (ctx->stmt) != GIMPLE_OMP_SECTIONS
2916 && gimple_code (ctx->stmt) != GIMPLE_OMP_SECTION)
2917 bad = "sections";
2918 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2919 == BUILT_IN_GOMP_CANCEL
2920 && !integer_zerop (gimple_call_arg (stmt, 1)))
2922 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
2924 ctx->cancellable = true;
2925 if (omp_find_clause (gimple_omp_sections_clauses
2926 (ctx->stmt),
2927 OMP_CLAUSE_NOWAIT))
2928 warning_at (gimple_location (stmt), 0,
2929 "%<cancel sections%> inside "
2930 "%<nowait%> sections construct");
2932 else
2934 gcc_assert (ctx->outer
2935 && gimple_code (ctx->outer->stmt)
2936 == GIMPLE_OMP_SECTIONS);
2937 ctx->outer->cancellable = true;
2938 if (omp_find_clause (gimple_omp_sections_clauses
2939 (ctx->outer->stmt),
2940 OMP_CLAUSE_NOWAIT))
2941 warning_at (gimple_location (stmt), 0,
2942 "%<cancel sections%> inside "
2943 "%<nowait%> sections construct");
2946 kind = "sections";
2947 break;
2948 case 8:
2949 if (!is_task_ctx (ctx)
2950 && (!is_taskloop_ctx (ctx)
2951 || ctx->outer == NULL
2952 || !is_task_ctx (ctx->outer)))
2953 bad = "task";
2954 else
2956 for (omp_context *octx = ctx->outer;
2957 octx; octx = octx->outer)
2959 switch (gimple_code (octx->stmt))
2961 case GIMPLE_OMP_TASKGROUP:
2962 break;
2963 case GIMPLE_OMP_TARGET:
2964 if (gimple_omp_target_kind (octx->stmt)
2965 != GF_OMP_TARGET_KIND_REGION)
2966 continue;
2967 /* FALLTHRU */
2968 case GIMPLE_OMP_PARALLEL:
2969 case GIMPLE_OMP_TEAMS:
2970 error_at (gimple_location (stmt),
2971 "%<%s taskgroup%> construct not closely "
2972 "nested inside of %<taskgroup%> region",
2973 construct);
2974 return false;
2975 case GIMPLE_OMP_TASK:
2976 if (gimple_omp_task_taskloop_p (octx->stmt)
2977 && octx->outer
2978 && is_taskloop_ctx (octx->outer))
2980 tree clauses
2981 = gimple_omp_for_clauses (octx->outer->stmt);
2982 if (!omp_find_clause (clauses, OMP_CLAUSE_NOGROUP))
2983 break;
2985 continue;
2986 default:
2987 continue;
2989 break;
2991 ctx->cancellable = true;
2993 kind = "taskgroup";
2994 break;
2995 default:
2996 error_at (gimple_location (stmt), "invalid arguments");
2997 return false;
2999 if (bad)
3001 error_at (gimple_location (stmt),
3002 "%<%s %s%> construct not closely nested inside of %qs",
3003 construct, kind, bad);
3004 return false;
3007 /* FALLTHRU */
3008 case GIMPLE_OMP_SECTIONS:
3009 case GIMPLE_OMP_SINGLE:
3010 for (; ctx != NULL; ctx = ctx->outer)
3011 switch (gimple_code (ctx->stmt))
3013 case GIMPLE_OMP_FOR:
3014 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
3015 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
3016 break;
3017 /* FALLTHRU */
3018 case GIMPLE_OMP_SECTIONS:
3019 case GIMPLE_OMP_SINGLE:
3020 case GIMPLE_OMP_ORDERED:
3021 case GIMPLE_OMP_MASTER:
3022 case GIMPLE_OMP_TASK:
3023 case GIMPLE_OMP_CRITICAL:
3024 if (is_gimple_call (stmt))
3026 if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3027 != BUILT_IN_GOMP_BARRIER)
3028 return true;
3029 error_at (gimple_location (stmt),
3030 "barrier region may not be closely nested inside "
3031 "of work-sharing, %<loop%>, %<critical%>, "
3032 "%<ordered%>, %<master%>, explicit %<task%> or "
3033 "%<taskloop%> region");
3034 return false;
3036 error_at (gimple_location (stmt),
3037 "work-sharing region may not be closely nested inside "
3038 "of work-sharing, %<loop%>, %<critical%>, %<ordered%>, "
3039 "%<master%>, explicit %<task%> or %<taskloop%> region");
3040 return false;
3041 case GIMPLE_OMP_PARALLEL:
3042 case GIMPLE_OMP_TEAMS:
3043 return true;
3044 case GIMPLE_OMP_TARGET:
3045 if (gimple_omp_target_kind (ctx->stmt)
3046 == GF_OMP_TARGET_KIND_REGION)
3047 return true;
3048 break;
3049 default:
3050 break;
3052 break;
3053 case GIMPLE_OMP_MASTER:
3054 for (; ctx != NULL; ctx = ctx->outer)
3055 switch (gimple_code (ctx->stmt))
3057 case GIMPLE_OMP_FOR:
3058 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
3059 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
3060 break;
3061 /* FALLTHRU */
3062 case GIMPLE_OMP_SECTIONS:
3063 case GIMPLE_OMP_SINGLE:
3064 case GIMPLE_OMP_TASK:
3065 error_at (gimple_location (stmt),
3066 "%<master%> region may not be closely nested inside "
3067 "of work-sharing, %<loop%>, explicit %<task%> or "
3068 "%<taskloop%> region");
3069 return false;
3070 case GIMPLE_OMP_PARALLEL:
3071 case GIMPLE_OMP_TEAMS:
3072 return true;
3073 case GIMPLE_OMP_TARGET:
3074 if (gimple_omp_target_kind (ctx->stmt)
3075 == GF_OMP_TARGET_KIND_REGION)
3076 return true;
3077 break;
3078 default:
3079 break;
3081 break;
3082 case GIMPLE_OMP_TASK:
3083 for (c = gimple_omp_task_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
3084 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
3085 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
3086 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
3088 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
3089 error_at (OMP_CLAUSE_LOCATION (c),
3090 "%<depend(%s)%> is only allowed in %<omp ordered%>",
3091 kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
3092 return false;
3094 break;
3095 case GIMPLE_OMP_ORDERED:
3096 for (c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
3097 c; c = OMP_CLAUSE_CHAIN (c))
3099 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
3101 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREADS
3102 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SIMD);
3103 continue;
3105 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
3106 if (kind == OMP_CLAUSE_DEPEND_SOURCE
3107 || kind == OMP_CLAUSE_DEPEND_SINK)
3109 tree oclause;
3110 /* Look for containing ordered(N) loop. */
3111 if (ctx == NULL
3112 || gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
3113 || (oclause
3114 = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3115 OMP_CLAUSE_ORDERED)) == NULL_TREE)
3117 error_at (OMP_CLAUSE_LOCATION (c),
3118 "%<ordered%> construct with %<depend%> clause "
3119 "must be closely nested inside an %<ordered%> "
3120 "loop");
3121 return false;
3123 else if (OMP_CLAUSE_ORDERED_EXPR (oclause) == NULL_TREE)
3125 error_at (OMP_CLAUSE_LOCATION (c),
3126 "%<ordered%> construct with %<depend%> clause "
3127 "must be closely nested inside a loop with "
3128 "%<ordered%> clause with a parameter");
3129 return false;
3132 else
3134 error_at (OMP_CLAUSE_LOCATION (c),
3135 "invalid depend kind in omp %<ordered%> %<depend%>");
3136 return false;
3139 c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
3140 if (omp_find_clause (c, OMP_CLAUSE_SIMD))
3142 /* ordered simd must be closely nested inside of a simd region,
3143 and a simd region must not encounter constructs other than
3144 ordered simd; therefore ordered simd may be either orphaned,
3145 or ctx->stmt must be simd. The latter case has already been
3146 handled earlier. */
3147 if (ctx != NULL)
3149 error_at (gimple_location (stmt),
3150 "%<ordered%> %<simd%> must be closely nested inside "
3151 "%<simd%> region");
3152 return false;
3155 for (; ctx != NULL; ctx = ctx->outer)
3156 switch (gimple_code (ctx->stmt))
3158 case GIMPLE_OMP_CRITICAL:
3159 case GIMPLE_OMP_TASK:
3160 case GIMPLE_OMP_ORDERED:
3161 ordered_in_taskloop:
3162 error_at (gimple_location (stmt),
3163 "%<ordered%> region may not be closely nested inside "
3164 "of %<critical%>, %<ordered%>, explicit %<task%> or "
3165 "%<taskloop%> region");
3166 return false;
3167 case GIMPLE_OMP_FOR:
3168 if (gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP)
3169 goto ordered_in_taskloop;
3170 tree o;
3171 o = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3172 OMP_CLAUSE_ORDERED);
3173 if (o == NULL)
3175 error_at (gimple_location (stmt),
3176 "%<ordered%> region must be closely nested inside "
3177 "a loop region with an %<ordered%> clause");
3178 return false;
3180 if (OMP_CLAUSE_ORDERED_EXPR (o) != NULL_TREE
3181 && omp_find_clause (c, OMP_CLAUSE_DEPEND) == NULL_TREE)
3183 error_at (gimple_location (stmt),
3184 "%<ordered%> region without %<depend%> clause may "
3185 "not be closely nested inside a loop region with "
3186 "an %<ordered%> clause with a parameter");
3187 return false;
3189 return true;
3190 case GIMPLE_OMP_TARGET:
3191 if (gimple_omp_target_kind (ctx->stmt)
3192 != GF_OMP_TARGET_KIND_REGION)
3193 break;
3194 /* FALLTHRU */
3195 case GIMPLE_OMP_PARALLEL:
3196 case GIMPLE_OMP_TEAMS:
3197 error_at (gimple_location (stmt),
3198 "%<ordered%> region must be closely nested inside "
3199 "a loop region with an %<ordered%> clause");
3200 return false;
3201 default:
3202 break;
3204 break;
3205 case GIMPLE_OMP_CRITICAL:
3207 tree this_stmt_name
3208 = gimple_omp_critical_name (as_a <gomp_critical *> (stmt));
3209 for (; ctx != NULL; ctx = ctx->outer)
3210 if (gomp_critical *other_crit
3211 = dyn_cast <gomp_critical *> (ctx->stmt))
3212 if (this_stmt_name == gimple_omp_critical_name (other_crit))
3214 error_at (gimple_location (stmt),
3215 "%<critical%> region may not be nested inside "
3216 "a %<critical%> region with the same name");
3217 return false;
3220 break;
3221 case GIMPLE_OMP_TEAMS:
3222 if (ctx == NULL)
3223 break;
3224 else if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET
3225 || (gimple_omp_target_kind (ctx->stmt)
3226 != GF_OMP_TARGET_KIND_REGION))
3228 /* A teams construct can appear either strictly nested inside of a
3229 target construct with no intervening stmts, or can be encountered
3230 only by the initial task (so it must not appear inside any OpenMP
3231 construct). */
3232 error_at (gimple_location (stmt),
3233 "%<teams%> construct must be closely nested inside of "
3234 "%<target%> construct or not nested in any OpenMP "
3235 "construct");
3236 return false;
3238 break;
3239 case GIMPLE_OMP_TARGET:
3240 for (c = gimple_omp_target_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
3241 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
3242 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
3243 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
3245 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
3246 error_at (OMP_CLAUSE_LOCATION (c),
3247 "%<depend(%s)%> is only allowed in %<omp ordered%>",
3248 kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
3249 return false;
3251 if (is_gimple_omp_offloaded (stmt)
3252 && oacc_get_fn_attrib (cfun->decl) != NULL)
3254 error_at (gimple_location (stmt),
3255 "OpenACC region inside of OpenACC routine, nested "
3256 "parallelism not supported yet");
3257 return false;
3259 for (; ctx != NULL; ctx = ctx->outer)
3261 if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET)
3263 if (is_gimple_omp (stmt)
3264 && is_gimple_omp_oacc (stmt)
3265 && is_gimple_omp (ctx->stmt))
3267 error_at (gimple_location (stmt),
3268 "OpenACC construct inside of non-OpenACC region");
3269 return false;
3271 continue;
3274 const char *stmt_name, *ctx_stmt_name;
3275 switch (gimple_omp_target_kind (stmt))
3277 case GF_OMP_TARGET_KIND_REGION: stmt_name = "target"; break;
3278 case GF_OMP_TARGET_KIND_DATA: stmt_name = "target data"; break;
3279 case GF_OMP_TARGET_KIND_UPDATE: stmt_name = "target update"; break;
3280 case GF_OMP_TARGET_KIND_ENTER_DATA:
3281 stmt_name = "target enter data"; break;
3282 case GF_OMP_TARGET_KIND_EXIT_DATA:
3283 stmt_name = "target exit data"; break;
3284 case GF_OMP_TARGET_KIND_OACC_PARALLEL: stmt_name = "parallel"; break;
3285 case GF_OMP_TARGET_KIND_OACC_KERNELS: stmt_name = "kernels"; break;
3286 case GF_OMP_TARGET_KIND_OACC_DATA: stmt_name = "data"; break;
3287 case GF_OMP_TARGET_KIND_OACC_UPDATE: stmt_name = "update"; break;
3288 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
3289 stmt_name = "enter/exit data"; break;
3290 case GF_OMP_TARGET_KIND_OACC_DECLARE: stmt_name = "declare"; break;
3291 case GF_OMP_TARGET_KIND_OACC_HOST_DATA: stmt_name = "host_data";
3292 break;
3293 default: gcc_unreachable ();
3295 switch (gimple_omp_target_kind (ctx->stmt))
3297 case GF_OMP_TARGET_KIND_REGION: ctx_stmt_name = "target"; break;
3298 case GF_OMP_TARGET_KIND_DATA: ctx_stmt_name = "target data"; break;
3299 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
3300 ctx_stmt_name = "parallel"; break;
3301 case GF_OMP_TARGET_KIND_OACC_KERNELS:
3302 ctx_stmt_name = "kernels"; break;
3303 case GF_OMP_TARGET_KIND_OACC_DATA: ctx_stmt_name = "data"; break;
3304 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
3305 ctx_stmt_name = "host_data"; break;
3306 default: gcc_unreachable ();
3309 /* OpenACC/OpenMP mismatch? */
3310 if (is_gimple_omp_oacc (stmt)
3311 != is_gimple_omp_oacc (ctx->stmt))
3313 error_at (gimple_location (stmt),
3314 "%s %qs construct inside of %s %qs region",
3315 (is_gimple_omp_oacc (stmt)
3316 ? "OpenACC" : "OpenMP"), stmt_name,
3317 (is_gimple_omp_oacc (ctx->stmt)
3318 ? "OpenACC" : "OpenMP"), ctx_stmt_name);
3319 return false;
3321 if (is_gimple_omp_offloaded (ctx->stmt))
3323 /* No GIMPLE_OMP_TARGET inside offloaded OpenACC CTX. */
3324 if (is_gimple_omp_oacc (ctx->stmt))
3326 error_at (gimple_location (stmt),
3327 "%qs construct inside of %qs region",
3328 stmt_name, ctx_stmt_name);
3329 return false;
3331 else
3333 warning_at (gimple_location (stmt), 0,
3334 "%qs construct inside of %qs region",
3335 stmt_name, ctx_stmt_name);
3339 break;
3340 default:
3341 break;
3343 return true;
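/* For example, the work-sharing checks above reject

     #pragma omp for
     for (i = 0; i < n; i++)
       {
     #pragma omp single
         body;
       }

   since a work-sharing region may not be closely nested inside
   another work-sharing region without an intervening parallel
   region.  */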
3347 /* Helper function for scan_omp.
3349 Callback for walk_tree, or for operators in walk_gimple_stmt, used
3350 to scan TP for OMP directives. */
3352 static tree
3353 scan_omp_1_op (tree *tp, int *walk_subtrees, void *data)
3355 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
3356 omp_context *ctx = (omp_context *) wi->info;
3357 tree t = *tp;
3359 switch (TREE_CODE (t))
3361 case VAR_DECL:
3362 case PARM_DECL:
3363 case LABEL_DECL:
3364 case RESULT_DECL:
3365 if (ctx)
3367 tree repl = remap_decl (t, &ctx->cb);
3368 gcc_checking_assert (TREE_CODE (repl) != ERROR_MARK);
3369 *tp = repl;
3371 break;
3373 default:
3374 if (ctx && TYPE_P (t))
3375 *tp = remap_type (t, &ctx->cb);
3376 else if (!DECL_P (t))
3378 *walk_subtrees = 1;
3379 if (ctx)
3381 tree tem = remap_type (TREE_TYPE (t), &ctx->cb);
3382 if (tem != TREE_TYPE (t))
3384 if (TREE_CODE (t) == INTEGER_CST)
3385 *tp = wide_int_to_tree (tem, wi::to_wide (t));
3386 else
3387 TREE_TYPE (t) = tem;
3391 break;
3394 return NULL_TREE;
3397 /* Return true if FNDECL is a setjmp or a longjmp. */
3399 static bool
3400 setjmp_or_longjmp_p (const_tree fndecl)
3402 if (fndecl_built_in_p (fndecl, BUILT_IN_SETJMP)
3403 || fndecl_built_in_p (fndecl, BUILT_IN_LONGJMP))
3404 return true;
3406 tree declname = DECL_NAME (fndecl);
3407 if (!declname
3408 || (DECL_CONTEXT (fndecl) != NULL_TREE
3409 && TREE_CODE (DECL_CONTEXT (fndecl)) != TRANSLATION_UNIT_DECL)
3410 || !TREE_PUBLIC (fndecl))
3411 return false;
3413 const char *name = IDENTIFIER_POINTER (declname);
3414 return !strcmp (name, "setjmp") || !strcmp (name, "longjmp");
3417 /* Return true if FNDECL is an omp_* runtime API call. */
3419 static bool
3420 omp_runtime_api_call (const_tree fndecl)
3422 tree declname = DECL_NAME (fndecl);
3423 if (!declname
3424 || (DECL_CONTEXT (fndecl) != NULL_TREE
3425 && TREE_CODE (DECL_CONTEXT (fndecl)) != TRANSLATION_UNIT_DECL)
3426 || !TREE_PUBLIC (fndecl))
3427 return false;
3429 const char *name = IDENTIFIER_POINTER (declname);
3430 if (strncmp (name, "omp_", 4) != 0)
3431 return false;
3433 static const char *omp_runtime_apis[] =
3435 /* This array has 3 sections. First, omp_* calls that don't
3436 have any suffixes. */
3437 "target_alloc",
3438 "target_associate_ptr",
3439 "target_disassociate_ptr",
3440 "target_free",
3441 "target_is_present",
3442 "target_memcpy",
3443 "target_memcpy_rect",
3444 NULL,
3445 /* Now omp_* calls that are available as omp_* and omp_*_. */
3446 "capture_affinity",
3447 "destroy_lock",
3448 "destroy_nest_lock",
3449 "display_affinity",
3450 "get_active_level",
3451 "get_affinity_format",
3452 "get_cancellation",
3453 "get_default_device",
3454 "get_dynamic",
3455 "get_initial_device",
3456 "get_level",
3457 "get_max_active_levels",
3458 "get_max_task_priority",
3459 "get_max_threads",
3460 "get_nested",
3461 "get_num_devices",
3462 "get_num_places",
3463 "get_num_procs",
3464 "get_num_teams",
3465 "get_num_threads",
3466 "get_partition_num_places",
3467 "get_place_num",
3468 "get_proc_bind",
3469 "get_team_num",
3470 "get_thread_limit",
3471 "get_thread_num",
3472 "get_wtick",
3473 "get_wtime",
3474 "in_final",
3475 "in_parallel",
3476 "init_lock",
3477 "init_nest_lock",
3478 "is_initial_device",
3479 "pause_resource",
3480 "pause_resource_all",
3481 "set_affinity_format",
3482 "set_lock",
3483 "set_nest_lock",
3484 "test_lock",
3485 "test_nest_lock",
3486 "unset_lock",
3487 "unset_nest_lock",
3488 NULL,
3489 /* And finally calls available as omp_*, omp_*_ and omp_*_8_. */
3490 "get_ancestor_thread_num",
3491 "get_partition_place_nums",
3492 "get_place_num_procs",
3493 "get_place_proc_ids",
3494 "get_schedule",
3495 "get_team_size",
3496 "set_default_device",
3497 "set_dynamic",
3498 "set_max_active_levels",
3499 "set_nested",
3500 "set_num_threads",
3501 "set_schedule"
3504 int mode = 0;
3505 for (unsigned i = 0; i < ARRAY_SIZE (omp_runtime_apis); i++)
3507 if (omp_runtime_apis[i] == NULL)
3509 mode++;
3510 continue;
3512 size_t len = strlen (omp_runtime_apis[i]);
3513 if (strncmp (name + 4, omp_runtime_apis[i], len) == 0
3514 && (name[4 + len] == '\0'
3515 || (mode > 0
3516 && name[4 + len] == '_'
3517 && (name[4 + len + 1] == '\0'
3518 || (mode > 1
3519 && strcmp (name + 4 + len + 1, "8_") == 0)))))
3520 return true;
3522 return false;
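/* Thus, e.g., "get_num_threads" from the second section matches both
   omp_get_num_threads and the Fortran-mangled omp_get_num_threads_,
   while "set_schedule" from the third section additionally matches
   omp_set_schedule_8_.  */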
3525 /* Helper function for scan_omp.
3527 Callback for walk_gimple_stmt used to scan for OMP directives in
3528 the current statement in GSI. */
3530 static tree
3531 scan_omp_1_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
3532 struct walk_stmt_info *wi)
3534 gimple *stmt = gsi_stmt (*gsi);
3535 omp_context *ctx = (omp_context *) wi->info;
3537 if (gimple_has_location (stmt))
3538 input_location = gimple_location (stmt);
3540 /* Check the nesting restrictions. */
3541 bool remove = false;
3542 if (is_gimple_omp (stmt))
3543 remove = !check_omp_nesting_restrictions (stmt, ctx);
3544 else if (is_gimple_call (stmt))
3546 tree fndecl = gimple_call_fndecl (stmt);
3547 if (fndecl)
3549 if (ctx
3550 && gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
3551 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD
3552 && setjmp_or_longjmp_p (fndecl)
3553 && !ctx->loop_p)
3555 remove = true;
3556 error_at (gimple_location (stmt),
3557 "setjmp/longjmp inside %<simd%> construct");
3559 else if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3560 switch (DECL_FUNCTION_CODE (fndecl))
3562 case BUILT_IN_GOMP_BARRIER:
3563 case BUILT_IN_GOMP_CANCEL:
3564 case BUILT_IN_GOMP_CANCELLATION_POINT:
3565 case BUILT_IN_GOMP_TASKYIELD:
3566 case BUILT_IN_GOMP_TASKWAIT:
3567 case BUILT_IN_GOMP_TASKGROUP_START:
3568 case BUILT_IN_GOMP_TASKGROUP_END:
3569 remove = !check_omp_nesting_restrictions (stmt, ctx);
3570 break;
3571 default:
3572 break;
3574 else if (ctx)
3576 omp_context *octx = ctx;
3577 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SCAN && ctx->outer)
3578 octx = ctx->outer;
3579 if (octx->order_concurrent && omp_runtime_api_call (fndecl))
3581 remove = true;
3582 error_at (gimple_location (stmt),
3583 "OpenMP runtime API call %qD in a region with "
3584 "%<order(concurrent)%> clause", fndecl);
3589 if (remove)
3591 stmt = gimple_build_nop ();
3592 gsi_replace (gsi, stmt, false);
3595 *handled_ops_p = true;
3597 switch (gimple_code (stmt))
3599 case GIMPLE_OMP_PARALLEL:
3600 taskreg_nesting_level++;
3601 scan_omp_parallel (gsi, ctx);
3602 taskreg_nesting_level--;
3603 break;
3605 case GIMPLE_OMP_TASK:
3606 taskreg_nesting_level++;
3607 scan_omp_task (gsi, ctx);
3608 taskreg_nesting_level--;
3609 break;
3611 case GIMPLE_OMP_FOR:
3612 if ((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
3613 == GF_OMP_FOR_KIND_SIMD)
3614 && gimple_omp_for_combined_into_p (stmt)
3615 && gimple_code (ctx->stmt) != GIMPLE_OMP_SCAN)
3617 tree clauses = gimple_omp_for_clauses (as_a <gomp_for *> (stmt));
3618 tree c = omp_find_clause (clauses, OMP_CLAUSE_REDUCTION);
3619 if (c && OMP_CLAUSE_REDUCTION_INSCAN (c) && !seen_error ())
3621 scan_omp_simd_scan (gsi, as_a <gomp_for *> (stmt), ctx);
3622 break;
3625 if ((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
3626 == GF_OMP_FOR_KIND_SIMD)
3627 && omp_maybe_offloaded_ctx (ctx)
3628 && omp_max_simt_vf ())
3629 scan_omp_simd (gsi, as_a <gomp_for *> (stmt), ctx);
3630 else
3631 scan_omp_for (as_a <gomp_for *> (stmt), ctx);
3632 break;
3634 case GIMPLE_OMP_SECTIONS:
3635 scan_omp_sections (as_a <gomp_sections *> (stmt), ctx);
3636 break;
3638 case GIMPLE_OMP_SINGLE:
3639 scan_omp_single (as_a <gomp_single *> (stmt), ctx);
3640 break;
3642 case GIMPLE_OMP_SCAN:
3643 if (tree clauses = gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt)))
3645 if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_INCLUSIVE)
3646 ctx->scan_inclusive = true;
3647 else if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_EXCLUSIVE)
3648 ctx->scan_exclusive = true;
3650 /* FALLTHRU */
3651 case GIMPLE_OMP_SECTION:
3652 case GIMPLE_OMP_MASTER:
3653 case GIMPLE_OMP_ORDERED:
3654 case GIMPLE_OMP_CRITICAL:
3655 case GIMPLE_OMP_GRID_BODY:
3656 ctx = new_omp_context (stmt, ctx);
3657 scan_omp (gimple_omp_body_ptr (stmt), ctx);
3658 break;
3660 case GIMPLE_OMP_TASKGROUP:
3661 ctx = new_omp_context (stmt, ctx);
3662 scan_sharing_clauses (gimple_omp_taskgroup_clauses (stmt), ctx);
3663 scan_omp (gimple_omp_body_ptr (stmt), ctx);
3664 break;
3666 case GIMPLE_OMP_TARGET:
3667 scan_omp_target (as_a <gomp_target *> (stmt), ctx);
3668 break;
3670 case GIMPLE_OMP_TEAMS:
3671 if (gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
3673 taskreg_nesting_level++;
3674 scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
3675 taskreg_nesting_level--;
3677 else
3678 scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
3679 break;
3681 case GIMPLE_BIND:
3683 tree var;
3685 *handled_ops_p = false;
3686 if (ctx)
3687 for (var = gimple_bind_vars (as_a <gbind *> (stmt));
3688 var ;
3689 var = DECL_CHAIN (var))
3690 insert_decl_map (&ctx->cb, var, var);
3692 break;
3693 default:
3694 *handled_ops_p = false;
3695 break;
3698 return NULL_TREE;
3702 /* Scan all the statements starting at the current statement. CTX
3703 contains context information about the OMP directives and
3704 clauses found during the scan. */
3706 static void
3707 scan_omp (gimple_seq *body_p, omp_context *ctx)
3709 location_t saved_location;
3710 struct walk_stmt_info wi;
3712 memset (&wi, 0, sizeof (wi));
3713 wi.info = ctx;
3714 wi.want_locations = true;
3716 saved_location = input_location;
3717 walk_gimple_seq_mod (body_p, scan_omp_1_stmt, scan_omp_1_op, &wi);
3718 input_location = saved_location;
3721 /* Re-gimplification and code generation routines. */
3723 /* Remove omp_member_access_dummy_var variables from gimple_bind_vars
3724 of BIND if in a method. */
3726 static void
3727 maybe_remove_omp_member_access_dummy_vars (gbind *bind)
3729 if (DECL_ARGUMENTS (current_function_decl)
3730 && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
3731 && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
3732 == POINTER_TYPE))
3734 tree vars = gimple_bind_vars (bind);
3735 for (tree *pvar = &vars; *pvar; )
3736 if (omp_member_access_dummy_var (*pvar))
3737 *pvar = DECL_CHAIN (*pvar);
3738 else
3739 pvar = &DECL_CHAIN (*pvar);
3740 gimple_bind_set_vars (bind, vars);
3744 /* Remove omp_member_access_dummy_var variables from BLOCK_VARS of
3745 block and its subblocks. */
3747 static void
3748 remove_member_access_dummy_vars (tree block)
3750 for (tree *pvar = &BLOCK_VARS (block); *pvar; )
3751 if (omp_member_access_dummy_var (*pvar))
3752 *pvar = DECL_CHAIN (*pvar);
3753 else
3754 pvar = &DECL_CHAIN (*pvar);
3756 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
3757 remove_member_access_dummy_vars (block);
3760 /* If a context was created for STMT when it was scanned, return it. */
3762 static omp_context *
3763 maybe_lookup_ctx (gimple *stmt)
3765 splay_tree_node n;
3766 n = splay_tree_lookup (all_contexts, (splay_tree_key) stmt);
3767 return n ? (omp_context *) n->value : NULL;
3771 /* Find the mapping for DECL in CTX or the immediately enclosing
3772 context that has a mapping for DECL.
3774 If CTX is a nested parallel directive, we may have to use the decl
3775 mappings created in CTX's parent context. Suppose that we have the
3776 following parallel nesting (variable UIDs shown for clarity):
3778 iD.1562 = 0;
3779 #omp parallel shared(iD.1562) -> outer parallel
3780 iD.1562 = iD.1562 + 1;
3782 #omp parallel shared (iD.1562) -> inner parallel
3783 iD.1562 = iD.1562 - 1;
3785 Each parallel structure will create a distinct .omp_data_s structure
3786 for copying iD.1562 in/out of the directive:
3788 outer parallel .omp_data_s.1.i -> iD.1562
3789 inner parallel .omp_data_s.2.i -> iD.1562
3791 A shared variable mapping will produce a copy-out operation before
3792 the parallel directive and a copy-in operation after it. So, in
3793 this case we would have:
3795 iD.1562 = 0;
3796 .omp_data_o.1.i = iD.1562;
3797 #omp parallel shared(iD.1562) -> outer parallel
3798 .omp_data_i.1 = &.omp_data_o.1
3799 .omp_data_i.1->i = .omp_data_i.1->i + 1;
3801 .omp_data_o.2.i = iD.1562; -> **
3802 #omp parallel shared(iD.1562) -> inner parallel
3803 .omp_data_i.2 = &.omp_data_o.2
3804 .omp_data_i.2->i = .omp_data_i.2->i - 1;
3807 ** This is a problem. The symbol iD.1562 cannot be referenced
3808 inside the body of the outer parallel region. But since we are
3809 emitting this copy operation while expanding the inner parallel
3810 directive, we need to access the CTX structure of the outer
3811 parallel directive to get the correct mapping:
3813 .omp_data_o.2.i = .omp_data_i.1->i
3815 Since there may be other workshare or parallel directives enclosing
3816 the parallel directive, it may be necessary to walk up the context
3817 parent chain. This is not a problem in general because nested
3818 parallelism happens only rarely. */
3820 static tree
3821 lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
3822 {
3823 tree t;
3824 omp_context *up;
3826 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
3827 t = maybe_lookup_decl (decl, up);
3829 gcc_assert (!ctx->is_nested || t || is_global_var (decl));
3831 return t ? t : decl;
3832 }
3835 /* Similar to lookup_decl_in_outer_ctx, but return DECL if not found
3836 in outer contexts. */
3838 static tree
3839 maybe_lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
3840 {
3841 tree t = NULL;
3842 omp_context *up;
3844 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
3845 t = maybe_lookup_decl (decl, up);
3847 return t ? t : decl;
3848 }
3851 /* Construct the initialization value for reduction operation OP. */
3853 tree
3854 omp_reduction_init_op (location_t loc, enum tree_code op, tree type)
3855 {
3856 switch (op)
3857 {
3858 case PLUS_EXPR:
3859 case MINUS_EXPR:
3860 case BIT_IOR_EXPR:
3861 case BIT_XOR_EXPR:
3862 case TRUTH_OR_EXPR:
3863 case TRUTH_ORIF_EXPR:
3864 case TRUTH_XOR_EXPR:
3865 case NE_EXPR:
3866 return build_zero_cst (type);
3868 case MULT_EXPR:
3869 case TRUTH_AND_EXPR:
3870 case TRUTH_ANDIF_EXPR:
3871 case EQ_EXPR:
3872 return fold_convert_loc (loc, type, integer_one_node);
3874 case BIT_AND_EXPR:
3875 return fold_convert_loc (loc, type, integer_minus_one_node);
3877 case MAX_EXPR:
3878 if (SCALAR_FLOAT_TYPE_P (type))
3879 {
3880 REAL_VALUE_TYPE max, min;
3881 if (HONOR_INFINITIES (type))
3882 {
3883 real_inf (&max);
3884 real_arithmetic (&min, NEGATE_EXPR, &max, NULL);
3885 }
3886 else
3887 real_maxval (&min, 1, TYPE_MODE (type));
3888 return build_real (type, min);
3889 }
3890 else if (POINTER_TYPE_P (type))
3891 {
3892 wide_int min
3893 = wi::min_value (TYPE_PRECISION (type), TYPE_SIGN (type));
3894 return wide_int_to_tree (type, min);
3895 }
3896 else
3897 {
3898 gcc_assert (INTEGRAL_TYPE_P (type));
3899 return TYPE_MIN_VALUE (type);
3900 }
3902 case MIN_EXPR:
3903 if (SCALAR_FLOAT_TYPE_P (type))
3904 {
3905 REAL_VALUE_TYPE max;
3906 if (HONOR_INFINITIES (type))
3907 real_inf (&max);
3908 else
3909 real_maxval (&max, 0, TYPE_MODE (type));
3910 return build_real (type, max);
3911 }
3912 else if (POINTER_TYPE_P (type))
3913 {
3914 wide_int max
3915 = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
3916 return wide_int_to_tree (type, max);
3917 }
3918 else
3919 {
3920 gcc_assert (INTEGRAL_TYPE_P (type));
3921 return TYPE_MAX_VALUE (type);
3922 }
3924 default:
3925 gcc_unreachable ();
3926 }
3927 }
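/* A compact summary of the identities chosen above: 0 for +, -, |, ^,
   the truth or/xor operators and !=; 1 for *, the truth and operators
   and ==; ~0 (all bits set) for &; the most negative value of the type
   (or -inf when infinities are honored) for max; and symmetrically the
   largest value (or +inf) for min.  */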
3929 /* Construct the initialization value for reduction CLAUSE. */
3931 tree
3932 omp_reduction_init (tree clause, tree type)
3933 {
3934 return omp_reduction_init_op (OMP_CLAUSE_LOCATION (clause),
3935 OMP_CLAUSE_REDUCTION_CODE (clause), type);
3936 }
3938 /* Return alignment to be assumed for var in CLAUSE, which should be
3939 OMP_CLAUSE_ALIGNED. */
3941 static tree
3942 omp_clause_aligned_alignment (tree clause)
3943 {
3944 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
3945 return OMP_CLAUSE_ALIGNED_ALIGNMENT (clause);
3947 /* Otherwise return implementation defined alignment. */
3948 unsigned int al = 1;
3949 opt_scalar_mode mode_iter;
3950 auto_vector_sizes sizes;
3951 targetm.vectorize.autovectorize_vector_sizes (&sizes, true);
3952 poly_uint64 vs = 0;
3953 for (unsigned int i = 0; i < sizes.length (); ++i)
3954 vs = ordered_max (vs, sizes[i]);
3955 static enum mode_class classes[]
3956 = { MODE_INT, MODE_VECTOR_INT, MODE_FLOAT, MODE_VECTOR_FLOAT };
3957 for (int i = 0; i < 4; i += 2)
3958 /* The for loop above dictates that we only walk through scalar classes. */
3959 FOR_EACH_MODE_IN_CLASS (mode_iter, classes[i])
3960 {
3961 scalar_mode mode = mode_iter.require ();
3962 machine_mode vmode = targetm.vectorize.preferred_simd_mode (mode);
3963 if (GET_MODE_CLASS (vmode) != classes[i + 1])
3964 continue;
3965 while (maybe_ne (vs, 0U)
3966 && known_lt (GET_MODE_SIZE (vmode), vs)
3967 && GET_MODE_2XWIDER_MODE (vmode).exists ())
3968 vmode = GET_MODE_2XWIDER_MODE (vmode).require ();
3970 tree type = lang_hooks.types.type_for_mode (mode, 1);
3971 if (type == NULL_TREE || TYPE_MODE (type) != mode)
3972 continue;
3973 poly_uint64 nelts = exact_div (GET_MODE_SIZE (vmode),
3974 GET_MODE_SIZE (mode));
3975 type = build_vector_type (type, nelts);
3976 if (TYPE_MODE (type) != vmode)
3977 continue;
3978 if (TYPE_ALIGN_UNIT (type) > al)
3979 al = TYPE_ALIGN_UNIT (type);
3980 }
3981 return build_int_cst (integer_type_node, al);
3982 }
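/* In effect the fallback above returns the unit alignment of the
   widest vector type the target would pick when autovectorizing any
   of the scalar int/float modes; on x86, for instance, that is
   typically 16 with SSE, 32 with AVX2 and 64 with AVX-512.  */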
3985 /* This structure is part of the interface between lower_rec_simd_input_clauses
3986 and lower_rec_input_clauses. */
3988 class omplow_simd_context {
3989 public:
3990 omplow_simd_context () { memset (this, 0, sizeof (*this)); }
3991 tree idx;
3992 tree lane;
3993 tree lastlane;
3994 vec<tree, va_heap> simt_eargs;
3995 gimple_seq simt_dlist;
3996 poly_uint64_pod max_vf;
3997 bool is_simt;
3998 };
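/* IDX and LANE index the per-construct "omp simd array" copies created
   in lower_rec_simd_input_clauses, LASTLANE selects the lane whose
   value survives for lastprivate, SIMT_EARGS collects the address
   arguments used for SIMT privatization and SIMT_DLIST the matching
   clobbers; see their uses below.  */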
4000 /* Helper function of lower_rec_input_clauses, used for #pragma omp simd
4001 privatization. */
4003 static bool
4004 lower_rec_simd_input_clauses (tree new_var, omp_context *ctx,
4005 omplow_simd_context *sctx, tree &ivar,
4006 tree &lvar, tree *rvar = NULL,
4007 tree *rvar2 = NULL)
4008 {
4009 if (known_eq (sctx->max_vf, 0U))
4010 {
4011 sctx->max_vf = sctx->is_simt ? omp_max_simt_vf () : omp_max_vf ();
4012 if (maybe_gt (sctx->max_vf, 1U))
4013 {
4014 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
4015 OMP_CLAUSE_SAFELEN);
4016 if (c)
4017 {
4018 poly_uint64 safe_len;
4019 if (!poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
4020 || maybe_lt (safe_len, 1U))
4021 sctx->max_vf = 1;
4022 else
4023 sctx->max_vf = lower_bound (sctx->max_vf, safe_len);
4024 }
4025 }
4026 if (maybe_gt (sctx->max_vf, 1U))
4027 {
4028 sctx->idx = create_tmp_var (unsigned_type_node);
4029 sctx->lane = create_tmp_var (unsigned_type_node);
4030 }
4031 }
4032 if (known_eq (sctx->max_vf, 1U))
4033 return false;
4035 if (sctx->is_simt)
4036 {
4037 if (is_gimple_reg (new_var))
4038 {
4039 ivar = lvar = new_var;
4040 return true;
4041 }
4042 tree type = TREE_TYPE (new_var), ptype = build_pointer_type (type);
4043 ivar = lvar = create_tmp_var (type);
4044 TREE_ADDRESSABLE (ivar) = 1;
4045 DECL_ATTRIBUTES (ivar) = tree_cons (get_identifier ("omp simt private"),
4046 NULL, DECL_ATTRIBUTES (ivar));
4047 sctx->simt_eargs.safe_push (build1 (ADDR_EXPR, ptype, ivar));
4048 tree clobber = build_clobber (type);
4049 gimple *g = gimple_build_assign (ivar, clobber);
4050 gimple_seq_add_stmt (&sctx->simt_dlist, g);
4051 }
4052 else
4053 {
4054 tree atype = build_array_type_nelts (TREE_TYPE (new_var), sctx->max_vf);
4055 tree avar = create_tmp_var_raw (atype);
4056 if (TREE_ADDRESSABLE (new_var))
4057 TREE_ADDRESSABLE (avar) = 1;
4058 DECL_ATTRIBUTES (avar)
4059 = tree_cons (get_identifier ("omp simd array"), NULL,
4060 DECL_ATTRIBUTES (avar));
4061 gimple_add_tmp_var (avar);
4062 tree iavar = avar;
4063 if (rvar && !ctx->for_simd_scan_phase)
4064 {
4065 /* For inscan reductions, create another array temporary,
4066 which will hold the reduced value. */
4067 iavar = create_tmp_var_raw (atype);
4068 if (TREE_ADDRESSABLE (new_var))
4069 TREE_ADDRESSABLE (iavar) = 1;
4070 DECL_ATTRIBUTES (iavar)
4071 = tree_cons (get_identifier ("omp simd array"), NULL,
4072 tree_cons (get_identifier ("omp simd inscan"), NULL,
4073 DECL_ATTRIBUTES (iavar)));
4074 gimple_add_tmp_var (iavar);
4075 ctx->cb.decl_map->put (avar, iavar);
4076 if (sctx->lastlane == NULL_TREE)
4077 sctx->lastlane = create_tmp_var (unsigned_type_node);
4078 *rvar = build4 (ARRAY_REF, TREE_TYPE (new_var), iavar,
4079 sctx->lastlane, NULL_TREE, NULL_TREE);
4080 TREE_THIS_NOTRAP (*rvar) = 1;
4082 if (ctx->scan_exclusive)
4083 {
4084 /* And for exclusive scan yet another one, which will
4085 hold the value during the scan phase. */
4086 tree savar = create_tmp_var_raw (atype);
4087 if (TREE_ADDRESSABLE (new_var))
4088 TREE_ADDRESSABLE (savar) = 1;
4089 DECL_ATTRIBUTES (savar)
4090 = tree_cons (get_identifier ("omp simd array"), NULL,
4091 tree_cons (get_identifier ("omp simd inscan "
4092 "exclusive"), NULL,
4093 DECL_ATTRIBUTES (savar)));
4094 gimple_add_tmp_var (savar);
4095 ctx->cb.decl_map->put (iavar, savar);
4096 *rvar2 = build4 (ARRAY_REF, TREE_TYPE (new_var), savar,
4097 sctx->idx, NULL_TREE, NULL_TREE);
4098 TREE_THIS_NOTRAP (*rvar2) = 1;
4099 }
4100 }
4101 ivar = build4 (ARRAY_REF, TREE_TYPE (new_var), iavar, sctx->idx,
4102 NULL_TREE, NULL_TREE);
4103 lvar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, sctx->lane,
4104 NULL_TREE, NULL_TREE);
4105 TREE_THIS_NOTRAP (ivar) = 1;
4106 TREE_THIS_NOTRAP (lvar) = 1;
4107 }
4108 if (DECL_P (new_var))
4109 {
4110 SET_DECL_VALUE_EXPR (new_var, lvar);
4111 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4112 }
4113 return true;
4114 }
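/* A rough sketch of the non-SIMT shape created above, using D.simd as
   an illustrative name only: a privatized VAR of type T gets an
   "omp simd array"

     T D.simd[max_vf];

   with IVAR = D.simd[sctx->idx] used inside the loop body and LVAR
   = D.simd[sctx->lane] installed as VAR's DECL_VALUE_EXPR, so that
   after vectorization each SIMD lane owns one array element.  */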
4116 /* Helper function of lower_rec_input_clauses. For a reference
4117 in simd reduction, add an underlying variable it will reference. */
4119 static void
4120 handle_simd_reference (location_t loc, tree new_vard, gimple_seq *ilist)
4121 {
4122 tree z = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_vard)));
4123 if (TREE_CONSTANT (z))
4124 {
4125 z = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_vard)),
4126 get_name (new_vard));
4127 gimple_add_tmp_var (z);
4128 TREE_ADDRESSABLE (z) = 1;
4129 z = build_fold_addr_expr_loc (loc, z);
4130 gimplify_assign (new_vard, z, ilist);
4131 }
4132 }
4134 /* Helper function for lower_rec_input_clauses. Emit into ilist sequence
4135 code to emit (type) (tskred_temp[idx]). */
4137 static tree
4138 task_reduction_read (gimple_seq *ilist, tree tskred_temp, tree type,
4139 unsigned idx)
4140 {
4141 unsigned HOST_WIDE_INT sz
4142 = tree_to_uhwi (TYPE_SIZE_UNIT (pointer_sized_int_node));
4143 tree r = build2 (MEM_REF, pointer_sized_int_node,
4144 tskred_temp, build_int_cst (TREE_TYPE (tskred_temp),
4145 idx * sz));
4146 tree v = create_tmp_var (pointer_sized_int_node);
4147 gimple *g = gimple_build_assign (v, r);
4148 gimple_seq_add_stmt (ilist, g);
4149 if (!useless_type_conversion_p (type, pointer_sized_int_node))
4150 {
4151 v = create_tmp_var (type);
4152 g = gimple_build_assign (v, NOP_EXPR, gimple_assign_lhs (g));
4153 gimple_seq_add_stmt (ilist, g);
4154 }
4155 return v;
4156 }
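/* TSKRED_TEMP is treated as an array of pointer-sized slots; judging
   by the indices used in this function, slot 1 holds the per-thread
   size of the private reduction block, slot 2 its base address, and
   slots 7 + 3 * idx and 7 + 3 * idx + 1 describe the individual
   reductions (original address and offset respectively).  */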
4158 /* Generate code to implement the input clauses, FIRSTPRIVATE and COPYIN,
4159 from the receiver (aka child) side and initializers for REFERENCE_TYPE
4160 private variables. Initialization statements go in ILIST, while calls
4161 to destructors go in DLIST. */
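/* An illustrative example (variable names hypothetical): for

     #pragma omp parallel firstprivate (a)

   the receiver side copy-in emitted into ILIST is roughly

     a.1 = .omp_data_i->a;

   and for a C++ type with a destructor the matching destructor call
   for a.1 would be emitted into DLIST.  */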
4163 static void
4164 lower_rec_input_clauses (tree clauses, gimple_seq *ilist, gimple_seq *dlist,
4165 omp_context *ctx, struct omp_for_data *fd)
4166 {
4167 tree c, copyin_seq, x, ptr;
4168 bool copyin_by_ref = false;
4169 bool lastprivate_firstprivate = false;
4170 bool reduction_omp_orig_ref = false;
4171 int pass;
4172 bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
4173 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD);
4174 omplow_simd_context sctx = omplow_simd_context ();
4175 tree simt_lane = NULL_TREE, simtrec = NULL_TREE;
4176 tree ivar = NULL_TREE, lvar = NULL_TREE, uid = NULL_TREE;
4177 gimple_seq llist[4] = { };
4178 tree nonconst_simd_if = NULL_TREE;
4180 copyin_seq = NULL;
4181 sctx.is_simt = is_simd && omp_find_clause (clauses, OMP_CLAUSE__SIMT_);
4183 /* Set max_vf=1 (which will later enforce safelen=1) in simd loops
4184 with data sharing clauses referencing variable sized vars. That
4185 is unnecessarily hard to support and very unlikely to result in
4186 vectorized code anyway. */
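/* For example (hypothetical code), given

     int vla[n];
     #pragma omp simd private (vla)

   the clause references a variable sized var, so max_vf is forced
   to 1 below.  */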
4187 if (is_simd)
4188 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
4189 switch (OMP_CLAUSE_CODE (c))
4190 {
4191 case OMP_CLAUSE_LINEAR:
4192 if (OMP_CLAUSE_LINEAR_ARRAY (c))
4193 sctx.max_vf = 1;
4194 /* FALLTHRU */
4195 case OMP_CLAUSE_PRIVATE:
4196 case OMP_CLAUSE_FIRSTPRIVATE:
4197 case OMP_CLAUSE_LASTPRIVATE:
4198 if (is_variable_sized (OMP_CLAUSE_DECL (c)))
4199 sctx.max_vf = 1;
4200 else if (omp_is_reference (OMP_CLAUSE_DECL (c)))
4201 {
4202 tree rtype = TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c)));
4203 if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype)))
4204 sctx.max_vf = 1;
4205 }
4206 break;
4207 case OMP_CLAUSE_REDUCTION:
4208 case OMP_CLAUSE_IN_REDUCTION:
4209 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
4210 || is_variable_sized (OMP_CLAUSE_DECL (c)))
4211 sctx.max_vf = 1;
4212 else if (omp_is_reference (OMP_CLAUSE_DECL (c)))
4213 {
4214 tree rtype = TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c)));
4215 if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype)))
4216 sctx.max_vf = 1;
4217 }
4218 break;
4219 case OMP_CLAUSE_IF:
4220 if (integer_zerop (OMP_CLAUSE_IF_EXPR (c)))
4221 sctx.max_vf = 1;
4222 else if (TREE_CODE (OMP_CLAUSE_IF_EXPR (c)) != INTEGER_CST)
4223 nonconst_simd_if = OMP_CLAUSE_IF_EXPR (c);
4224 break;
4225 case OMP_CLAUSE_SIMDLEN:
4226 if (integer_onep (OMP_CLAUSE_SIMDLEN_EXPR (c)))
4227 sctx.max_vf = 1;
4228 break;
4229 case OMP_CLAUSE__CONDTEMP_:
4230 /* FIXME: lastprivate(conditional:) not handled for SIMT yet. */
4231 if (sctx.is_simt)
4232 sctx.max_vf = 1;
4233 break;
4234 default:
4235 continue;
4236 }
4238 /* Add a placeholder for simduid. */
4239 if (sctx.is_simt && maybe_ne (sctx.max_vf, 1U))
4240 sctx.simt_eargs.safe_push (NULL_TREE);
4242 unsigned task_reduction_cnt = 0;
4243 unsigned task_reduction_cntorig = 0;
4244 unsigned task_reduction_cnt_full = 0;
4245 unsigned task_reduction_cntorig_full = 0;
4246 unsigned task_reduction_other_cnt = 0;
4247 tree tskred_atype = NULL_TREE, tskred_avar = NULL_TREE;
4248 tree tskred_base = NULL_TREE, tskred_temp = NULL_TREE;
4249 /* Do all the fixed sized types in the first pass, and the variable sized
4250 types in the second pass. This makes sure that the scalar arguments to
4251 the variable sized types are processed before we use them in the
4252 variable sized operations. For task reductions we use 4 passes, in the
4253 first two we ignore them, in the third one gather arguments for
4254 GOMP_task_reduction_remap call and in the last pass actually handle
4255 the task reductions. */
4256 for (pass = 0; pass < ((task_reduction_cnt || task_reduction_other_cnt)
4257 ? 4 : 2); ++pass)
4259 if (pass == 2 && task_reduction_cnt)
4261 tskred_atype
4262 = build_array_type_nelts (ptr_type_node, task_reduction_cnt
4263 + task_reduction_cntorig);
4264 tskred_avar = create_tmp_var_raw (tskred_atype);
4265 gimple_add_tmp_var (tskred_avar);
4266 TREE_ADDRESSABLE (tskred_avar) = 1;
4267 task_reduction_cnt_full = task_reduction_cnt;
4268 task_reduction_cntorig_full = task_reduction_cntorig;
4270 else if (pass == 3 && task_reduction_cnt)
4272 x = builtin_decl_explicit (BUILT_IN_GOMP_TASK_REDUCTION_REMAP);
4273 gimple *g
4274 = gimple_build_call (x, 3, size_int (task_reduction_cnt),
4275 size_int (task_reduction_cntorig),
4276 build_fold_addr_expr (tskred_avar));
4277 gimple_seq_add_stmt (ilist, g);
4279 if (pass == 3 && task_reduction_other_cnt)
4281 /* For reduction clauses, build
4282 tskred_base = (void *) tskred_temp[2]
4283 + omp_get_thread_num () * tskred_temp[1]
4284 or if tskred_temp[1] is known to be constant, that constant
4285 directly. This is the start of the private reduction copy block
4286 for the current thread. */
4287 tree v = create_tmp_var (integer_type_node);
4288 x = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
4289 gimple *g = gimple_build_call (x, 0);
4290 gimple_call_set_lhs (g, v);
4291 gimple_seq_add_stmt (ilist, g);
4292 c = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
4293 tskred_temp = OMP_CLAUSE_DECL (c);
4294 if (is_taskreg_ctx (ctx))
4295 tskred_temp = lookup_decl (tskred_temp, ctx);
4296 tree v2 = create_tmp_var (sizetype);
4297 g = gimple_build_assign (v2, NOP_EXPR, v);
4298 gimple_seq_add_stmt (ilist, g);
4299 if (ctx->task_reductions[0])
4300 v = fold_convert (sizetype, ctx->task_reductions[0]);
4301 else
4302 v = task_reduction_read (ilist, tskred_temp, sizetype, 1);
4303 tree v3 = create_tmp_var (sizetype);
4304 g = gimple_build_assign (v3, MULT_EXPR, v2, v);
4305 gimple_seq_add_stmt (ilist, g);
4306 v = task_reduction_read (ilist, tskred_temp, ptr_type_node, 2);
4307 tskred_base = create_tmp_var (ptr_type_node);
4308 g = gimple_build_assign (tskred_base, POINTER_PLUS_EXPR, v, v3);
4309 gimple_seq_add_stmt (ilist, g);
4311 task_reduction_cnt = 0;
4312 task_reduction_cntorig = 0;
4313 task_reduction_other_cnt = 0;
4314 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
4316 enum omp_clause_code c_kind = OMP_CLAUSE_CODE (c);
4317 tree var, new_var;
4318 bool by_ref;
4319 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
4320 bool task_reduction_p = false;
4321 bool task_reduction_needs_orig_p = false;
4322 tree cond = NULL_TREE;
4324 switch (c_kind)
4326 case OMP_CLAUSE_PRIVATE:
4327 if (OMP_CLAUSE_PRIVATE_DEBUG (c))
4328 continue;
4329 break;
4330 case OMP_CLAUSE_SHARED:
4331 /* Ignore shared directives in teams construct inside
4332 of target construct. */
4333 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
4334 && !is_host_teams_ctx (ctx))
4335 continue;
4336 if (maybe_lookup_decl (OMP_CLAUSE_DECL (c), ctx) == NULL)
4338 gcc_assert (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c)
4339 || is_global_var (OMP_CLAUSE_DECL (c)));
4340 continue;
4342 case OMP_CLAUSE_FIRSTPRIVATE:
4343 case OMP_CLAUSE_COPYIN:
4344 break;
4345 case OMP_CLAUSE_LINEAR:
4346 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
4347 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
4348 lastprivate_firstprivate = true;
4349 break;
4350 case OMP_CLAUSE_REDUCTION:
4351 case OMP_CLAUSE_IN_REDUCTION:
4352 if (is_task_ctx (ctx) || OMP_CLAUSE_REDUCTION_TASK (c))
4354 task_reduction_p = true;
4355 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
4357 task_reduction_other_cnt++;
4358 if (pass == 2)
4359 continue;
4361 else
4362 task_reduction_cnt++;
4363 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
4365 var = OMP_CLAUSE_DECL (c);
4366 /* If var is a global variable that isn't privatized
4367 in outer contexts, we don't need to look up the
4368 original address, it is always the address of the
4369 global variable itself. */
4370 if (!DECL_P (var)
4371 || omp_is_reference (var)
4372 || !is_global_var
4373 (maybe_lookup_decl_in_outer_ctx (var, ctx)))
4375 task_reduction_needs_orig_p = true;
4376 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
4377 task_reduction_cntorig++;
4381 else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
4382 reduction_omp_orig_ref = true;
4383 break;
4384 case OMP_CLAUSE__REDUCTEMP_:
4385 if (!is_taskreg_ctx (ctx))
4386 continue;
4387 /* FALLTHRU */
4388 case OMP_CLAUSE__LOOPTEMP_:
4389 /* Handle _looptemp_/_reductemp_ clauses only on
4390 parallel/task. */
4391 if (fd)
4392 continue;
4393 break;
4394 case OMP_CLAUSE_LASTPRIVATE:
4395 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
4397 lastprivate_firstprivate = true;
4398 if (pass != 0 || is_taskloop_ctx (ctx))
4399 continue;
4401 /* Even without corresponding firstprivate, if
4402 decl is Fortran allocatable, it needs outer var
4403 reference. */
4404 else if (pass == 0
4405 && lang_hooks.decls.omp_private_outer_ref
4406 (OMP_CLAUSE_DECL (c)))
4407 lastprivate_firstprivate = true;
4408 break;
4409 case OMP_CLAUSE_ALIGNED:
4410 if (pass != 1)
4411 continue;
4412 var = OMP_CLAUSE_DECL (c);
4413 if (TREE_CODE (TREE_TYPE (var)) == POINTER_TYPE
4414 && !is_global_var (var))
4416 new_var = maybe_lookup_decl (var, ctx);
4417 if (new_var == NULL_TREE)
4418 new_var = maybe_lookup_decl_in_outer_ctx (var, ctx);
4419 x = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
4420 tree alarg = omp_clause_aligned_alignment (c);
4421 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
4422 x = build_call_expr_loc (clause_loc, x, 2, new_var, alarg);
4423 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
4424 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
4425 gimplify_and_add (x, ilist);
4427 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
4428 && is_global_var (var))
4430 tree ptype = build_pointer_type (TREE_TYPE (var)), t, t2;
4431 new_var = lookup_decl (var, ctx);
4432 t = maybe_lookup_decl_in_outer_ctx (var, ctx);
4433 t = build_fold_addr_expr_loc (clause_loc, t);
4434 t2 = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
4435 tree alarg = omp_clause_aligned_alignment (c);
4436 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
4437 t = build_call_expr_loc (clause_loc, t2, 2, t, alarg);
4438 t = fold_convert_loc (clause_loc, ptype, t);
4439 x = create_tmp_var (ptype);
4440 t = build2 (MODIFY_EXPR, ptype, x, t);
4441 gimplify_and_add (t, ilist);
4442 t = build_simple_mem_ref_loc (clause_loc, x);
4443 SET_DECL_VALUE_EXPR (new_var, t);
4444 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4446 continue;
4447 case OMP_CLAUSE__CONDTEMP_:
4448 if (is_parallel_ctx (ctx)
4449 || (is_simd && !OMP_CLAUSE__CONDTEMP__ITER (c)))
4450 break;
4451 continue;
4452 default:
4453 continue;
4456 if (task_reduction_p != (pass >= 2))
4457 continue;
4459 new_var = var = OMP_CLAUSE_DECL (c);
4460 if ((c_kind == OMP_CLAUSE_REDUCTION
4461 || c_kind == OMP_CLAUSE_IN_REDUCTION)
4462 && TREE_CODE (var) == MEM_REF)
4464 var = TREE_OPERAND (var, 0);
4465 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
4466 var = TREE_OPERAND (var, 0);
4467 if (TREE_CODE (var) == INDIRECT_REF
4468 || TREE_CODE (var) == ADDR_EXPR)
4469 var = TREE_OPERAND (var, 0);
4470 if (is_variable_sized (var))
4472 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
4473 var = DECL_VALUE_EXPR (var);
4474 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
4475 var = TREE_OPERAND (var, 0);
4476 gcc_assert (DECL_P (var));
4478 new_var = var;
4480 if (c_kind != OMP_CLAUSE_COPYIN)
4481 new_var = lookup_decl (var, ctx);
4483 if (c_kind == OMP_CLAUSE_SHARED || c_kind == OMP_CLAUSE_COPYIN)
4485 if (pass != 0)
4486 continue;
4488 /* C/C++ array section reductions. */
4489 else if ((c_kind == OMP_CLAUSE_REDUCTION
4490 || c_kind == OMP_CLAUSE_IN_REDUCTION)
4491 && var != OMP_CLAUSE_DECL (c))
4493 if (pass == 0)
4494 continue;
4496 tree bias = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
4497 tree orig_var = TREE_OPERAND (OMP_CLAUSE_DECL (c), 0);
4499 if (TREE_CODE (orig_var) == POINTER_PLUS_EXPR)
4501 tree b = TREE_OPERAND (orig_var, 1);
4502 b = maybe_lookup_decl (b, ctx);
4503 if (b == NULL)
4505 b = TREE_OPERAND (orig_var, 1);
4506 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
4508 if (integer_zerop (bias))
4509 bias = b;
4510 else
4512 bias = fold_convert_loc (clause_loc,
4513 TREE_TYPE (b), bias);
4514 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
4515 TREE_TYPE (b), b, bias);
4517 orig_var = TREE_OPERAND (orig_var, 0);
4519 if (pass == 2)
4521 tree out = maybe_lookup_decl_in_outer_ctx (var, ctx);
4522 if (is_global_var (out)
4523 && TREE_CODE (TREE_TYPE (out)) != POINTER_TYPE
4524 && (TREE_CODE (TREE_TYPE (out)) != REFERENCE_TYPE
4525 || (TREE_CODE (TREE_TYPE (TREE_TYPE (out)))
4526 != POINTER_TYPE)))
4527 x = var;
4528 else
4530 bool by_ref = use_pointer_for_field (var, NULL);
4531 x = build_receiver_ref (var, by_ref, ctx);
4532 if (TREE_CODE (TREE_TYPE (var)) == REFERENCE_TYPE
4533 && (TREE_CODE (TREE_TYPE (TREE_TYPE (var)))
4534 == POINTER_TYPE))
4535 x = build_fold_addr_expr (x);
4537 if (TREE_CODE (orig_var) == INDIRECT_REF)
4538 x = build_simple_mem_ref (x);
4539 else if (TREE_CODE (orig_var) == ADDR_EXPR)
4541 if (var == TREE_OPERAND (orig_var, 0))
4542 x = build_fold_addr_expr (x);
4544 bias = fold_convert (sizetype, bias);
4545 x = fold_convert (ptr_type_node, x);
4546 x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
4547 TREE_TYPE (x), x, bias);
4548 unsigned cnt = task_reduction_cnt - 1;
4549 if (!task_reduction_needs_orig_p)
4550 cnt += (task_reduction_cntorig_full
4551 - task_reduction_cntorig);
4552 else
4553 cnt = task_reduction_cntorig - 1;
4554 tree r = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
4555 size_int (cnt), NULL_TREE, NULL_TREE);
4556 gimplify_assign (r, x, ilist);
4557 continue;
4560 if (TREE_CODE (orig_var) == INDIRECT_REF
4561 || TREE_CODE (orig_var) == ADDR_EXPR)
4562 orig_var = TREE_OPERAND (orig_var, 0);
4563 tree d = OMP_CLAUSE_DECL (c);
4564 tree type = TREE_TYPE (d);
4565 gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
4566 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
4567 const char *name = get_name (orig_var);
4568 if (pass == 3)
4570 tree xv = create_tmp_var (ptr_type_node);
4571 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
4573 unsigned cnt = task_reduction_cnt - 1;
4574 if (!task_reduction_needs_orig_p)
4575 cnt += (task_reduction_cntorig_full
4576 - task_reduction_cntorig);
4577 else
4578 cnt = task_reduction_cntorig - 1;
4579 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
4580 size_int (cnt), NULL_TREE, NULL_TREE);
4582 gimple *g = gimple_build_assign (xv, x);
4583 gimple_seq_add_stmt (ilist, g);
4585 else
4587 unsigned int idx = *ctx->task_reduction_map->get (c);
4588 tree off;
4589 if (ctx->task_reductions[1 + idx])
4590 off = fold_convert (sizetype,
4591 ctx->task_reductions[1 + idx]);
4592 else
4593 off = task_reduction_read (ilist, tskred_temp, sizetype,
4594 7 + 3 * idx + 1);
4595 gimple *g = gimple_build_assign (xv, POINTER_PLUS_EXPR,
4596 tskred_base, off);
4597 gimple_seq_add_stmt (ilist, g);
4599 x = fold_convert (build_pointer_type (boolean_type_node),
4600 xv);
4601 if (TREE_CONSTANT (v))
4602 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (x), x,
4603 TYPE_SIZE_UNIT (type));
4604 else
4606 tree t = maybe_lookup_decl (v, ctx);
4607 if (t)
4608 v = t;
4609 else
4610 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
4611 gimplify_expr (&v, ilist, NULL, is_gimple_val,
4612 fb_rvalue);
4613 t = fold_build2_loc (clause_loc, PLUS_EXPR,
4614 TREE_TYPE (v), v,
4615 build_int_cst (TREE_TYPE (v), 1));
4616 t = fold_build2_loc (clause_loc, MULT_EXPR,
4617 TREE_TYPE (v), t,
4618 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4619 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (x), x, t);
4621 cond = create_tmp_var (TREE_TYPE (x));
4622 gimplify_assign (cond, x, ilist);
4623 x = xv;
4625 else if (TREE_CONSTANT (v))
4627 x = create_tmp_var_raw (type, name);
4628 gimple_add_tmp_var (x);
4629 TREE_ADDRESSABLE (x) = 1;
4630 x = build_fold_addr_expr_loc (clause_loc, x);
4632 else
4634 tree atmp
4635 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
4636 tree t = maybe_lookup_decl (v, ctx);
4637 if (t)
4638 v = t;
4639 else
4640 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
4641 gimplify_expr (&v, ilist, NULL, is_gimple_val, fb_rvalue);
4642 t = fold_build2_loc (clause_loc, PLUS_EXPR,
4643 TREE_TYPE (v), v,
4644 build_int_cst (TREE_TYPE (v), 1));
4645 t = fold_build2_loc (clause_loc, MULT_EXPR,
4646 TREE_TYPE (v), t,
4647 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4648 tree al = size_int (TYPE_ALIGN (TREE_TYPE (type)));
4649 x = build_call_expr_loc (clause_loc, atmp, 2, t, al);
4652 tree ptype = build_pointer_type (TREE_TYPE (type));
4653 x = fold_convert_loc (clause_loc, ptype, x);
4654 tree y = create_tmp_var (ptype, name);
4655 gimplify_assign (y, x, ilist);
4656 x = y;
4657 tree yb = y;
4659 if (!integer_zerop (bias))
4661 bias = fold_convert_loc (clause_loc, pointer_sized_int_node,
4662 bias);
4663 yb = fold_convert_loc (clause_loc, pointer_sized_int_node,
4664 x);
4665 yb = fold_build2_loc (clause_loc, MINUS_EXPR,
4666 pointer_sized_int_node, yb, bias);
4667 x = fold_convert_loc (clause_loc, TREE_TYPE (x), yb);
4668 yb = create_tmp_var (ptype, name);
4669 gimplify_assign (yb, x, ilist);
4670 x = yb;
4673 d = TREE_OPERAND (d, 0);
4674 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
4675 d = TREE_OPERAND (d, 0);
4676 if (TREE_CODE (d) == ADDR_EXPR)
4678 if (orig_var != var)
4680 gcc_assert (is_variable_sized (orig_var));
4681 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var),
4682 x);
4683 gimplify_assign (new_var, x, ilist);
4684 tree new_orig_var = lookup_decl (orig_var, ctx);
4685 tree t = build_fold_indirect_ref (new_var);
4686 DECL_IGNORED_P (new_var) = 0;
4687 TREE_THIS_NOTRAP (t) = 1;
4688 SET_DECL_VALUE_EXPR (new_orig_var, t);
4689 DECL_HAS_VALUE_EXPR_P (new_orig_var) = 1;
4691 else
4693 x = build2 (MEM_REF, TREE_TYPE (new_var), x,
4694 build_int_cst (ptype, 0));
4695 SET_DECL_VALUE_EXPR (new_var, x);
4696 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4699 else
4701 gcc_assert (orig_var == var);
4702 if (TREE_CODE (d) == INDIRECT_REF)
4704 x = create_tmp_var (ptype, name);
4705 TREE_ADDRESSABLE (x) = 1;
4706 gimplify_assign (x, yb, ilist);
4707 x = build_fold_addr_expr_loc (clause_loc, x);
4709 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
4710 gimplify_assign (new_var, x, ilist);
4712 /* GOMP_taskgroup_reduction_register memsets the whole
4713 array to zero. If the initializer is zero, we don't
4714 need to initialize it again, just mark it as ever
4715 used unconditionally, i.e. cond = true. */
4716 if (cond
4717 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) == NULL_TREE
4718 && initializer_zerop (omp_reduction_init (c,
4719 TREE_TYPE (type))))
4721 gimple *g = gimple_build_assign (build_simple_mem_ref (cond),
4722 boolean_true_node);
4723 gimple_seq_add_stmt (ilist, g);
4724 continue;
4726 tree end = create_artificial_label (UNKNOWN_LOCATION);
4727 if (cond)
4729 gimple *g;
4730 if (!is_parallel_ctx (ctx))
4732 tree condv = create_tmp_var (boolean_type_node);
4733 g = gimple_build_assign (condv,
4734 build_simple_mem_ref (cond));
4735 gimple_seq_add_stmt (ilist, g);
4736 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
4737 g = gimple_build_cond (NE_EXPR, condv,
4738 boolean_false_node, end, lab1);
4739 gimple_seq_add_stmt (ilist, g);
4740 gimple_seq_add_stmt (ilist, gimple_build_label (lab1));
4742 g = gimple_build_assign (build_simple_mem_ref (cond),
4743 boolean_true_node);
4744 gimple_seq_add_stmt (ilist, g);
4747 tree y1 = create_tmp_var (ptype);
4748 gimplify_assign (y1, y, ilist);
4749 tree i2 = NULL_TREE, y2 = NULL_TREE;
4750 tree body2 = NULL_TREE, end2 = NULL_TREE;
4751 tree y3 = NULL_TREE, y4 = NULL_TREE;
4752 if (task_reduction_needs_orig_p)
4754 y3 = create_tmp_var (ptype);
4755 tree ref;
4756 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
4757 ref = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
4758 size_int (task_reduction_cnt_full
4759 + task_reduction_cntorig - 1),
4760 NULL_TREE, NULL_TREE);
4761 else
4763 unsigned int idx = *ctx->task_reduction_map->get (c);
4764 ref = task_reduction_read (ilist, tskred_temp, ptype,
4765 7 + 3 * idx);
4767 gimplify_assign (y3, ref, ilist);
4769 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) || is_simd)
4771 if (pass != 3)
4773 y2 = create_tmp_var (ptype);
4774 gimplify_assign (y2, y, ilist);
4776 if (is_simd || OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
4778 tree ref = build_outer_var_ref (var, ctx);
4779 /* For ref build_outer_var_ref already performs this. */
4780 if (TREE_CODE (d) == INDIRECT_REF)
4781 gcc_assert (omp_is_reference (var));
4782 else if (TREE_CODE (d) == ADDR_EXPR)
4783 ref = build_fold_addr_expr (ref);
4784 else if (omp_is_reference (var))
4785 ref = build_fold_addr_expr (ref);
4786 ref = fold_convert_loc (clause_loc, ptype, ref);
4787 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
4788 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
4790 y3 = create_tmp_var (ptype);
4791 gimplify_assign (y3, unshare_expr (ref), ilist);
4793 if (is_simd)
4795 y4 = create_tmp_var (ptype);
4796 gimplify_assign (y4, ref, dlist);
4800 tree i = create_tmp_var (TREE_TYPE (v));
4801 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), ilist);
4802 tree body = create_artificial_label (UNKNOWN_LOCATION);
4803 gimple_seq_add_stmt (ilist, gimple_build_label (body));
4804 if (y2)
4806 i2 = create_tmp_var (TREE_TYPE (v));
4807 gimplify_assign (i2, build_int_cst (TREE_TYPE (v), 0), dlist);
4808 body2 = create_artificial_label (UNKNOWN_LOCATION);
4809 end2 = create_artificial_label (UNKNOWN_LOCATION);
4810 gimple_seq_add_stmt (dlist, gimple_build_label (body2));
4812 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4814 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
4815 tree decl_placeholder
4816 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
4817 SET_DECL_VALUE_EXPR (decl_placeholder,
4818 build_simple_mem_ref (y1));
4819 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
4820 SET_DECL_VALUE_EXPR (placeholder,
4821 y3 ? build_simple_mem_ref (y3)
4822 : error_mark_node);
4823 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
4824 x = lang_hooks.decls.omp_clause_default_ctor
4825 (c, build_simple_mem_ref (y1),
4826 y3 ? build_simple_mem_ref (y3) : NULL_TREE);
4827 if (x)
4828 gimplify_and_add (x, ilist);
4829 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
4831 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
4832 lower_omp (&tseq, ctx);
4833 gimple_seq_add_seq (ilist, tseq);
4835 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
4836 if (is_simd)
4838 SET_DECL_VALUE_EXPR (decl_placeholder,
4839 build_simple_mem_ref (y2));
4840 SET_DECL_VALUE_EXPR (placeholder,
4841 build_simple_mem_ref (y4));
4842 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
4843 lower_omp (&tseq, ctx);
4844 gimple_seq_add_seq (dlist, tseq);
4845 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
4847 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
4848 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 0;
4849 if (y2)
4851 x = lang_hooks.decls.omp_clause_dtor
4852 (c, build_simple_mem_ref (y2));
4853 if (x)
4854 gimplify_and_add (x, dlist);
4857 else
4859 x = omp_reduction_init (c, TREE_TYPE (type));
4860 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
4862 /* reduction(-:var) sums up the partial results, so it
4863 acts identically to reduction(+:var). */
4864 if (code == MINUS_EXPR)
4865 code = PLUS_EXPR;
4867 gimplify_assign (build_simple_mem_ref (y1), x, ilist);
4868 if (is_simd)
4870 x = build2 (code, TREE_TYPE (type),
4871 build_simple_mem_ref (y4),
4872 build_simple_mem_ref (y2));
4873 gimplify_assign (build_simple_mem_ref (y4), x, dlist);
4876 gimple *g
4877 = gimple_build_assign (y1, POINTER_PLUS_EXPR, y1,
4878 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4879 gimple_seq_add_stmt (ilist, g);
4880 if (y3)
4882 g = gimple_build_assign (y3, POINTER_PLUS_EXPR, y3,
4883 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4884 gimple_seq_add_stmt (ilist, g);
4886 g = gimple_build_assign (i, PLUS_EXPR, i,
4887 build_int_cst (TREE_TYPE (i), 1));
4888 gimple_seq_add_stmt (ilist, g);
4889 g = gimple_build_cond (LE_EXPR, i, v, body, end);
4890 gimple_seq_add_stmt (ilist, g);
4891 gimple_seq_add_stmt (ilist, gimple_build_label (end));
4892 if (y2)
4894 g = gimple_build_assign (y2, POINTER_PLUS_EXPR, y2,
4895 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4896 gimple_seq_add_stmt (dlist, g);
4897 if (y4)
4899 g = gimple_build_assign
4900 (y4, POINTER_PLUS_EXPR, y4,
4901 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4902 gimple_seq_add_stmt (dlist, g);
4904 g = gimple_build_assign (i2, PLUS_EXPR, i2,
4905 build_int_cst (TREE_TYPE (i2), 1));
4906 gimple_seq_add_stmt (dlist, g);
4907 g = gimple_build_cond (LE_EXPR, i2, v, body2, end2);
4908 gimple_seq_add_stmt (dlist, g);
4909 gimple_seq_add_stmt (dlist, gimple_build_label (end2));
4911 continue;
4913 else if (pass == 2)
4915 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
4916 x = var;
4917 else
4919 bool by_ref = use_pointer_for_field (var, ctx);
4920 x = build_receiver_ref (var, by_ref, ctx);
4922 if (!omp_is_reference (var))
4923 x = build_fold_addr_expr (x);
4924 x = fold_convert (ptr_type_node, x);
4925 unsigned cnt = task_reduction_cnt - 1;
4926 if (!task_reduction_needs_orig_p)
4927 cnt += task_reduction_cntorig_full - task_reduction_cntorig;
4928 else
4929 cnt = task_reduction_cntorig - 1;
4930 tree r = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
4931 size_int (cnt), NULL_TREE, NULL_TREE);
4932 gimplify_assign (r, x, ilist);
4933 continue;
4935 else if (pass == 3)
4937 tree type = TREE_TYPE (new_var);
4938 if (!omp_is_reference (var))
4939 type = build_pointer_type (type);
4940 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
4942 unsigned cnt = task_reduction_cnt - 1;
4943 if (!task_reduction_needs_orig_p)
4944 cnt += (task_reduction_cntorig_full
4945 - task_reduction_cntorig);
4946 else
4947 cnt = task_reduction_cntorig - 1;
4948 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
4949 size_int (cnt), NULL_TREE, NULL_TREE);
4951 else
4953 unsigned int idx = *ctx->task_reduction_map->get (c);
4954 tree off;
4955 if (ctx->task_reductions[1 + idx])
4956 off = fold_convert (sizetype,
4957 ctx->task_reductions[1 + idx]);
4958 else
4959 off = task_reduction_read (ilist, tskred_temp, sizetype,
4960 7 + 3 * idx + 1);
4961 x = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node,
4962 tskred_base, off);
4964 x = fold_convert (type, x);
4965 tree t;
4966 if (omp_is_reference (var))
4968 gimplify_assign (new_var, x, ilist);
4969 t = new_var;
4970 new_var = build_simple_mem_ref (new_var);
4972 else
4974 t = create_tmp_var (type);
4975 gimplify_assign (t, x, ilist);
4976 SET_DECL_VALUE_EXPR (new_var, build_simple_mem_ref (t));
4977 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4979 t = fold_convert (build_pointer_type (boolean_type_node), t);
4980 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t,
4981 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4982 cond = create_tmp_var (TREE_TYPE (t));
4983 gimplify_assign (cond, t, ilist);
4985 else if (is_variable_sized (var))
4987 /* For variable sized types, we need to allocate the
4988 actual storage here. Call alloca and store the
4989 result in the pointer decl that we created elsewhere. */
4990 if (pass == 0)
4991 continue;
4993 if (c_kind != OMP_CLAUSE_FIRSTPRIVATE || !is_task_ctx (ctx))
4995 gcall *stmt;
4996 tree tmp, atmp;
4998 ptr = DECL_VALUE_EXPR (new_var);
4999 gcc_assert (TREE_CODE (ptr) == INDIRECT_REF);
5000 ptr = TREE_OPERAND (ptr, 0);
5001 gcc_assert (DECL_P (ptr));
5002 x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
5004 /* void *tmp = __builtin_alloca */
5005 atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
5006 stmt = gimple_build_call (atmp, 2, x,
5007 size_int (DECL_ALIGN (var)));
5008 tmp = create_tmp_var_raw (ptr_type_node);
5009 gimple_add_tmp_var (tmp);
5010 gimple_call_set_lhs (stmt, tmp);
5012 gimple_seq_add_stmt (ilist, stmt);
5014 x = fold_convert_loc (clause_loc, TREE_TYPE (ptr), tmp);
5015 gimplify_assign (ptr, x, ilist);
5018 else if (omp_is_reference (var)
5019 && (c_kind != OMP_CLAUSE_FIRSTPRIVATE
5020 || !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c)))
5022 /* For references that are being privatized for Fortran,
5023 allocate new backing storage for the new pointer
5024 variable. This allows us to avoid changing all the
5025 code that expects a pointer to something that expects
5026 a direct variable. */
5027 if (pass == 0)
5028 continue;
5030 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
5031 if (c_kind == OMP_CLAUSE_FIRSTPRIVATE && is_task_ctx (ctx))
5033 x = build_receiver_ref (var, false, ctx);
5034 x = build_fold_addr_expr_loc (clause_loc, x);
5036 else if (TREE_CONSTANT (x))
5038 /* For reduction in SIMD loop, defer adding the
5039 initialization of the reference, because if we decide
5040 to use SIMD array for it, the initialization could cause
5041 expansion ICE. Ditto for other privatization clauses. */
5042 if (is_simd)
5043 x = NULL_TREE;
5044 else
5046 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
5047 get_name (var));
5048 gimple_add_tmp_var (x);
5049 TREE_ADDRESSABLE (x) = 1;
5050 x = build_fold_addr_expr_loc (clause_loc, x);
5053 else
5055 tree atmp
5056 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
5057 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
5058 tree al = size_int (TYPE_ALIGN (rtype));
5059 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
5062 if (x)
5064 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
5065 gimplify_assign (new_var, x, ilist);
5068 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
5070 else if ((c_kind == OMP_CLAUSE_REDUCTION
5071 || c_kind == OMP_CLAUSE_IN_REDUCTION)
5072 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5074 if (pass == 0)
5075 continue;
5077 else if (pass != 0)
5078 continue;
5080 switch (OMP_CLAUSE_CODE (c))
5082 case OMP_CLAUSE_SHARED:
5083 /* Ignore shared directives in teams construct inside
5084 target construct. */
5085 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
5086 && !is_host_teams_ctx (ctx))
5087 continue;
5088 /* Shared global vars are just accessed directly. */
5089 if (is_global_var (new_var))
5090 break;
5091 /* For taskloop firstprivate/lastprivate, represented
5092 as firstprivate and shared clause on the task, new_var
5093 is the firstprivate var. */
5094 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
5095 break;
5096 /* Set up the DECL_VALUE_EXPR for shared variables now. This
5097 needs to be delayed until after fixup_child_record_type so
5098 that we get the correct type during the dereference. */
5099 by_ref = use_pointer_for_field (var, ctx);
5100 x = build_receiver_ref (var, by_ref, ctx);
5101 SET_DECL_VALUE_EXPR (new_var, x);
5102 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5104 /* ??? If VAR is not passed by reference, and the variable
5105 hasn't been initialized yet, then we'll get a warning for
5106 the store into the omp_data_s structure. Ideally, we'd be
5107 able to notice this and not store anything at all, but
5108 we're generating code too early. Suppress the warning. */
5109 if (!by_ref)
5110 TREE_NO_WARNING (var) = 1;
5111 break;
5113 case OMP_CLAUSE__CONDTEMP_:
5114 if (is_parallel_ctx (ctx))
5116 x = build_receiver_ref (var, false, ctx);
5117 SET_DECL_VALUE_EXPR (new_var, x);
5118 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5120 else if (is_simd && !OMP_CLAUSE__CONDTEMP__ITER (c))
5122 x = build_zero_cst (TREE_TYPE (var));
5123 goto do_private;
5125 break;
5127 case OMP_CLAUSE_LASTPRIVATE:
5128 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
5129 break;
5130 /* FALLTHRU */
5132 case OMP_CLAUSE_PRIVATE:
5133 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE)
5134 x = build_outer_var_ref (var, ctx);
5135 else if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
5137 if (is_task_ctx (ctx))
5138 x = build_receiver_ref (var, false, ctx);
5139 else
5140 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_PRIVATE);
5142 else
5143 x = NULL;
5144 do_private:
5145 tree nx;
5146 bool copy_ctor;
5147 copy_ctor = false;
5148 nx = unshare_expr (new_var);
5149 if (is_simd
5150 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5151 && OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c))
5152 copy_ctor = true;
5153 if (copy_ctor)
5154 nx = lang_hooks.decls.omp_clause_copy_ctor (c, nx, x);
5155 else
5156 nx = lang_hooks.decls.omp_clause_default_ctor (c, nx, x);
5157 if (is_simd)
5159 tree y = lang_hooks.decls.omp_clause_dtor (c, new_var);
5160 if ((TREE_ADDRESSABLE (new_var) || nx || y
5161 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5162 && (gimple_omp_for_collapse (ctx->stmt) != 1
5163 || (gimple_omp_for_index (ctx->stmt, 0)
5164 != new_var)))
5165 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE__CONDTEMP_
5166 || omp_is_reference (var))
5167 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
5168 ivar, lvar))
5170 if (omp_is_reference (var))
5172 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5173 tree new_vard = TREE_OPERAND (new_var, 0);
5174 gcc_assert (DECL_P (new_vard));
5175 SET_DECL_VALUE_EXPR (new_vard,
5176 build_fold_addr_expr (lvar));
5177 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
5180 if (nx)
5182 tree iv = unshare_expr (ivar);
5183 if (copy_ctor)
5184 x = lang_hooks.decls.omp_clause_copy_ctor (c, iv,
5185 x);
5186 else
5187 x = lang_hooks.decls.omp_clause_default_ctor (c,
5188 iv,
5189 x);
5191 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE__CONDTEMP_)
5193 x = build2 (MODIFY_EXPR, TREE_TYPE (ivar),
5194 unshare_expr (ivar), x);
5195 nx = x;
5197 if (nx && x)
5198 gimplify_and_add (x, &llist[0]);
5199 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5200 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
5202 tree v = new_var;
5203 if (!DECL_P (v))
5205 gcc_assert (TREE_CODE (v) == MEM_REF);
5206 v = TREE_OPERAND (v, 0);
5207 gcc_assert (DECL_P (v));
5209 v = *ctx->lastprivate_conditional_map->get (v);
5210 tree t = create_tmp_var (TREE_TYPE (v));
5211 tree z = build_zero_cst (TREE_TYPE (v));
5212 tree orig_v
5213 = build_outer_var_ref (var, ctx,
5214 OMP_CLAUSE_LASTPRIVATE);
5215 gimple_seq_add_stmt (dlist,
5216 gimple_build_assign (t, z));
5217 gcc_assert (DECL_HAS_VALUE_EXPR_P (v));
5218 tree civar = DECL_VALUE_EXPR (v);
5219 gcc_assert (TREE_CODE (civar) == ARRAY_REF);
5220 civar = unshare_expr (civar);
5221 TREE_OPERAND (civar, 1) = sctx.idx;
5222 x = build2 (MODIFY_EXPR, TREE_TYPE (t), t,
5223 unshare_expr (civar));
5224 x = build2 (COMPOUND_EXPR, TREE_TYPE (orig_v), x,
5225 build2 (MODIFY_EXPR, TREE_TYPE (orig_v),
5226 orig_v, unshare_expr (ivar)));
5227 tree cond = build2 (LT_EXPR, boolean_type_node, t,
5228 civar);
5229 x = build3 (COND_EXPR, void_type_node, cond, x,
5230 void_node);
5231 gimple_seq tseq = NULL;
5232 gimplify_and_add (x, &tseq);
5233 if (ctx->outer)
5234 lower_omp (&tseq, ctx->outer);
5235 gimple_seq_add_seq (&llist[1], tseq);
5237 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5238 && ctx->for_simd_scan_phase)
5240 x = unshare_expr (ivar);
5241 tree orig_v
5242 = build_outer_var_ref (var, ctx,
5243 OMP_CLAUSE_LASTPRIVATE);
5244 x = lang_hooks.decls.omp_clause_assign_op (c, x,
5245 orig_v);
5246 gimplify_and_add (x, &llist[0]);
5248 if (y)
5250 y = lang_hooks.decls.omp_clause_dtor (c, ivar);
5251 if (y)
5252 gimplify_and_add (y, &llist[1]);
5254 break;
5256 if (omp_is_reference (var))
5258 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5259 tree new_vard = TREE_OPERAND (new_var, 0);
5260 gcc_assert (DECL_P (new_vard));
5261 tree type = TREE_TYPE (TREE_TYPE (new_vard));
5262 x = TYPE_SIZE_UNIT (type);
5263 if (TREE_CONSTANT (x))
5265 x = create_tmp_var_raw (type, get_name (var));
5266 gimple_add_tmp_var (x);
5267 TREE_ADDRESSABLE (x) = 1;
5268 x = build_fold_addr_expr_loc (clause_loc, x);
5269 x = fold_convert_loc (clause_loc,
5270 TREE_TYPE (new_vard), x);
5271 gimplify_assign (new_vard, x, ilist);
5275 if (nx)
5276 gimplify_and_add (nx, ilist);
5277 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5278 && is_simd
5279 && ctx->for_simd_scan_phase)
5281 tree orig_v = build_outer_var_ref (var, ctx,
5282 OMP_CLAUSE_LASTPRIVATE);
5283 x = lang_hooks.decls.omp_clause_assign_op (c, new_var,
5284 orig_v);
5285 gimplify_and_add (x, ilist);
5287 /* FALLTHRU */
5289 do_dtor:
5290 x = lang_hooks.decls.omp_clause_dtor (c, new_var);
5291 if (x)
5292 gimplify_and_add (x, dlist);
5293 break;
5295 case OMP_CLAUSE_LINEAR:
5296 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
5297 goto do_firstprivate;
5298 if (OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
5299 x = NULL;
5300 else
5301 x = build_outer_var_ref (var, ctx);
5302 goto do_private;
5304 case OMP_CLAUSE_FIRSTPRIVATE:
5305 if (is_task_ctx (ctx))
5307 if ((omp_is_reference (var)
5308 && !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c))
5309 || is_variable_sized (var))
5310 goto do_dtor;
5311 else if (is_global_var (maybe_lookup_decl_in_outer_ctx (var,
5312 ctx))
5313 || use_pointer_for_field (var, NULL))
5315 x = build_receiver_ref (var, false, ctx);
5316 SET_DECL_VALUE_EXPR (new_var, x);
5317 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5318 goto do_dtor;
5321 if (OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c)
5322 && omp_is_reference (var))
5324 x = build_outer_var_ref (var, ctx);
5325 gcc_assert (TREE_CODE (x) == MEM_REF
5326 && integer_zerop (TREE_OPERAND (x, 1)));
5327 x = TREE_OPERAND (x, 0);
5328 x = lang_hooks.decls.omp_clause_copy_ctor
5329 (c, unshare_expr (new_var), x);
5330 gimplify_and_add (x, ilist);
5331 goto do_dtor;
5333 do_firstprivate:
5334 x = build_outer_var_ref (var, ctx);
5335 if (is_simd)
5337 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
5338 && gimple_omp_for_combined_into_p (ctx->stmt))
5340 tree t = OMP_CLAUSE_LINEAR_STEP (c);
5341 tree stept = TREE_TYPE (t);
5342 tree ct = omp_find_clause (clauses,
5343 OMP_CLAUSE__LOOPTEMP_);
5344 gcc_assert (ct);
5345 tree l = OMP_CLAUSE_DECL (ct);
5346 tree n1 = fd->loop.n1;
5347 tree step = fd->loop.step;
5348 tree itype = TREE_TYPE (l);
5349 if (POINTER_TYPE_P (itype))
5350 itype = signed_type_for (itype);
5351 l = fold_build2 (MINUS_EXPR, itype, l, n1);
5352 if (TYPE_UNSIGNED (itype)
5353 && fd->loop.cond_code == GT_EXPR)
5354 l = fold_build2 (TRUNC_DIV_EXPR, itype,
5355 fold_build1 (NEGATE_EXPR, itype, l),
5356 fold_build1 (NEGATE_EXPR,
5357 itype, step));
5358 else
5359 l = fold_build2 (TRUNC_DIV_EXPR, itype, l, step);
5360 t = fold_build2 (MULT_EXPR, stept,
5361 fold_convert (stept, l), t);
5363 if (OMP_CLAUSE_LINEAR_ARRAY (c))
5365 if (omp_is_reference (var))
5367 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5368 tree new_vard = TREE_OPERAND (new_var, 0);
5369 gcc_assert (DECL_P (new_vard));
5370 tree type = TREE_TYPE (TREE_TYPE (new_vard));
5371 nx = TYPE_SIZE_UNIT (type);
5372 if (TREE_CONSTANT (nx))
5374 nx = create_tmp_var_raw (type,
5375 get_name (var));
5376 gimple_add_tmp_var (nx);
5377 TREE_ADDRESSABLE (nx) = 1;
5378 nx = build_fold_addr_expr_loc (clause_loc,
5379 nx);
5380 nx = fold_convert_loc (clause_loc,
5381 TREE_TYPE (new_vard),
5382 nx);
5383 gimplify_assign (new_vard, nx, ilist);
5387 x = lang_hooks.decls.omp_clause_linear_ctor
5388 (c, new_var, x, t);
5389 gimplify_and_add (x, ilist);
5390 goto do_dtor;
5393 if (POINTER_TYPE_P (TREE_TYPE (x)))
5394 x = fold_build2 (POINTER_PLUS_EXPR,
5395 TREE_TYPE (x), x, t);
5396 else
5397 x = fold_build2 (PLUS_EXPR, TREE_TYPE (x), x, t);
5400 if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LINEAR
5401 || TREE_ADDRESSABLE (new_var)
5402 || omp_is_reference (var))
5403 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
5404 ivar, lvar))
5406 if (omp_is_reference (var))
5408 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5409 tree new_vard = TREE_OPERAND (new_var, 0);
5410 gcc_assert (DECL_P (new_vard));
5411 SET_DECL_VALUE_EXPR (new_vard,
5412 build_fold_addr_expr (lvar));
5413 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
5415 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR)
5417 tree iv = create_tmp_var (TREE_TYPE (new_var));
5418 x = lang_hooks.decls.omp_clause_copy_ctor (c, iv, x);
5419 gimplify_and_add (x, ilist);
5420 gimple_stmt_iterator gsi
5421 = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
5422 gassign *g
5423 = gimple_build_assign (unshare_expr (lvar), iv);
5424 gsi_insert_before_without_update (&gsi, g,
5425 GSI_SAME_STMT);
5426 tree t = OMP_CLAUSE_LINEAR_STEP (c);
5427 enum tree_code code = PLUS_EXPR;
5428 if (POINTER_TYPE_P (TREE_TYPE (new_var)))
5429 code = POINTER_PLUS_EXPR;
5430 g = gimple_build_assign (iv, code, iv, t);
5431 gsi_insert_before_without_update (&gsi, g,
5432 GSI_SAME_STMT);
5433 break;
5435 x = lang_hooks.decls.omp_clause_copy_ctor
5436 (c, unshare_expr (ivar), x);
5437 gimplify_and_add (x, &llist[0]);
5438 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
5439 if (x)
5440 gimplify_and_add (x, &llist[1]);
5441 break;
5443 if (omp_is_reference (var))
5445 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5446 tree new_vard = TREE_OPERAND (new_var, 0);
5447 gcc_assert (DECL_P (new_vard));
5448 tree type = TREE_TYPE (TREE_TYPE (new_vard));
5449 nx = TYPE_SIZE_UNIT (type);
5450 if (TREE_CONSTANT (nx))
5452 nx = create_tmp_var_raw (type, get_name (var));
5453 gimple_add_tmp_var (nx);
5454 TREE_ADDRESSABLE (nx) = 1;
5455 nx = build_fold_addr_expr_loc (clause_loc, nx);
5456 nx = fold_convert_loc (clause_loc,
5457 TREE_TYPE (new_vard), nx);
5458 gimplify_assign (new_vard, nx, ilist);
5462 x = lang_hooks.decls.omp_clause_copy_ctor
5463 (c, unshare_expr (new_var), x);
5464 gimplify_and_add (x, ilist);
5465 goto do_dtor;
5467 case OMP_CLAUSE__LOOPTEMP_:
5468 case OMP_CLAUSE__REDUCTEMP_:
5469 gcc_assert (is_taskreg_ctx (ctx));
5470 x = build_outer_var_ref (var, ctx);
5471 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
5472 gimplify_and_add (x, ilist);
5473 break;
5475 case OMP_CLAUSE_COPYIN:
5476 by_ref = use_pointer_for_field (var, NULL);
5477 x = build_receiver_ref (var, by_ref, ctx);
5478 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, x);
5479 append_to_statement_list (x, &copyin_seq);
5480 copyin_by_ref |= by_ref;
5481 break;
5483 case OMP_CLAUSE_REDUCTION:
5484 case OMP_CLAUSE_IN_REDUCTION:
5485 /* OpenACC reductions are initialized using the
5486 GOACC_REDUCTION internal function. */
5487 if (is_gimple_omp_oacc (ctx->stmt))
5488 break;
5489 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5491 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
5492 gimple *tseq;
5493 tree ptype = TREE_TYPE (placeholder);
5494 if (cond)
5496 x = error_mark_node;
5497 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c)
5498 && !task_reduction_needs_orig_p)
5499 x = var;
5500 else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
5502 tree pptype = build_pointer_type (ptype);
5503 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5504 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5505 size_int (task_reduction_cnt_full
5506 + task_reduction_cntorig - 1),
5507 NULL_TREE, NULL_TREE);
5508 else
5510 unsigned int idx
5511 = *ctx->task_reduction_map->get (c);
5512 x = task_reduction_read (ilist, tskred_temp,
5513 pptype, 7 + 3 * idx);
5515 x = fold_convert (pptype, x);
5516 x = build_simple_mem_ref (x);
5519 else
5521 x = build_outer_var_ref (var, ctx);
5523 if (omp_is_reference (var)
5524 && !useless_type_conversion_p (ptype, TREE_TYPE (x)))
5525 x = build_fold_addr_expr_loc (clause_loc, x);
5527 SET_DECL_VALUE_EXPR (placeholder, x);
5528 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
5529 tree new_vard = new_var;
5530 if (omp_is_reference (var))
5532 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5533 new_vard = TREE_OPERAND (new_var, 0);
5534 gcc_assert (DECL_P (new_vard));
5536 tree rvar = NULL_TREE, *rvarp = NULL, rvar2 = NULL_TREE;
5537 if (is_simd
5538 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
5539 && OMP_CLAUSE_REDUCTION_INSCAN (c))
5540 rvarp = &rvar;
5541 if (is_simd
5542 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
5543 ivar, lvar, rvarp,
5544 &rvar2))
5546 if (new_vard == new_var)
5548 gcc_assert (DECL_VALUE_EXPR (new_var) == lvar);
5549 SET_DECL_VALUE_EXPR (new_var, ivar);
5551 else
5553 SET_DECL_VALUE_EXPR (new_vard,
5554 build_fold_addr_expr (ivar));
5555 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
5557 x = lang_hooks.decls.omp_clause_default_ctor
5558 (c, unshare_expr (ivar),
5559 build_outer_var_ref (var, ctx));
5560 if (rvarp && ctx->for_simd_scan_phase)
5562 if (x)
5563 gimplify_and_add (x, &llist[0]);
5564 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
5565 if (x)
5566 gimplify_and_add (x, &llist[1]);
5567 break;
5569 else if (rvarp)
5571 if (x)
5573 gimplify_and_add (x, &llist[0]);
5575 tree ivar2 = unshare_expr (lvar);
5576 TREE_OPERAND (ivar2, 1) = sctx.idx;
5577 x = lang_hooks.decls.omp_clause_default_ctor
5578 (c, ivar2, build_outer_var_ref (var, ctx));
5579 gimplify_and_add (x, &llist[0]);
5581 if (rvar2)
5583 x = lang_hooks.decls.omp_clause_default_ctor
5584 (c, unshare_expr (rvar2),
5585 build_outer_var_ref (var, ctx));
5586 gimplify_and_add (x, &llist[0]);
5589 /* For types that need construction, add another
5590 private var which will be default constructed
5591 and optionally initialized with
5592 OMP_CLAUSE_REDUCTION_GIMPLE_INIT, as in the
5593 loop we want to assign this value instead of
5594 constructing and destructing it in each
5595 iteration. */
5596 tree nv = create_tmp_var_raw (TREE_TYPE (ivar));
5597 gimple_add_tmp_var (nv);
5598 ctx->cb.decl_map->put (TREE_OPERAND (rvar2
5599 ? rvar2
5600 : ivar, 0),
5601 nv);
5602 x = lang_hooks.decls.omp_clause_default_ctor
5603 (c, nv, build_outer_var_ref (var, ctx));
5604 gimplify_and_add (x, ilist);
5606 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
5608 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
5609 x = DECL_VALUE_EXPR (new_vard);
5610 tree vexpr = nv;
5611 if (new_vard != new_var)
5612 vexpr = build_fold_addr_expr (nv);
5613 SET_DECL_VALUE_EXPR (new_vard, vexpr);
5614 lower_omp (&tseq, ctx);
5615 SET_DECL_VALUE_EXPR (new_vard, x);
5616 gimple_seq_add_seq (ilist, tseq);
5617 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
5620 x = lang_hooks.decls.omp_clause_dtor (c, nv);
5621 if (x)
5622 gimplify_and_add (x, dlist);
5625 tree ref = build_outer_var_ref (var, ctx);
5626 x = unshare_expr (ivar);
5627 x = lang_hooks.decls.omp_clause_assign_op (c, x,
5628 ref);
5629 gimplify_and_add (x, &llist[0]);
5631 ref = build_outer_var_ref (var, ctx);
5632 x = lang_hooks.decls.omp_clause_assign_op (c, ref,
5633 rvar);
5634 gimplify_and_add (x, &llist[3]);
5636 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
5637 if (new_vard == new_var)
5638 SET_DECL_VALUE_EXPR (new_var, lvar);
5639 else
5640 SET_DECL_VALUE_EXPR (new_vard,
5641 build_fold_addr_expr (lvar));
5643 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
5644 if (x)
5645 gimplify_and_add (x, &llist[1]);
5647 tree ivar2 = unshare_expr (lvar);
5648 TREE_OPERAND (ivar2, 1) = sctx.idx;
5649 x = lang_hooks.decls.omp_clause_dtor (c, ivar2);
5650 if (x)
5651 gimplify_and_add (x, &llist[1]);
5653 if (rvar2)
5655 x = lang_hooks.decls.omp_clause_dtor (c, rvar2);
5656 if (x)
5657 gimplify_and_add (x, &llist[1]);
5659 break;
5661 if (x)
5662 gimplify_and_add (x, &llist[0]);
5663 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
5665 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
5666 lower_omp (&tseq, ctx);
5667 gimple_seq_add_seq (&llist[0], tseq);
5669 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
5670 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
5671 lower_omp (&tseq, ctx);
5672 gimple_seq_add_seq (&llist[1], tseq);
5673 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5674 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
5675 if (new_vard == new_var)
5676 SET_DECL_VALUE_EXPR (new_var, lvar);
5677 else
5678 SET_DECL_VALUE_EXPR (new_vard,
5679 build_fold_addr_expr (lvar));
5680 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
5681 if (x)
5682 gimplify_and_add (x, &llist[1]);
5683 break;
5685 /* If this is a reference to a constant-size reduction var
5686 with a placeholder, we haven't emitted the initializer
5687 for it, because that is undesirable if SIMD arrays are used.
5688 But if they aren't used, we need to emit the deferred
5689 initialization now. */
5690 else if (omp_is_reference (var) && is_simd)
5691 handle_simd_reference (clause_loc, new_vard, ilist);
5693 tree lab2 = NULL_TREE;
5694 if (cond)
5696 gimple *g;
5697 if (!is_parallel_ctx (ctx))
5699 tree condv = create_tmp_var (boolean_type_node);
5700 tree m = build_simple_mem_ref (cond);
5701 g = gimple_build_assign (condv, m);
5702 gimple_seq_add_stmt (ilist, g);
5703 tree lab1
5704 = create_artificial_label (UNKNOWN_LOCATION);
5705 lab2 = create_artificial_label (UNKNOWN_LOCATION);
5706 g = gimple_build_cond (NE_EXPR, condv,
5707 boolean_false_node,
5708 lab2, lab1);
5709 gimple_seq_add_stmt (ilist, g);
5710 gimple_seq_add_stmt (ilist,
5711 gimple_build_label (lab1));
5713 g = gimple_build_assign (build_simple_mem_ref (cond),
5714 boolean_true_node);
5715 gimple_seq_add_stmt (ilist, g);
5717 x = lang_hooks.decls.omp_clause_default_ctor
5718 (c, unshare_expr (new_var),
5719 cond ? NULL_TREE
5720 : build_outer_var_ref (var, ctx));
5721 if (x)
5722 gimplify_and_add (x, ilist);
5724 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
5725 && OMP_CLAUSE_REDUCTION_INSCAN (c))
5727 if (ctx->for_simd_scan_phase)
5728 goto do_dtor;
5729 if (x || (!is_simd
5730 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c)))
5732 tree nv = create_tmp_var_raw (TREE_TYPE (new_var));
5733 gimple_add_tmp_var (nv);
5734 ctx->cb.decl_map->put (new_vard, nv);
5735 x = lang_hooks.decls.omp_clause_default_ctor
5736 (c, nv, build_outer_var_ref (var, ctx));
5737 if (x)
5738 gimplify_and_add (x, ilist);
5739 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
5741 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
5742 tree vexpr = nv;
5743 if (new_vard != new_var)
5744 vexpr = build_fold_addr_expr (nv);
5745 SET_DECL_VALUE_EXPR (new_vard, vexpr);
5746 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
5747 lower_omp (&tseq, ctx);
5748 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
5749 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
5750 gimple_seq_add_seq (ilist, tseq);
5752 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
5753 if (is_simd && ctx->scan_exclusive)
5755 tree nv2
5756 = create_tmp_var_raw (TREE_TYPE (new_var));
5757 gimple_add_tmp_var (nv2);
5758 ctx->cb.decl_map->put (nv, nv2);
5759 x = lang_hooks.decls.omp_clause_default_ctor
5760 (c, nv2, build_outer_var_ref (var, ctx));
5761 gimplify_and_add (x, ilist);
5762 x = lang_hooks.decls.omp_clause_dtor (c, nv2);
5763 if (x)
5764 gimplify_and_add (x, dlist);
5766 x = lang_hooks.decls.omp_clause_dtor (c, nv);
5767 if (x)
5768 gimplify_and_add (x, dlist);
5770 else if (is_simd
5771 && ctx->scan_exclusive
5772 && TREE_ADDRESSABLE (TREE_TYPE (new_var)))
5774 tree nv2 = create_tmp_var_raw (TREE_TYPE (new_var));
5775 gimple_add_tmp_var (nv2);
5776 ctx->cb.decl_map->put (new_vard, nv2);
5777 x = lang_hooks.decls.omp_clause_dtor (c, nv2);
5778 if (x)
5779 gimplify_and_add (x, dlist);
5781 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
5782 goto do_dtor;
5785 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
5787 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
5788 lower_omp (&tseq, ctx);
5789 gimple_seq_add_seq (ilist, tseq);
5791 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
5792 if (is_simd)
5794 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
5795 lower_omp (&tseq, ctx);
5796 gimple_seq_add_seq (dlist, tseq);
5797 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5799 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
5800 if (cond)
5802 if (lab2)
5803 gimple_seq_add_stmt (ilist, gimple_build_label (lab2));
5804 break;
5806 goto do_dtor;
5808 else
5810 x = omp_reduction_init (c, TREE_TYPE (new_var));
5811 gcc_assert (TREE_CODE (TREE_TYPE (new_var)) != ARRAY_TYPE);
5812 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
5814 if (cond)
5816 gimple *g;
5817 tree lab2 = NULL_TREE;
5818 /* GOMP_taskgroup_reduction_register memsets the whole
5819 array to zero. If the initializer is zero, we don't
5820 need to initialize it again, just mark it as ever
5821 used unconditionally, i.e. cond = true. */
5822 if (initializer_zerop (x))
5824 g = gimple_build_assign (build_simple_mem_ref (cond),
5825 boolean_true_node);
5826 gimple_seq_add_stmt (ilist, g);
5827 break;
5830 /* Otherwise, emit
5831 if (!cond) { cond = true; new_var = x; } */
5832 if (!is_parallel_ctx (ctx))
5834 tree condv = create_tmp_var (boolean_type_node);
5835 tree m = build_simple_mem_ref (cond);
5836 g = gimple_build_assign (condv, m);
5837 gimple_seq_add_stmt (ilist, g);
5838 tree lab1
5839 = create_artificial_label (UNKNOWN_LOCATION);
5840 lab2 = create_artificial_label (UNKNOWN_LOCATION);
5841 g = gimple_build_cond (NE_EXPR, condv,
5842 boolean_false_node,
5843 lab2, lab1);
5844 gimple_seq_add_stmt (ilist, g);
5845 gimple_seq_add_stmt (ilist,
5846 gimple_build_label (lab1));
5848 g = gimple_build_assign (build_simple_mem_ref (cond),
5849 boolean_true_node);
5850 gimple_seq_add_stmt (ilist, g);
5851 gimplify_assign (new_var, x, ilist);
5852 if (lab2)
5853 gimple_seq_add_stmt (ilist, gimple_build_label (lab2));
5854 break;
5857 /* reduction(-:var) sums up the partial results, so it
5858 acts identically to reduction(+:var). */
5859 if (code == MINUS_EXPR)
5860 code = PLUS_EXPR;
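/* Illustrative note: each private copy starts from the identity
   value 0, threads subtract their contributions into it, and the
   partial results are then added back into the original variable,
   which is why PLUS is the right combiner here.  */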
5862 tree new_vard = new_var;
5863 if (is_simd && omp_is_reference (var))
5865 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5866 new_vard = TREE_OPERAND (new_var, 0);
5867 gcc_assert (DECL_P (new_vard));
5869 tree rvar = NULL_TREE, *rvarp = NULL, rvar2 = NULL_TREE;
5870 if (is_simd
5871 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
5872 && OMP_CLAUSE_REDUCTION_INSCAN (c))
5873 rvarp = &rvar;
5874 if (is_simd
5875 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
5876 ivar, lvar, rvarp,
5877 &rvar2))
5879 if (new_vard != new_var)
5881 SET_DECL_VALUE_EXPR (new_vard,
5882 build_fold_addr_expr (lvar));
5883 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
5886 tree ref = build_outer_var_ref (var, ctx);
5888 if (rvarp)
5890 if (ctx->for_simd_scan_phase)
5891 break;
5892 gimplify_assign (ivar, ref, &llist[0]);
5893 ref = build_outer_var_ref (var, ctx);
5894 gimplify_assign (ref, rvar, &llist[3]);
5895 break;
5898 gimplify_assign (unshare_expr (ivar), x, &llist[0]);
5900 if (sctx.is_simt)
5902 if (!simt_lane)
5903 simt_lane = create_tmp_var (unsigned_type_node);
5904 x = build_call_expr_internal_loc
5905 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_BFLY,
5906 TREE_TYPE (ivar), 2, ivar, simt_lane);
5907 x = build2 (code, TREE_TYPE (ivar), ivar, x);
5908 gimplify_assign (ivar, x, &llist[2]);
5910 x = build2 (code, TREE_TYPE (ref), ref, ivar);
5911 ref = build_outer_var_ref (var, ctx);
5912 gimplify_assign (ref, x, &llist[1]);
5915 else
5917 if (omp_is_reference (var) && is_simd)
5918 handle_simd_reference (clause_loc, new_vard, ilist);
5919 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
5920 && OMP_CLAUSE_REDUCTION_INSCAN (c))
5921 break;
5922 gimplify_assign (new_var, x, ilist);
5923 if (is_simd)
5925 tree ref = build_outer_var_ref (var, ctx);
5927 x = build2 (code, TREE_TYPE (ref), ref, new_var);
5928 ref = build_outer_var_ref (var, ctx);
5929 gimplify_assign (ref, x, dlist);
5933 break;
5935 default:
5936 gcc_unreachable ();
5940 if (tskred_avar)
5942 tree clobber = build_clobber (TREE_TYPE (tskred_avar));
5943 gimple_seq_add_stmt (ilist, gimple_build_assign (tskred_avar, clobber));
5946 if (known_eq (sctx.max_vf, 1U))
5948 sctx.is_simt = false;
5949 if (ctx->lastprivate_conditional_map)
5951 if (gimple_omp_for_combined_into_p (ctx->stmt))
5953 /* Signal to lower_omp_1 that it should use parent context. */
5954 ctx->combined_into_simd_safelen1 = true;
5955 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
5956 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5957 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
5959 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
5960 omp_context *outer = ctx->outer;
5961 if (gimple_code (outer->stmt) == GIMPLE_OMP_SCAN)
5962 outer = outer->outer;
5963 tree *v = ctx->lastprivate_conditional_map->get (o);
5964 tree po = lookup_decl (OMP_CLAUSE_DECL (c), outer);
5965 tree *pv = outer->lastprivate_conditional_map->get (po);
5966 *v = *pv;
5969 else
5971 /* When not vectorized, treat lastprivate(conditional:) like
5972 normal lastprivate, as there will be just one simd lane
5973 writing the privatized variable. */
5974 delete ctx->lastprivate_conditional_map;
5975 ctx->lastprivate_conditional_map = NULL;
5980 if (nonconst_simd_if)
5982 if (sctx.lane == NULL_TREE)
5984 sctx.idx = create_tmp_var (unsigned_type_node);
5985 sctx.lane = create_tmp_var (unsigned_type_node);
5987 /* FIXME: For now. */
5988 sctx.is_simt = false;
5991 if (sctx.lane || sctx.is_simt)
5993 uid = create_tmp_var (ptr_type_node, "simduid");
5994 /* Don't want uninit warnings on simduid; it is always uninitialized,
5995 but we use it not for its value, only for its DECL_UID. */
5996 TREE_NO_WARNING (uid) = 1;
5997 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SIMDUID_);
5998 OMP_CLAUSE__SIMDUID__DECL (c) = uid;
5999 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
6000 gimple_omp_for_set_clauses (ctx->stmt, c);
6002 /* Emit calls denoting privatized variables and initializing a pointer to
6003 the structure that holds private variables as fields after the ompdevlow pass. */
6004 if (sctx.is_simt)
6006 sctx.simt_eargs[0] = uid;
6007 gimple *g
6008 = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, sctx.simt_eargs);
6009 gimple_call_set_lhs (g, uid);
6010 gimple_seq_add_stmt (ilist, g);
6011 sctx.simt_eargs.release ();
6013 simtrec = create_tmp_var (ptr_type_node, ".omp_simt");
6014 g = gimple_build_call_internal (IFN_GOMP_SIMT_ENTER_ALLOC, 1, uid);
6015 gimple_call_set_lhs (g, simtrec);
6016 gimple_seq_add_stmt (ilist, g);
6018 if (sctx.lane)
6020 gimple *g = gimple_build_call_internal (IFN_GOMP_SIMD_LANE,
6021 2 + (nonconst_simd_if != NULL),
6022 uid, integer_zero_node,
6023 nonconst_simd_if);
6024 gimple_call_set_lhs (g, sctx.lane);
6025 gimple_stmt_iterator gsi = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
6026 gsi_insert_before_without_update (&gsi, g, GSI_SAME_STMT);
6027 g = gimple_build_assign (sctx.lane, INTEGER_CST,
6028 build_int_cst (unsigned_type_node, 0));
6029 gimple_seq_add_stmt (ilist, g);
6030 if (sctx.lastlane)
6032 g = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
6033 2, uid, sctx.lane);
6034 gimple_call_set_lhs (g, sctx.lastlane);
6035 gimple_seq_add_stmt (dlist, g);
6036 gimple_seq_add_seq (dlist, llist[3]);
6038 /* Emit reductions across SIMT lanes in log_2(simt_vf) steps. */
6039 if (llist[2])
6041 tree simt_vf = create_tmp_var (unsigned_type_node);
6042 g = gimple_build_call_internal (IFN_GOMP_SIMT_VF, 0);
6043 gimple_call_set_lhs (g, simt_vf);
6044 gimple_seq_add_stmt (dlist, g);
6046 tree t = build_int_cst (unsigned_type_node, 1);
6047 g = gimple_build_assign (simt_lane, INTEGER_CST, t);
6048 gimple_seq_add_stmt (dlist, g);
6050 t = build_int_cst (unsigned_type_node, 0);
6051 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
6052 gimple_seq_add_stmt (dlist, g);
6054 tree body = create_artificial_label (UNKNOWN_LOCATION);
6055 tree header = create_artificial_label (UNKNOWN_LOCATION);
6056 tree end = create_artificial_label (UNKNOWN_LOCATION);
6057 gimple_seq_add_stmt (dlist, gimple_build_goto (header));
6058 gimple_seq_add_stmt (dlist, gimple_build_label (body));
6060 gimple_seq_add_seq (dlist, llist[2]);
6062 g = gimple_build_assign (simt_lane, LSHIFT_EXPR, simt_lane, integer_one_node);
6063 gimple_seq_add_stmt (dlist, g);
6065 gimple_seq_add_stmt (dlist, gimple_build_label (header));
6066 g = gimple_build_cond (LT_EXPR, simt_lane, simt_vf, body, end);
6067 gimple_seq_add_stmt (dlist, g);
6069 gimple_seq_add_stmt (dlist, gimple_build_label (end));
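/* A rough sketch of the sequence just emitted, in C-like form
   (illustrative only; OP stands for the reduction operator):

     simt_vf = GOMP_SIMT_VF ();
     for (simt_lane = 1; simt_lane < simt_vf; simt_lane <<= 1)
       ivar = ivar OP GOMP_SIMT_XCHG_BFLY (ivar, simt_lane);

   i.e. a butterfly exchange that combines all SIMT lanes in
   log_2(simt_vf) steps.  */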
6071 for (int i = 0; i < 2; i++)
6072 if (llist[i])
6074 tree vf = create_tmp_var (unsigned_type_node);
6075 g = gimple_build_call_internal (IFN_GOMP_SIMD_VF, 1, uid);
6076 gimple_call_set_lhs (g, vf);
6077 gimple_seq *seq = i == 0 ? ilist : dlist;
6078 gimple_seq_add_stmt (seq, g);
6079 tree t = build_int_cst (unsigned_type_node, 0);
6080 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
6081 gimple_seq_add_stmt (seq, g);
6082 tree body = create_artificial_label (UNKNOWN_LOCATION);
6083 tree header = create_artificial_label (UNKNOWN_LOCATION);
6084 tree end = create_artificial_label (UNKNOWN_LOCATION);
6085 gimple_seq_add_stmt (seq, gimple_build_goto (header));
6086 gimple_seq_add_stmt (seq, gimple_build_label (body));
6087 gimple_seq_add_seq (seq, llist[i]);
6088 t = build_int_cst (unsigned_type_node, 1);
6089 g = gimple_build_assign (sctx.idx, PLUS_EXPR, sctx.idx, t);
6090 gimple_seq_add_stmt (seq, g);
6091 gimple_seq_add_stmt (seq, gimple_build_label (header));
6092 g = gimple_build_cond (LT_EXPR, sctx.idx, vf, body, end);
6093 gimple_seq_add_stmt (seq, g);
6094 gimple_seq_add_stmt (seq, gimple_build_label (end));
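/* Each of the two loops built above has roughly this shape
   (illustrative only):

     vf = GOMP_SIMD_VF (simduid);
     for (idx = 0; idx < vf; idx++)
       <llist[i] statements for SIMD array element idx>;

   with i == 0 emitted into ILIST (per-lane initialization) and
   i == 1 into DLIST (per-lane reduction/destruction).  */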
6097 if (sctx.is_simt)
6099 gimple_seq_add_seq (dlist, sctx.simt_dlist);
6100 gimple *g
6101 = gimple_build_call_internal (IFN_GOMP_SIMT_EXIT, 1, simtrec);
6102 gimple_seq_add_stmt (dlist, g);
6105 /* The copyin sequence is not to be executed by the main thread, since
6106 that would result in self-copies. A self-copy might be harmless for
6107 scalars, but it certainly is not for C++ operator=. */
6108 if (copyin_seq)
6110 x = build_call_expr (builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM),
6112 x = build2 (NE_EXPR, boolean_type_node, x,
6113 build_int_cst (TREE_TYPE (x), 0));
6114 x = build3 (COND_EXPR, void_type_node, x, copyin_seq, NULL);
6115 gimplify_and_add (x, ilist);
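/* In other words, roughly (illustrative):
     if (omp_get_thread_num () != 0)
       <copyin_seq>;  */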
6118 /* If any copyin variable is passed by reference, we must ensure the
6119 master thread doesn't modify it before it is copied over in all
6120 threads. Similarly for variables in both firstprivate and
6121 lastprivate clauses we need to ensure the lastprivate copying
6122 happens after firstprivate copying in all threads. And similarly
6123 for UDRs if the initializer expression refers to omp_orig. */
6124 if (copyin_by_ref || lastprivate_firstprivate
6125 || (reduction_omp_orig_ref
6126 && !ctx->scan_inclusive
6127 && !ctx->scan_exclusive))
6129 /* Don't add any barrier for #pragma omp simd or
6130 #pragma omp distribute. */
6131 if (!is_task_ctx (ctx)
6132 && (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
6133 || gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_FOR))
6134 gimple_seq_add_stmt (ilist, omp_build_barrier (NULL_TREE));
6137 /* If max_vf is non-zero, then we can use only a vectorization factor
6138 up to the max_vf we chose. So stick it into the safelen clause. */
6139 if (maybe_ne (sctx.max_vf, 0U))
6141 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
6142 OMP_CLAUSE_SAFELEN);
6143 poly_uint64 safe_len;
6144 if (c == NULL_TREE
6145 || (poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
6146 && maybe_gt (safe_len, sctx.max_vf)))
6148 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN);
6149 OMP_CLAUSE_SAFELEN_EXPR (c) = build_int_cst (integer_type_node,
6150 sctx.max_vf);
6151 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
6152 gimple_omp_for_set_clauses (ctx->stmt, c);
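/* E.g. if max_vf was capped at 16 and the user specified no safelen,
   or one greater than 16, a safelen(16) clause is prepended here
   (illustrative).  */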
6157 /* Create temporary variables for lastprivate(conditional:) implementation
6158 in context CTX with CLAUSES. */
6160 static void
6161 lower_lastprivate_conditional_clauses (tree *clauses, omp_context *ctx)
6163 tree iter_type = NULL_TREE;
6164 tree cond_ptr = NULL_TREE;
6165 tree iter_var = NULL_TREE;
6166 bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
6167 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD);
6168 tree next = *clauses;
6169 for (tree c = *clauses; c; c = OMP_CLAUSE_CHAIN (c))
6170 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6171 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
6173 if (is_simd)
6175 tree cc = omp_find_clause (next, OMP_CLAUSE__CONDTEMP_);
6176 gcc_assert (cc);
6177 if (iter_type == NULL_TREE)
6179 iter_type = TREE_TYPE (OMP_CLAUSE_DECL (cc));
6180 iter_var = create_tmp_var_raw (iter_type);
6181 DECL_CONTEXT (iter_var) = current_function_decl;
6182 DECL_SEEN_IN_BIND_EXPR_P (iter_var) = 1;
6183 DECL_CHAIN (iter_var) = ctx->block_vars;
6184 ctx->block_vars = iter_var;
6185 tree c3
6186 = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
6187 OMP_CLAUSE__CONDTEMP__ITER (c3) = 1;
6188 OMP_CLAUSE_DECL (c3) = iter_var;
6189 OMP_CLAUSE_CHAIN (c3) = *clauses;
6190 *clauses = c3;
6191 ctx->lastprivate_conditional_map = new hash_map<tree, tree>;
6193 next = OMP_CLAUSE_CHAIN (cc);
6194 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
6195 tree v = lookup_decl (OMP_CLAUSE_DECL (cc), ctx);
6196 ctx->lastprivate_conditional_map->put (o, v);
6197 continue;
6199 if (iter_type == NULL)
6201 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR)
6203 struct omp_for_data fd;
6204 omp_extract_for_data (as_a <gomp_for *> (ctx->stmt), &fd,
6205 NULL);
6206 iter_type = unsigned_type_for (fd.iter_type);
6208 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
6209 iter_type = unsigned_type_node;
6210 tree c2 = omp_find_clause (*clauses, OMP_CLAUSE__CONDTEMP_);
6211 if (c2)
6213 cond_ptr
6214 = lookup_decl_in_outer_ctx (OMP_CLAUSE_DECL (c2), ctx);
6215 OMP_CLAUSE_DECL (c2) = cond_ptr;
6217 else
6219 cond_ptr = create_tmp_var_raw (build_pointer_type (iter_type));
6220 DECL_CONTEXT (cond_ptr) = current_function_decl;
6221 DECL_SEEN_IN_BIND_EXPR_P (cond_ptr) = 1;
6222 DECL_CHAIN (cond_ptr) = ctx->block_vars;
6223 ctx->block_vars = cond_ptr;
6224 c2 = build_omp_clause (UNKNOWN_LOCATION,
6225 OMP_CLAUSE__CONDTEMP_);
6226 OMP_CLAUSE_DECL (c2) = cond_ptr;
6227 OMP_CLAUSE_CHAIN (c2) = *clauses;
6228 *clauses = c2;
6230 iter_var = create_tmp_var_raw (iter_type);
6231 DECL_CONTEXT (iter_var) = current_function_decl;
6232 DECL_SEEN_IN_BIND_EXPR_P (iter_var) = 1;
6233 DECL_CHAIN (iter_var) = ctx->block_vars;
6234 ctx->block_vars = iter_var;
6235 tree c3
6236 = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
6237 OMP_CLAUSE__CONDTEMP__ITER (c3) = 1;
6238 OMP_CLAUSE_DECL (c3) = iter_var;
6239 OMP_CLAUSE_CHAIN (c3) = OMP_CLAUSE_CHAIN (c2);
6240 OMP_CLAUSE_CHAIN (c2) = c3;
6241 ctx->lastprivate_conditional_map = new hash_map<tree, tree>;
6243 tree v = create_tmp_var_raw (iter_type);
6244 DECL_CONTEXT (v) = current_function_decl;
6245 DECL_SEEN_IN_BIND_EXPR_P (v) = 1;
6246 DECL_CHAIN (v) = ctx->block_vars;
6247 ctx->block_vars = v;
6248 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
6249 ctx->lastprivate_conditional_map->put (o, v);
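/* Illustrative example of what the temporaries created above are for
   (simplified):

     #pragma omp for lastprivate(conditional: x)
     for (i = 0; i < n; i++)
       if (p[i])
         x = i;

   Each store to the privatized X also records the current iteration
   in the matching _CONDTEMP_ counter; at the end only the thread (or
   SIMD lane) holding the highest counter value copies its private X
   back to the original.  */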
6254 /* Generate code to implement the LASTPRIVATE clauses. This is used for
6255 both parallel and workshare constructs. PREDICATE may be NULL if it's
6256 always true. BODY_P is the sequence in which to insert early
6257 initialization if needed, STMT_LIST is where the non-conditional
6258 lastprivate handling goes, and CSTMT_LIST is a sequence that needs to
6259 be run in a critical section. */
6261 static void
6262 lower_lastprivate_clauses (tree clauses, tree predicate, gimple_seq *body_p,
6263 gimple_seq *stmt_list, gimple_seq *cstmt_list,
6264 omp_context *ctx)
6266 tree x, c, label = NULL, orig_clauses = clauses;
6267 bool par_clauses = false;
6268 tree simduid = NULL, lastlane = NULL, simtcond = NULL, simtlast = NULL;
6269 unsigned HOST_WIDE_INT conditional_off = 0;
6270 gimple_seq post_stmt_list = NULL;
6272 /* Early exit if there are no lastprivate or linear clauses. */
6273 for (; clauses ; clauses = OMP_CLAUSE_CHAIN (clauses))
6274 if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LASTPRIVATE
6275 || (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LINEAR
6276 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (clauses)))
6277 break;
6278 if (clauses == NULL)
6280 /* If this was a workshare clause, see if it had been combined
6281 with its parallel. In that case, look for the clauses on the
6282 parallel statement itself. */
6283 if (is_parallel_ctx (ctx))
6284 return;
6286 ctx = ctx->outer;
6287 if (ctx == NULL || !is_parallel_ctx (ctx))
6288 return;
6290 clauses = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
6291 OMP_CLAUSE_LASTPRIVATE);
6292 if (clauses == NULL)
6293 return;
6294 par_clauses = true;
6297 bool maybe_simt = false;
6298 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
6299 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
6301 maybe_simt = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMT_);
6302 simduid = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMDUID_);
6303 if (simduid)
6304 simduid = OMP_CLAUSE__SIMDUID__DECL (simduid);
6307 if (predicate)
6309 gcond *stmt;
6310 tree label_true, arm1, arm2;
6311 enum tree_code pred_code = TREE_CODE (predicate);
6313 label = create_artificial_label (UNKNOWN_LOCATION);
6314 label_true = create_artificial_label (UNKNOWN_LOCATION);
6315 if (TREE_CODE_CLASS (pred_code) == tcc_comparison)
6317 arm1 = TREE_OPERAND (predicate, 0);
6318 arm2 = TREE_OPERAND (predicate, 1);
6319 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
6320 gimplify_expr (&arm2, stmt_list, NULL, is_gimple_val, fb_rvalue);
6322 else
6324 arm1 = predicate;
6325 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
6326 arm2 = boolean_false_node;
6327 pred_code = NE_EXPR;
6329 if (maybe_simt)
6331 c = build2 (pred_code, boolean_type_node, arm1, arm2);
6332 c = fold_convert (integer_type_node, c);
6333 simtcond = create_tmp_var (integer_type_node);
6334 gimplify_assign (simtcond, c, stmt_list);
6335 gcall *g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY,
6336 1, simtcond);
6337 c = create_tmp_var (integer_type_node);
6338 gimple_call_set_lhs (g, c);
6339 gimple_seq_add_stmt (stmt_list, g);
6340 stmt = gimple_build_cond (NE_EXPR, c, integer_zero_node,
6341 label_true, label);
6343 else
6344 stmt = gimple_build_cond (pred_code, arm1, arm2, label_true, label);
6345 gimple_seq_add_stmt (stmt_list, stmt);
6346 gimple_seq_add_stmt (stmt_list, gimple_build_label (label_true));
6349 tree cond_ptr = NULL_TREE;
6350 for (c = clauses; c ;)
6352 tree var, new_var;
6353 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
6354 gimple_seq *this_stmt_list = stmt_list;
6355 tree lab2 = NULL_TREE;
6357 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6358 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
6359 && ctx->lastprivate_conditional_map
6360 && !ctx->combined_into_simd_safelen1)
6362 gcc_assert (body_p);
6363 if (simduid)
6364 goto next;
6365 if (cond_ptr == NULL_TREE)
6367 cond_ptr = omp_find_clause (orig_clauses, OMP_CLAUSE__CONDTEMP_);
6368 cond_ptr = OMP_CLAUSE_DECL (cond_ptr);
6370 tree type = TREE_TYPE (TREE_TYPE (cond_ptr));
6371 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
6372 tree v = *ctx->lastprivate_conditional_map->get (o);
6373 gimplify_assign (v, build_zero_cst (type), body_p);
6374 this_stmt_list = cstmt_list;
6375 tree mem;
6376 if (POINTER_TYPE_P (TREE_TYPE (cond_ptr)))
6378 mem = build2 (MEM_REF, type, cond_ptr,
6379 build_int_cst (TREE_TYPE (cond_ptr),
6380 conditional_off));
6381 conditional_off += tree_to_uhwi (TYPE_SIZE_UNIT (type));
6383 else
6384 mem = build4 (ARRAY_REF, type, cond_ptr,
6385 size_int (conditional_off++), NULL_TREE, NULL_TREE);
6386 tree mem2 = copy_node (mem);
6387 gimple_seq seq = NULL;
6388 mem = force_gimple_operand (mem, &seq, true, NULL_TREE);
6389 gimple_seq_add_seq (this_stmt_list, seq);
6390 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
6391 lab2 = create_artificial_label (UNKNOWN_LOCATION);
6392 gimple *g = gimple_build_cond (GT_EXPR, v, mem, lab1, lab2);
6393 gimple_seq_add_stmt (this_stmt_list, g);
6394 gimple_seq_add_stmt (this_stmt_list, gimple_build_label (lab1));
6395 gimplify_assign (mem2, v, this_stmt_list);
6397 else if (predicate
6398 && ctx->combined_into_simd_safelen1
6399 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6400 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
6401 && ctx->lastprivate_conditional_map)
6402 this_stmt_list = &post_stmt_list;
6404 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6405 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
6406 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
6408 var = OMP_CLAUSE_DECL (c);
6409 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6410 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
6411 && is_taskloop_ctx (ctx))
6413 gcc_checking_assert (ctx->outer && is_task_ctx (ctx->outer));
6414 new_var = lookup_decl (var, ctx->outer);
6416 else
6418 new_var = lookup_decl (var, ctx);
6419 /* Avoid uninitialized warnings for lastprivate and
6420 for linear iterators. */
6421 if (predicate
6422 && (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6423 || OMP_CLAUSE_LINEAR_NO_COPYIN (c)))
6424 TREE_NO_WARNING (new_var) = 1;
6427 if (!maybe_simt && simduid && DECL_HAS_VALUE_EXPR_P (new_var))
6429 tree val = DECL_VALUE_EXPR (new_var);
6430 if (TREE_CODE (val) == ARRAY_REF
6431 && VAR_P (TREE_OPERAND (val, 0))
6432 && lookup_attribute ("omp simd array",
6433 DECL_ATTRIBUTES (TREE_OPERAND (val,
6434 0))))
6436 if (lastlane == NULL)
6438 lastlane = create_tmp_var (unsigned_type_node);
6439 gcall *g
6440 = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
6441 2, simduid,
6442 TREE_OPERAND (val, 1));
6443 gimple_call_set_lhs (g, lastlane);
6444 gimple_seq_add_stmt (this_stmt_list, g);
6446 new_var = build4 (ARRAY_REF, TREE_TYPE (val),
6447 TREE_OPERAND (val, 0), lastlane,
6448 NULL_TREE, NULL_TREE);
6449 TREE_THIS_NOTRAP (new_var) = 1;
6452 else if (maybe_simt)
6454 tree val = (DECL_HAS_VALUE_EXPR_P (new_var)
6455 ? DECL_VALUE_EXPR (new_var)
6456 : new_var);
6457 if (simtlast == NULL)
6459 simtlast = create_tmp_var (unsigned_type_node);
6460 gcall *g = gimple_build_call_internal
6461 (IFN_GOMP_SIMT_LAST_LANE, 1, simtcond);
6462 gimple_call_set_lhs (g, simtlast);
6463 gimple_seq_add_stmt (this_stmt_list, g);
6465 x = build_call_expr_internal_loc
6466 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_IDX,
6467 TREE_TYPE (val), 2, val, simtlast);
6468 new_var = unshare_expr (new_var);
6469 gimplify_assign (new_var, x, this_stmt_list);
6470 new_var = unshare_expr (new_var);
6473 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6474 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
6476 lower_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
6477 gimple_seq_add_seq (this_stmt_list,
6478 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
6479 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) = NULL;
6481 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
6482 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
6484 lower_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
6485 gimple_seq_add_seq (this_stmt_list,
6486 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
6487 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) = NULL;
6490 x = NULL_TREE;
6491 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6492 && OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c)
6493 && is_taskloop_ctx (ctx))
6495 tree ovar = maybe_lookup_decl_in_outer_ctx (var,
6496 ctx->outer->outer);
6497 if (is_global_var (ovar))
6498 x = ovar;
6500 if (!x)
6501 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_LASTPRIVATE);
6502 if (omp_is_reference (var))
6503 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
6504 x = lang_hooks.decls.omp_clause_assign_op (c, x, new_var);
6505 gimplify_and_add (x, this_stmt_list);
6507 if (lab2)
6508 gimple_seq_add_stmt (this_stmt_list, gimple_build_label (lab2));
6511 next:
6512 c = OMP_CLAUSE_CHAIN (c);
6513 if (c == NULL && !par_clauses)
6515 /* If this was a workshare clause, see if it had been combined
6516 with its parallel. In that case, continue looking for the
6517 clauses also on the parallel statement itself. */
6518 if (is_parallel_ctx (ctx))
6519 break;
6521 ctx = ctx->outer;
6522 if (ctx == NULL || !is_parallel_ctx (ctx))
6523 break;
6525 c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
6526 OMP_CLAUSE_LASTPRIVATE);
6527 par_clauses = true;
6531 if (label)
6532 gimple_seq_add_stmt (stmt_list, gimple_build_label (label));
6533 gimple_seq_add_seq (stmt_list, post_stmt_list);
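/* The overall emitted shape is roughly (illustrative):

     if (<PREDICATE, i.e. this was the last iteration>)
       <copy each private copy back to its original variable>;

   with the compare-and-store sequences for lastprivate(conditional:)
   routed to CSTMT_LIST so that they execute inside a critical
   section.  */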
6536 /* Lower the OpenACC reductions of CLAUSES for compute axis LEVEL
6537 (which might be a placeholder). INNER is true if this is an inner
6538 axis of a multi-axis loop. FORK and JOIN are (optional) fork and
6539 join markers. Generate the before-loop forking sequence in
6540 FORK_SEQ and the after-loop joining sequence to JOIN_SEQ. The
6541 general form of these sequences is
6543 GOACC_REDUCTION_SETUP
6544 GOACC_FORK
6545 GOACC_REDUCTION_INIT
6547 GOACC_REDUCTION_FINI
6548 GOACC_JOIN
6549 GOACC_REDUCTION_TEARDOWN. */
6551 static void
6552 lower_oacc_reductions (location_t loc, tree clauses, tree level, bool inner,
6553 gcall *fork, gcall *join, gimple_seq *fork_seq,
6554 gimple_seq *join_seq, omp_context *ctx)
6556 gimple_seq before_fork = NULL;
6557 gimple_seq after_fork = NULL;
6558 gimple_seq before_join = NULL;
6559 gimple_seq after_join = NULL;
6560 tree init_code = NULL_TREE, fini_code = NULL_TREE,
6561 setup_code = NULL_TREE, teardown_code = NULL_TREE;
6562 unsigned offset = 0;
6564 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
6565 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
6567 tree orig = OMP_CLAUSE_DECL (c);
6568 tree var = maybe_lookup_decl (orig, ctx);
6569 tree ref_to_res = NULL_TREE;
6570 tree incoming, outgoing, v1, v2, v3;
6571 bool is_private = false;
6573 enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
6574 if (rcode == MINUS_EXPR)
6575 rcode = PLUS_EXPR;
6576 else if (rcode == TRUTH_ANDIF_EXPR)
6577 rcode = BIT_AND_EXPR;
6578 else if (rcode == TRUTH_ORIF_EXPR)
6579 rcode = BIT_IOR_EXPR;
6580 tree op = build_int_cst (unsigned_type_node, rcode);
6582 if (!var)
6583 var = orig;
6585 incoming = outgoing = var;
6587 if (!inner)
6589 /* See if an outer construct also reduces this variable. */
6590 omp_context *outer = ctx;
6592 while (omp_context *probe = outer->outer)
6594 enum gimple_code type = gimple_code (probe->stmt);
6595 tree cls;
6597 switch (type)
6599 case GIMPLE_OMP_FOR:
6600 cls = gimple_omp_for_clauses (probe->stmt);
6601 break;
6603 case GIMPLE_OMP_TARGET:
6604 if (gimple_omp_target_kind (probe->stmt)
6605 != GF_OMP_TARGET_KIND_OACC_PARALLEL)
6606 goto do_lookup;
6608 cls = gimple_omp_target_clauses (probe->stmt);
6609 break;
6611 default:
6612 goto do_lookup;
6615 outer = probe;
6616 for (; cls; cls = OMP_CLAUSE_CHAIN (cls))
6617 if (OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_REDUCTION
6618 && orig == OMP_CLAUSE_DECL (cls))
6620 incoming = outgoing = lookup_decl (orig, probe);
6621 goto has_outer_reduction;
6623 else if ((OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_FIRSTPRIVATE
6624 || OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_PRIVATE)
6625 && orig == OMP_CLAUSE_DECL (cls))
6627 is_private = true;
6628 goto do_lookup;
6632 do_lookup:
6633 /* This is the outermost construct with this reduction;
6634 see if there's a mapping for it. */
6635 if (gimple_code (outer->stmt) == GIMPLE_OMP_TARGET
6636 && maybe_lookup_field (orig, outer) && !is_private)
6638 ref_to_res = build_receiver_ref (orig, false, outer);
6639 if (omp_is_reference (orig))
6640 ref_to_res = build_simple_mem_ref (ref_to_res);
6642 tree type = TREE_TYPE (var);
6643 if (POINTER_TYPE_P (type))
6644 type = TREE_TYPE (type);
6646 outgoing = var;
6647 incoming = omp_reduction_init_op (loc, rcode, type);
6649 else
6651 /* Try to look up the reduction var in enclosing contexts;
6652 use the original if no mapping is found. */
6653 tree t = NULL_TREE;
6654 omp_context *c = ctx->outer;
6655 while (c && !t)
6657 t = maybe_lookup_decl (orig, c);
6658 c = c->outer;
6660 incoming = outgoing = (t ? t : orig);
6663 has_outer_reduction:;
6666 if (!ref_to_res)
6667 ref_to_res = integer_zero_node;
6669 if (omp_is_reference (orig))
6671 tree type = TREE_TYPE (var);
6672 const char *id = IDENTIFIER_POINTER (DECL_NAME (var));
6674 if (!inner)
6676 tree x = create_tmp_var (TREE_TYPE (type), id);
6677 gimplify_assign (var, build_fold_addr_expr (x), fork_seq);
6680 v1 = create_tmp_var (type, id);
6681 v2 = create_tmp_var (type, id);
6682 v3 = create_tmp_var (type, id);
6684 gimplify_assign (v1, var, fork_seq);
6685 gimplify_assign (v2, var, fork_seq);
6686 gimplify_assign (v3, var, fork_seq);
6688 var = build_simple_mem_ref (var);
6689 v1 = build_simple_mem_ref (v1);
6690 v2 = build_simple_mem_ref (v2);
6691 v3 = build_simple_mem_ref (v3);
6692 outgoing = build_simple_mem_ref (outgoing);
6694 if (!TREE_CONSTANT (incoming))
6695 incoming = build_simple_mem_ref (incoming);
6697 else
6698 v1 = v2 = v3 = var;
6700 /* Determine the position in the reduction buffer, which may be
6701 used by the target. The parser has ensured that this is not a
6702 variable-sized type. */
6703 fixed_size_mode mode
6704 = as_a <fixed_size_mode> (TYPE_MODE (TREE_TYPE (var)));
6705 unsigned align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
6706 offset = (offset + align - 1) & ~(align - 1);
6707 tree off = build_int_cst (sizetype, offset);
6708 offset += GET_MODE_SIZE (mode);
6710 if (!init_code)
6712 init_code = build_int_cst (integer_type_node,
6713 IFN_GOACC_REDUCTION_INIT);
6714 fini_code = build_int_cst (integer_type_node,
6715 IFN_GOACC_REDUCTION_FINI);
6716 setup_code = build_int_cst (integer_type_node,
6717 IFN_GOACC_REDUCTION_SETUP);
6718 teardown_code = build_int_cst (integer_type_node,
6719 IFN_GOACC_REDUCTION_TEARDOWN);
6722 tree setup_call
6723 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
6724 TREE_TYPE (var), 6, setup_code,
6725 unshare_expr (ref_to_res),
6726 incoming, level, op, off);
6727 tree init_call
6728 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
6729 TREE_TYPE (var), 6, init_code,
6730 unshare_expr (ref_to_res),
6731 v1, level, op, off);
6732 tree fini_call
6733 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
6734 TREE_TYPE (var), 6, fini_code,
6735 unshare_expr (ref_to_res),
6736 v2, level, op, off);
6737 tree teardown_call
6738 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
6739 TREE_TYPE (var), 6, teardown_code,
6740 ref_to_res, v3, level, op, off);
6742 gimplify_assign (v1, setup_call, &before_fork);
6743 gimplify_assign (v2, init_call, &after_fork);
6744 gimplify_assign (v3, fini_call, &before_join);
6745 gimplify_assign (outgoing, teardown_call, &after_join);
6748 /* Now stitch things together. */
6749 gimple_seq_add_seq (fork_seq, before_fork);
6750 if (fork)
6751 gimple_seq_add_stmt (fork_seq, fork);
6752 gimple_seq_add_seq (fork_seq, after_fork);
6754 gimple_seq_add_seq (join_seq, before_join);
6755 if (join)
6756 gimple_seq_add_stmt (join_seq, join);
6757 gimple_seq_add_seq (join_seq, after_join);
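/* Per reduction variable, the calls stitched together above amount to
   (illustrative; v1/v2/v3 are the temporaries created above):

     v1 = GOACC_REDUCTION (SETUP, ref_to_res, incoming, level, op, off);
     GOACC_FORK
     v2 = GOACC_REDUCTION (INIT, ref_to_res, v1, level, op, off);
     <loop body>
     v3 = GOACC_REDUCTION (FINI, ref_to_res, v2, level, op, off);
     GOACC_JOIN
     outgoing = GOACC_REDUCTION (TEARDOWN, ref_to_res, v3, level, op, off);  */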
6760 /* Generate code to implement the REDUCTION clauses and append it
6761 to STMT_SEQP. CLIST, if non-NULL, is a pointer to a sequence
6762 that should also be emitted inside the critical section; in
6763 that case *CLIST is cleared afterwards, otherwise it is left
6764 as is for the caller to emit itself. */
6766 static void
6767 lower_reduction_clauses (tree clauses, gimple_seq *stmt_seqp,
6768 gimple_seq *clist, omp_context *ctx)
6770 gimple_seq sub_seq = NULL;
6771 gimple *stmt;
6772 tree x, c;
6773 int count = 0;
6775 /* OpenACC loop reductions are handled elsewhere. */
6776 if (is_gimple_omp_oacc (ctx->stmt))
6777 return;
6779 /* SIMD reductions are handled in lower_rec_input_clauses. */
6780 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
6781 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
6782 return;
6784 /* inscan reductions are handled elsewhere. */
6785 if (ctx->scan_inclusive || ctx->scan_exclusive)
6786 return;
6788 /* First see if there is exactly one reduction clause. Use OMP_ATOMIC
6789 update in that case, otherwise use a lock. */
6790 for (c = clauses; c && count < 2; c = OMP_CLAUSE_CHAIN (c))
6791 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6792 && !OMP_CLAUSE_REDUCTION_TASK (c))
6794 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
6795 || TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
6797 /* Never use OMP_ATOMIC for array reductions or UDRs. */
6798 count = -1;
6799 break;
6801 count++;
6804 if (count == 0)
6805 return;
6807 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
6809 tree var, ref, new_var, orig_var;
6810 enum tree_code code;
6811 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
6813 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
6814 || OMP_CLAUSE_REDUCTION_TASK (c))
6815 continue;
6817 enum omp_clause_code ccode = OMP_CLAUSE_REDUCTION;
6818 orig_var = var = OMP_CLAUSE_DECL (c);
6819 if (TREE_CODE (var) == MEM_REF)
6821 var = TREE_OPERAND (var, 0);
6822 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
6823 var = TREE_OPERAND (var, 0);
6824 if (TREE_CODE (var) == ADDR_EXPR)
6825 var = TREE_OPERAND (var, 0);
6826 else
6828 /* If this is a pointer- or reference-based array
6829 section, the var could be private in the outer
6830 context, e.g. on an orphaned loop construct. Pretend
6831 this is the private variable's outer reference. */
6832 ccode = OMP_CLAUSE_PRIVATE;
6833 if (TREE_CODE (var) == INDIRECT_REF)
6834 var = TREE_OPERAND (var, 0);
6836 orig_var = var;
6837 if (is_variable_sized (var))
6839 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
6840 var = DECL_VALUE_EXPR (var);
6841 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
6842 var = TREE_OPERAND (var, 0);
6843 gcc_assert (DECL_P (var));
6846 new_var = lookup_decl (var, ctx);
6847 if (var == OMP_CLAUSE_DECL (c) && omp_is_reference (var))
6848 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
6849 ref = build_outer_var_ref (var, ctx, ccode);
6850 code = OMP_CLAUSE_REDUCTION_CODE (c);
6852 /* reduction(-:var) sums up the partial results, so it acts
6853 identically to reduction(+:var). */
6854 if (code == MINUS_EXPR)
6855 code = PLUS_EXPR;
6857 if (count == 1)
6859 tree addr = build_fold_addr_expr_loc (clause_loc, ref);
6861 addr = save_expr (addr);
6862 ref = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (addr)), addr);
6863 x = fold_build2_loc (clause_loc, code, TREE_TYPE (ref), ref, new_var);
6864 x = build2 (OMP_ATOMIC, void_type_node, addr, x);
6865 OMP_ATOMIC_MEMORY_ORDER (x) = OMP_MEMORY_ORDER_RELAXED;
6866 gimplify_and_add (x, stmt_seqp);
6867 return;
6869 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
6871 tree d = OMP_CLAUSE_DECL (c);
6872 tree type = TREE_TYPE (d);
6873 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
6874 tree i = create_tmp_var (TREE_TYPE (v));
6875 tree ptype = build_pointer_type (TREE_TYPE (type));
6876 tree bias = TREE_OPERAND (d, 1);
6877 d = TREE_OPERAND (d, 0);
6878 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
6880 tree b = TREE_OPERAND (d, 1);
6881 b = maybe_lookup_decl (b, ctx);
6882 if (b == NULL)
6884 b = TREE_OPERAND (d, 1);
6885 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
6887 if (integer_zerop (bias))
6888 bias = b;
6889 else
6891 bias = fold_convert_loc (clause_loc, TREE_TYPE (b), bias);
6892 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
6893 TREE_TYPE (b), b, bias);
6895 d = TREE_OPERAND (d, 0);
6897 /* For REF, build_outer_var_ref has already performed the
6898 dereference, so only NEW_VAR needs one. */
6899 if (TREE_CODE (d) == INDIRECT_REF)
6901 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
6902 gcc_assert (omp_is_reference (var) && var == orig_var);
6904 else if (TREE_CODE (d) == ADDR_EXPR)
6906 if (orig_var == var)
6908 new_var = build_fold_addr_expr (new_var);
6909 ref = build_fold_addr_expr (ref);
6912 else
6914 gcc_assert (orig_var == var);
6915 if (omp_is_reference (var))
6916 ref = build_fold_addr_expr (ref);
6918 if (DECL_P (v))
6920 tree t = maybe_lookup_decl (v, ctx);
6921 if (t)
6922 v = t;
6923 else
6924 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
6925 gimplify_expr (&v, stmt_seqp, NULL, is_gimple_val, fb_rvalue);
6927 if (!integer_zerop (bias))
6929 bias = fold_convert_loc (clause_loc, sizetype, bias);
6930 new_var = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
6931 TREE_TYPE (new_var), new_var,
6932 unshare_expr (bias));
6933 ref = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
6934 TREE_TYPE (ref), ref, bias);
6936 new_var = fold_convert_loc (clause_loc, ptype, new_var);
6937 ref = fold_convert_loc (clause_loc, ptype, ref);
6938 tree m = create_tmp_var (ptype);
6939 gimplify_assign (m, new_var, stmt_seqp);
6940 new_var = m;
6941 m = create_tmp_var (ptype);
6942 gimplify_assign (m, ref, stmt_seqp);
6943 ref = m;
6944 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), stmt_seqp);
6945 tree body = create_artificial_label (UNKNOWN_LOCATION);
6946 tree end = create_artificial_label (UNKNOWN_LOCATION);
6947 gimple_seq_add_stmt (&sub_seq, gimple_build_label (body));
6948 tree priv = build_simple_mem_ref_loc (clause_loc, new_var);
6949 tree out = build_simple_mem_ref_loc (clause_loc, ref);
6950 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
6952 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
6953 tree decl_placeholder
6954 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
6955 SET_DECL_VALUE_EXPR (placeholder, out);
6956 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
6957 SET_DECL_VALUE_EXPR (decl_placeholder, priv);
6958 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
6959 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
6960 gimple_seq_add_seq (&sub_seq,
6961 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
6962 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
6963 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
6964 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
6966 else
6968 x = build2 (code, TREE_TYPE (out), out, priv);
6969 out = unshare_expr (out);
6970 gimplify_assign (out, x, &sub_seq);
6972 gimple *g = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
6973 TYPE_SIZE_UNIT (TREE_TYPE (type)));
6974 gimple_seq_add_stmt (&sub_seq, g);
6975 g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
6976 TYPE_SIZE_UNIT (TREE_TYPE (type)));
6977 gimple_seq_add_stmt (&sub_seq, g);
6978 g = gimple_build_assign (i, PLUS_EXPR, i,
6979 build_int_cst (TREE_TYPE (i), 1));
6980 gimple_seq_add_stmt (&sub_seq, g);
6981 g = gimple_build_cond (LE_EXPR, i, v, body, end);
6982 gimple_seq_add_stmt (&sub_seq, g);
6983 gimple_seq_add_stmt (&sub_seq, gimple_build_label (end));
6985 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
6987 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
6989 if (omp_is_reference (var)
6990 && !useless_type_conversion_p (TREE_TYPE (placeholder),
6991 TREE_TYPE (ref)))
6992 ref = build_fold_addr_expr_loc (clause_loc, ref);
6993 SET_DECL_VALUE_EXPR (placeholder, ref);
6994 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
6995 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
6996 gimple_seq_add_seq (&sub_seq, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
6997 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
6998 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
7000 else
7002 x = build2 (code, TREE_TYPE (ref), ref, new_var);
7003 ref = build_outer_var_ref (var, ctx);
7004 gimplify_assign (ref, x, &sub_seq);
7008 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START),
7010 gimple_seq_add_stmt (stmt_seqp, stmt);
7012 gimple_seq_add_seq (stmt_seqp, sub_seq);
7014 if (clist)
7016 gimple_seq_add_seq (stmt_seqp, *clist);
7017 *clist = NULL;
7020 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END),
7022 gimple_seq_add_stmt (stmt_seqp, stmt);
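/* Illustrative summary: with exactly one scalar reduction the merge
   was emitted earlier as a single relaxed atomic update, roughly

     #pragma omp atomic
     *addr = *addr OP new_var;

   otherwise all the merges in SUB_SEQ are serialized between
   GOMP_atomic_start () and GOMP_atomic_end ().  */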
7026 /* Generate code to implement the COPYPRIVATE clauses. */
7028 static void
7029 lower_copyprivate_clauses (tree clauses, gimple_seq *slist, gimple_seq *rlist,
7030 omp_context *ctx)
7032 tree c;
7034 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7036 tree var, new_var, ref, x;
7037 bool by_ref;
7038 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
7040 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYPRIVATE)
7041 continue;
7043 var = OMP_CLAUSE_DECL (c);
7044 by_ref = use_pointer_for_field (var, NULL);
7046 ref = build_sender_ref (var, ctx);
7047 x = new_var = lookup_decl_in_outer_ctx (var, ctx);
7048 if (by_ref)
7050 x = build_fold_addr_expr_loc (clause_loc, new_var);
7051 x = fold_convert_loc (clause_loc, TREE_TYPE (ref), x);
7053 gimplify_assign (ref, x, slist);
7055 ref = build_receiver_ref (var, false, ctx);
7056 if (by_ref)
7058 ref = fold_convert_loc (clause_loc,
7059 build_pointer_type (TREE_TYPE (new_var)),
7060 ref);
7061 ref = build_fold_indirect_ref_loc (clause_loc, ref);
7063 if (omp_is_reference (var))
7065 ref = fold_convert_loc (clause_loc, TREE_TYPE (new_var), ref);
7066 ref = build_simple_mem_ref_loc (clause_loc, ref);
7067 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
7069 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, ref);
7070 gimplify_and_add (x, rlist);
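/* Illustrative effect for copyprivate(x): the one thread that executed
   the single region stores X (or &X when passed by reference) into the
   broadcast structure in SLIST, and every thread then copies the value
   back out of that structure in RLIST.  */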
7075 /* Generate code to implement the clauses, FIRSTPRIVATE, COPYIN, LASTPRIVATE,
7076 and REDUCTION from the sender (aka parent) side. */
7078 static void
7079 lower_send_clauses (tree clauses, gimple_seq *ilist, gimple_seq *olist,
7080 omp_context *ctx)
7082 tree c, t;
7083 int ignored_looptemp = 0;
7084 bool is_taskloop = false;
7086 /* For taskloop, ignore the first two _looptemp_ clauses; those are
7087 initialized by GOMP_taskloop. */
7088 if (is_task_ctx (ctx) && gimple_omp_task_taskloop_p (ctx->stmt))
7090 ignored_looptemp = 2;
7091 is_taskloop = true;
7094 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7096 tree val, ref, x, var;
7097 bool by_ref, do_in = false, do_out = false;
7098 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
7100 switch (OMP_CLAUSE_CODE (c))
7102 case OMP_CLAUSE_PRIVATE:
7103 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
7104 break;
7105 continue;
7106 case OMP_CLAUSE_FIRSTPRIVATE:
7107 case OMP_CLAUSE_COPYIN:
7108 case OMP_CLAUSE_LASTPRIVATE:
7109 case OMP_CLAUSE_IN_REDUCTION:
7110 case OMP_CLAUSE__REDUCTEMP_:
7111 break;
7112 case OMP_CLAUSE_REDUCTION:
7113 if (is_task_ctx (ctx) || OMP_CLAUSE_REDUCTION_TASK (c))
7114 continue;
7115 break;
7116 case OMP_CLAUSE_SHARED:
7117 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
7118 break;
7119 continue;
7120 case OMP_CLAUSE__LOOPTEMP_:
7121 if (ignored_looptemp)
7123 ignored_looptemp--;
7124 continue;
7126 break;
7127 default:
7128 continue;
7131 val = OMP_CLAUSE_DECL (c);
7132 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
7133 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION)
7134 && TREE_CODE (val) == MEM_REF)
7136 val = TREE_OPERAND (val, 0);
7137 if (TREE_CODE (val) == POINTER_PLUS_EXPR)
7138 val = TREE_OPERAND (val, 0);
7139 if (TREE_CODE (val) == INDIRECT_REF
7140 || TREE_CODE (val) == ADDR_EXPR)
7141 val = TREE_OPERAND (val, 0);
7142 if (is_variable_sized (val))
7143 continue;
7146 /* For OMP_CLAUSE_SHARED_FIRSTPRIVATE, look beyond the
7147 outer taskloop region. */
7148 omp_context *ctx_for_o = ctx;
7149 if (is_taskloop
7150 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
7151 && OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
7152 ctx_for_o = ctx->outer;
7154 var = lookup_decl_in_outer_ctx (val, ctx_for_o);
7156 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYIN
7157 && is_global_var (var)
7158 && (val == OMP_CLAUSE_DECL (c)
7159 || !is_task_ctx (ctx)
7160 || (TREE_CODE (TREE_TYPE (val)) != POINTER_TYPE
7161 && (TREE_CODE (TREE_TYPE (val)) != REFERENCE_TYPE
7162 || (TREE_CODE (TREE_TYPE (TREE_TYPE (val)))
7163 != POINTER_TYPE)))))
7164 continue;
7166 t = omp_member_access_dummy_var (var);
7167 if (t)
7169 var = DECL_VALUE_EXPR (var);
7170 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx_for_o);
7171 if (o != t)
7172 var = unshare_and_remap (var, t, o);
7173 else
7174 var = unshare_expr (var);
7177 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
7179 /* Handle taskloop firstprivate/lastprivate, where the
7180 lastprivate on GIMPLE_OMP_TASK is represented as
7181 OMP_CLAUSE_SHARED_FIRSTPRIVATE. */
7182 tree f = lookup_sfield ((splay_tree_key) &DECL_UID (val), ctx);
7183 x = omp_build_component_ref (ctx->sender_decl, f);
7184 if (use_pointer_for_field (val, ctx))
7185 var = build_fold_addr_expr (var);
7186 gimplify_assign (x, var, ilist);
7187 DECL_ABSTRACT_ORIGIN (f) = NULL;
7188 continue;
7191 if (((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
7192 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IN_REDUCTION)
7193 || val == OMP_CLAUSE_DECL (c))
7194 && is_variable_sized (val))
7195 continue;
7196 by_ref = use_pointer_for_field (val, NULL);
7198 switch (OMP_CLAUSE_CODE (c))
7200 case OMP_CLAUSE_FIRSTPRIVATE:
7201 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c)
7202 && !by_ref
7203 && is_task_ctx (ctx))
7204 TREE_NO_WARNING (var) = 1;
7205 do_in = true;
7206 break;
7208 case OMP_CLAUSE_PRIVATE:
7209 case OMP_CLAUSE_COPYIN:
7210 case OMP_CLAUSE__LOOPTEMP_:
7211 case OMP_CLAUSE__REDUCTEMP_:
7212 do_in = true;
7213 break;
7215 case OMP_CLAUSE_LASTPRIVATE:
7216 if (by_ref || omp_is_reference (val))
7218 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
7219 continue;
7220 do_in = true;
7222 else
7224 do_out = true;
7225 if (lang_hooks.decls.omp_private_outer_ref (val))
7226 do_in = true;
7228 break;
7230 case OMP_CLAUSE_REDUCTION:
7231 case OMP_CLAUSE_IN_REDUCTION:
7232 do_in = true;
7233 if (val == OMP_CLAUSE_DECL (c))
7235 if (is_task_ctx (ctx))
7236 by_ref = use_pointer_for_field (val, ctx);
7237 else
7238 do_out = !(by_ref || omp_is_reference (val));
7240 else
7241 by_ref = TREE_CODE (TREE_TYPE (val)) == ARRAY_TYPE;
7242 break;
7244 default:
7245 gcc_unreachable ();
7248 if (do_in)
7250 ref = build_sender_ref (val, ctx);
7251 x = by_ref ? build_fold_addr_expr_loc (clause_loc, var) : var;
7252 gimplify_assign (ref, x, ilist);
7253 if (is_task_ctx (ctx))
7254 DECL_ABSTRACT_ORIGIN (TREE_OPERAND (ref, 1)) = NULL;
7257 if (do_out)
7259 ref = build_sender_ref (val, ctx);
7260 gimplify_assign (var, ref, olist);
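/* In short (illustrative): DO_IN produced ".omp_data_o.field = var"
   stores in ILIST, executed before the region starts, and DO_OUT
   produced "var = .omp_data_o.field" loads in OLIST, executed after
   it completes.  */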
7265 /* Generate code to implement SHARED from the sender (aka parent)
7266 side. This is trickier, since GIMPLE_OMP_PARALLEL_CLAUSES doesn't
7267 list things that got automatically shared. */
7269 static void
7270 lower_send_shared_vars (gimple_seq *ilist, gimple_seq *olist, omp_context *ctx)
7272 tree var, ovar, nvar, t, f, x, record_type;
7274 if (ctx->record_type == NULL)
7275 return;
7277 record_type = ctx->srecord_type ? ctx->srecord_type : ctx->record_type;
7278 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
7280 ovar = DECL_ABSTRACT_ORIGIN (f);
7281 if (!ovar || TREE_CODE (ovar) == FIELD_DECL)
7282 continue;
7284 nvar = maybe_lookup_decl (ovar, ctx);
7285 if (!nvar || !DECL_HAS_VALUE_EXPR_P (nvar))
7286 continue;
7288 /* CTX may be a nested parallel directive. Find the immediately
7289 enclosing parallel or workshare construct that contains a
7290 mapping for OVAR. */
7291 var = lookup_decl_in_outer_ctx (ovar, ctx);
7293 t = omp_member_access_dummy_var (var);
7294 if (t)
7296 var = DECL_VALUE_EXPR (var);
7297 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
7298 if (o != t)
7299 var = unshare_and_remap (var, t, o);
7300 else
7301 var = unshare_expr (var);
7304 if (use_pointer_for_field (ovar, ctx))
7306 x = build_sender_ref (ovar, ctx);
7307 if (TREE_CODE (TREE_TYPE (f)) == ARRAY_TYPE
7308 && TREE_TYPE (f) == TREE_TYPE (ovar))
7310 gcc_assert (is_parallel_ctx (ctx)
7311 && DECL_ARTIFICIAL (ovar));
7312 /* _condtemp_ clause. */
7313 var = build_constructor (TREE_TYPE (x), NULL);
7315 else
7316 var = build_fold_addr_expr (var);
7317 gimplify_assign (x, var, ilist);
7319 else
7321 x = build_sender_ref (ovar, ctx);
7322 gimplify_assign (x, var, ilist);
7324 if (!TREE_READONLY (var)
7325 /* We don't need to receive a new reference to a result
7326 or parm decl. In fact we must not store to it, as that
7327 would invalidate any pending RSO and generate wrong
7328 gimple during inlining. */
7329 && !((TREE_CODE (var) == RESULT_DECL
7330 || TREE_CODE (var) == PARM_DECL)
7331 && DECL_BY_REFERENCE (var)))
7333 x = build_sender_ref (ovar, ctx);
7334 gimplify_assign (var, x, olist);
7340 /* Emit an OpenACC head marker call, encapsulating the partitioning and
7341 other information that must be processed by the target compiler.
7342 Return the maximum number of dimensions the associated loop might
7343 be partitioned over. */
7345 static unsigned
7346 lower_oacc_head_mark (location_t loc, tree ddvar, tree clauses,
7347 gimple_seq *seq, omp_context *ctx)
7349 unsigned levels = 0;
7350 unsigned tag = 0;
7351 tree gang_static = NULL_TREE;
7352 auto_vec<tree, 5> args;
7354 args.quick_push (build_int_cst
7355 (integer_type_node, IFN_UNIQUE_OACC_HEAD_MARK));
7356 args.quick_push (ddvar);
7357 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
7359 switch (OMP_CLAUSE_CODE (c))
7361 case OMP_CLAUSE_GANG:
7362 tag |= OLF_DIM_GANG;
7363 gang_static = OMP_CLAUSE_GANG_STATIC_EXPR (c);
7364 /* static:* is represented by -1, and we can ignore it, as
7365 scheduling is always static. */
7366 if (gang_static && integer_minus_onep (gang_static))
7367 gang_static = NULL_TREE;
7368 levels++;
7369 break;
7371 case OMP_CLAUSE_WORKER:
7372 tag |= OLF_DIM_WORKER;
7373 levels++;
7374 break;
7376 case OMP_CLAUSE_VECTOR:
7377 tag |= OLF_DIM_VECTOR;
7378 levels++;
7379 break;
7381 case OMP_CLAUSE_SEQ:
7382 tag |= OLF_SEQ;
7383 break;
7385 case OMP_CLAUSE_AUTO:
7386 tag |= OLF_AUTO;
7387 break;
7389 case OMP_CLAUSE_INDEPENDENT:
7390 tag |= OLF_INDEPENDENT;
7391 break;
7393 case OMP_CLAUSE_TILE:
7394 tag |= OLF_TILE;
7395 break;
7397 default:
7398 continue;
7402 if (gang_static)
7404 if (DECL_P (gang_static))
7405 gang_static = build_outer_var_ref (gang_static, ctx);
7406 tag |= OLF_GANG_STATIC;
7409 /* In a parallel region, loops are implicitly INDEPENDENT. */
7410 omp_context *tgt = enclosing_target_ctx (ctx);
7411 if (!tgt || is_oacc_parallel (tgt))
7412 tag |= OLF_INDEPENDENT;
7414 if (tag & OLF_TILE)
7415 /* Tiling could use all 3 levels. */
7416 levels = 3;
7417 else
7419 /* A loop lacking SEQ, GANG, WORKER and/or VECTOR could be AUTO.
7420 Ensure at least one level, or 2 for possible auto
7421 partitioning. */
7422 bool maybe_auto = !(tag & (((GOMP_DIM_MASK (GOMP_DIM_MAX) - 1)
7423 << OLF_DIM_BASE) | OLF_SEQ));
7425 if (levels < 1u + maybe_auto)
7426 levels = 1u + maybe_auto;
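/* E.g. an explicit "gang vector" loop yields levels == 2 from the
   clauses themselves, while a bare "independent" loop is a candidate
   for auto partitioning and is also given 2 (illustrative).  */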
7429 args.quick_push (build_int_cst (integer_type_node, levels));
7430 args.quick_push (build_int_cst (integer_type_node, tag));
7431 if (gang_static)
7432 args.quick_push (gang_static);
7434 gcall *call = gimple_build_call_internal_vec (IFN_UNIQUE, args);
7435 gimple_set_location (call, loc);
7436 gimple_set_lhs (call, ddvar);
7437 gimple_seq_add_stmt (seq, call);
7439 return levels;
7442 /* Emit an OpenACC loop head or tail marker to SEQ. LEVEL is the
7443 partitioning level of the enclosed region. */
7445 static void
7446 lower_oacc_loop_marker (location_t loc, tree ddvar, bool head,
7447 tree tofollow, gimple_seq *seq)
7449 int marker_kind = (head ? IFN_UNIQUE_OACC_HEAD_MARK
7450 : IFN_UNIQUE_OACC_TAIL_MARK);
7451 tree marker = build_int_cst (integer_type_node, marker_kind);
7452 int nargs = 2 + (tofollow != NULL_TREE);
7453 gcall *call = gimple_build_call_internal (IFN_UNIQUE, nargs,
7454 marker, ddvar, tofollow);
7455 gimple_set_location (call, loc);
7456 gimple_set_lhs (call, ddvar);
7457 gimple_seq_add_stmt (seq, call);
7460 /* Generate the before and after OpenACC loop sequences. CLAUSES are
7461 the loop clauses, from which we extract reductions. Initialize
7462 HEAD and TAIL. */
7464 static void
7465 lower_oacc_head_tail (location_t loc, tree clauses,
7466 gimple_seq *head, gimple_seq *tail, omp_context *ctx)
7468 bool inner = false;
7469 tree ddvar = create_tmp_var (integer_type_node, ".data_dep");
7470 gimple_seq_add_stmt (head, gimple_build_assign (ddvar, integer_zero_node));
7472 unsigned count = lower_oacc_head_mark (loc, ddvar, clauses, head, ctx);
7473 tree fork_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_FORK);
7474 tree join_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_JOIN);
7476 gcc_assert (count);
7477 for (unsigned done = 1; count; count--, done++)
7479 gimple_seq fork_seq = NULL;
7480 gimple_seq join_seq = NULL;
7482 tree place = build_int_cst (integer_type_node, -1);
7483 gcall *fork = gimple_build_call_internal (IFN_UNIQUE, 3,
7484 fork_kind, ddvar, place);
7485 gimple_set_location (fork, loc);
7486 gimple_set_lhs (fork, ddvar);
7488 gcall *join = gimple_build_call_internal (IFN_UNIQUE, 3,
7489 join_kind, ddvar, place);
7490 gimple_set_location (join, loc);
7491 gimple_set_lhs (join, ddvar);
7493 /* Mark the beginning of this level sequence. */
7494 if (inner)
7495 lower_oacc_loop_marker (loc, ddvar, true,
7496 build_int_cst (integer_type_node, count),
7497 &fork_seq);
7498 lower_oacc_loop_marker (loc, ddvar, false,
7499 build_int_cst (integer_type_node, done),
7500 &join_seq);
7502 lower_oacc_reductions (loc, clauses, place, inner,
7503 fork, join, &fork_seq, &join_seq, ctx);
7505 /* Append this level to head. */
7506 gimple_seq_add_seq (head, fork_seq);
7507 /* Prepend it to tail. */
7508 gimple_seq_add_seq (&join_seq, *tail);
7509 *tail = join_seq;
7511 inner = true;
7514 /* Mark the end of the sequence. */
7515 lower_oacc_loop_marker (loc, ddvar, true, NULL_TREE, head);
7516 lower_oacc_loop_marker (loc, ddvar, false, NULL_TREE, tail);
7519 /* If exceptions are enabled, wrap the statements in BODY in a MUST_NOT_THROW
7520 catch handler and return it. This prevents programs from violating the
7521 structured block semantics with throws. */
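/* The resulting sequence is roughly

     try
       {
         BODY;
       }
     catch
       {
         <MUST_NOT_THROW: DECL>;
       }

   where DECL is the language's eh_protect_cleanup_actions hook
   (e.g. a call to std::terminate for C++), or __builtin_trap as
   the fallback.  */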
7523 static gimple_seq
7524 maybe_catch_exception (gimple_seq body)
7526 gimple *g;
7527 tree decl;
7529 if (!flag_exceptions)
7530 return body;
7532 if (lang_hooks.eh_protect_cleanup_actions != NULL)
7533 decl = lang_hooks.eh_protect_cleanup_actions ();
7534 else
7535 decl = builtin_decl_explicit (BUILT_IN_TRAP);
7537 g = gimple_build_eh_must_not_throw (decl);
7538 g = gimple_build_try (body, gimple_seq_alloc_with_stmt (g),
7539 GIMPLE_TRY_CATCH);
7541 return gimple_seq_alloc_with_stmt (g);
7545 /* Routines to lower OMP directives into OMP-GIMPLE. */
7547 /* If CTX is a worksharing context inside of a cancellable parallel
7548 region and it isn't nowait, add a LHS to its GIMPLE_OMP_RETURN
7549 and a conditional branch to the parallel's cancel_label to handle
7550 cancellation in the implicit barrier. */
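/* The code added after the GIMPLE_OMP_RETURN is roughly

     <lhs> = GIMPLE_OMP_RETURN;
     if (<lhs> != 0) goto <cancel_label>; else goto <fallthru_label>;
     <fallthru_label>:

   relying on the expansion of a GIMPLE_OMP_RETURN with a LHS into a
   cancellable barrier or end call (e.g. GOMP_barrier_cancel) whose
   result is true iff the parallel has been cancelled.  */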
7552 static void
7553 maybe_add_implicit_barrier_cancel (omp_context *ctx, gimple *omp_return,
7554 gimple_seq *body)
7556 gcc_assert (gimple_code (omp_return) == GIMPLE_OMP_RETURN);
7557 if (gimple_omp_return_nowait_p (omp_return))
7558 return;
7559 for (omp_context *outer = ctx->outer; outer; outer = outer->outer)
7560 if (gimple_code (outer->stmt) == GIMPLE_OMP_PARALLEL
7561 && outer->cancellable)
7563 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_CANCEL);
7564 tree c_bool_type = TREE_TYPE (TREE_TYPE (fndecl));
7565 tree lhs = create_tmp_var (c_bool_type);
7566 gimple_omp_return_set_lhs (omp_return, lhs);
7567 tree fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
7568 gimple *g = gimple_build_cond (NE_EXPR, lhs,
7569 fold_convert (c_bool_type,
7570 boolean_false_node),
7571 outer->cancel_label, fallthru_label);
7572 gimple_seq_add_stmt (body, g);
7573 gimple_seq_add_stmt (body, gimple_build_label (fallthru_label));
7575 else if (gimple_code (outer->stmt) != GIMPLE_OMP_TASKGROUP)
7576 return;
7579 /* Find the first task_reduction or reduction clause or return NULL
7580 if there are none. */
7582 static inline tree
7583 omp_task_reductions_find_first (tree clauses, enum tree_code code,
7584 enum omp_clause_code ccode)
7586 while (1)
7588 clauses = omp_find_clause (clauses, ccode);
7589 if (clauses == NULL_TREE)
7590 return NULL_TREE;
7591 if (ccode != OMP_CLAUSE_REDUCTION
7592 || code == OMP_TASKLOOP
7593 || OMP_CLAUSE_REDUCTION_TASK (clauses))
7594 return clauses;
7595 clauses = OMP_CLAUSE_CHAIN (clauses);
7599 static void lower_omp_task_reductions (omp_context *, enum tree_code, tree,
7600 gimple_seq *, gimple_seq *);
7602 /* Lower the OpenMP sections directive in the current statement in GSI_P.
7603 CTX is the enclosing OMP context for the current statement. */
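/* The construct is rewritten into roughly

     <ilist>                     (privatization and reduction setup)
     GIMPLE_OMP_SECTIONS <control>
     GIMPLE_OMP_SECTIONS_SWITCH
     <bind with the lowered section bodies>
     GIMPLE_OMP_CONTINUE <control>
     <olist>                     (reduction merges)
     <dlist>                     (lastprivate copy-out, destructors)
     GIMPLE_OMP_RETURN [nowait]

   as assembled into NEW_BODY at the end of this function.  */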
7605 static void
7606 lower_omp_sections (gimple_stmt_iterator *gsi_p, omp_context *ctx)
7608 tree block, control;
7609 gimple_stmt_iterator tgsi;
7610 gomp_sections *stmt;
7611 gimple *t;
7612 gbind *new_stmt, *bind;
7613 gimple_seq ilist, dlist, olist, tred_dlist = NULL, clist = NULL, new_body;
7615 stmt = as_a <gomp_sections *> (gsi_stmt (*gsi_p));
7617 push_gimplify_context ();
7619 dlist = NULL;
7620 ilist = NULL;
7622 tree rclauses
7623 = omp_task_reductions_find_first (gimple_omp_sections_clauses (stmt),
7624 OMP_SECTIONS, OMP_CLAUSE_REDUCTION);
7625 tree rtmp = NULL_TREE;
7626 if (rclauses)
7628 tree type = build_pointer_type (pointer_sized_int_node);
7629 tree temp = create_tmp_var (type);
7630 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
7631 OMP_CLAUSE_DECL (c) = temp;
7632 OMP_CLAUSE_CHAIN (c) = gimple_omp_sections_clauses (stmt);
7633 gimple_omp_sections_set_clauses (stmt, c);
7634 lower_omp_task_reductions (ctx, OMP_SECTIONS,
7635 gimple_omp_sections_clauses (stmt),
7636 &ilist, &tred_dlist);
7637 rclauses = c;
7638 rtmp = make_ssa_name (type);
7639 gimple_seq_add_stmt (&ilist, gimple_build_assign (rtmp, temp));
7642 tree *clauses_ptr = gimple_omp_sections_clauses_ptr (stmt);
7643 lower_lastprivate_conditional_clauses (clauses_ptr, ctx);
7645 lower_rec_input_clauses (gimple_omp_sections_clauses (stmt),
7646 &ilist, &dlist, ctx, NULL);
7648 control = create_tmp_var (unsigned_type_node, ".section");
7649 gimple_omp_sections_set_control (stmt, control);
7651 new_body = gimple_omp_body (stmt);
7652 gimple_omp_set_body (stmt, NULL);
7653 tgsi = gsi_start (new_body);
7654 for (; !gsi_end_p (tgsi); gsi_next (&tgsi))
7656 omp_context *sctx;
7657 gimple *sec_start;
7659 sec_start = gsi_stmt (tgsi);
7660 sctx = maybe_lookup_ctx (sec_start);
7661 gcc_assert (sctx);
7663 lower_omp (gimple_omp_body_ptr (sec_start), sctx);
7664 gsi_insert_seq_after (&tgsi, gimple_omp_body (sec_start),
7665 GSI_CONTINUE_LINKING);
7666 gimple_omp_set_body (sec_start, NULL);
7668 if (gsi_one_before_end_p (tgsi))
7670 gimple_seq l = NULL;
7671 lower_lastprivate_clauses (gimple_omp_sections_clauses (stmt), NULL,
7672 &ilist, &l, &clist, ctx);
7673 gsi_insert_seq_after (&tgsi, l, GSI_CONTINUE_LINKING);
7674 gimple_omp_section_set_last (sec_start);
7677 gsi_insert_after (&tgsi, gimple_build_omp_return (false),
7678 GSI_CONTINUE_LINKING);
7681 block = make_node (BLOCK);
7682 bind = gimple_build_bind (NULL, new_body, block);
7684 olist = NULL;
7685 lower_reduction_clauses (gimple_omp_sections_clauses (stmt), &olist,
7686 &clist, ctx);
7687 if (clist)
7689 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START);
7690 gcall *g = gimple_build_call (fndecl, 0);
7691 gimple_seq_add_stmt (&olist, g);
7692 gimple_seq_add_seq (&olist, clist);
7693 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END);
7694 g = gimple_build_call (fndecl, 0);
7695 gimple_seq_add_stmt (&olist, g);
7698 block = make_node (BLOCK);
7699 new_stmt = gimple_build_bind (NULL, NULL, block);
7700 gsi_replace (gsi_p, new_stmt, true);
7702 pop_gimplify_context (new_stmt);
7703 gimple_bind_append_vars (new_stmt, ctx->block_vars);
7704 BLOCK_VARS (block) = gimple_bind_vars (bind);
7705 if (BLOCK_VARS (block))
7706 TREE_USED (block) = 1;
7708 new_body = NULL;
7709 gimple_seq_add_seq (&new_body, ilist);
7710 gimple_seq_add_stmt (&new_body, stmt);
7711 gimple_seq_add_stmt (&new_body, gimple_build_omp_sections_switch ());
7712 gimple_seq_add_stmt (&new_body, bind);
7714 t = gimple_build_omp_continue (control, control);
7715 gimple_seq_add_stmt (&new_body, t);
7717 gimple_seq_add_seq (&new_body, olist);
7718 if (ctx->cancellable)
7719 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
7720 gimple_seq_add_seq (&new_body, dlist);
7722 new_body = maybe_catch_exception (new_body);
7724 bool nowait = omp_find_clause (gimple_omp_sections_clauses (stmt),
7725 OMP_CLAUSE_NOWAIT) != NULL_TREE;
7726 t = gimple_build_omp_return (nowait);
7727 gimple_seq_add_stmt (&new_body, t);
7728 gimple_seq_add_seq (&new_body, tred_dlist);
7729 maybe_add_implicit_barrier_cancel (ctx, t, &new_body);
7731 if (rclauses)
7732 OMP_CLAUSE_DECL (rclauses) = rtmp;
7734 gimple_bind_set_body (new_stmt, new_body);
7738 /* A subroutine of lower_omp_single. Expand the simple form of
7739 a GIMPLE_OMP_SINGLE, without a copyprivate clause:
7741 if (GOMP_single_start ())
7742 BODY;
7743 [ GOMP_barrier (); ] -> unless 'nowait' is present.
7745 FIXME. It may be better to delay expanding the logic of this until
7746 pass_expand_omp. The expanded logic may make the job more difficult
7747 for a synchronization analysis pass. */
7749 static void
7750 lower_omp_single_simple (gomp_single *single_stmt, gimple_seq *pre_p)
7752 location_t loc = gimple_location (single_stmt);
7753 tree tlabel = create_artificial_label (loc);
7754 tree flabel = create_artificial_label (loc);
7755 gimple *call, *cond;
7756 tree lhs, decl;
7758 decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_START);
7759 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (decl)));
7760 call = gimple_build_call (decl, 0);
7761 gimple_call_set_lhs (call, lhs);
7762 gimple_seq_add_stmt (pre_p, call);
7764 cond = gimple_build_cond (EQ_EXPR, lhs,
7765 fold_convert_loc (loc, TREE_TYPE (lhs),
7766 boolean_true_node),
7767 tlabel, flabel);
7768 gimple_seq_add_stmt (pre_p, cond);
7769 gimple_seq_add_stmt (pre_p, gimple_build_label (tlabel));
7770 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
7771 gimple_seq_add_stmt (pre_p, gimple_build_label (flabel));
7775 /* A subroutine of lower_omp_single. Expand the simple form of
7776 a GIMPLE_OMP_SINGLE, with a copyprivate clause:
7778 #pragma omp single copyprivate (a, b, c)
7780 Create a new structure to hold copies of 'a', 'b' and 'c' and emit:
7783 if ((copyout_p = GOMP_single_copy_start ()) == NULL)
7785 BODY;
7786 copyout.a = a;
7787 copyout.b = b;
7788 copyout.c = c;
7789 GOMP_single_copy_end (&copyout);
7791 else
7793 a = copyout_p->a;
7794 b = copyout_p->b;
7795 c = copyout_p->c;
7797 GOMP_barrier ();
7800 FIXME. It may be better to delay expanding the logic of this until
7801 pass_expand_omp. The expanded logic may make the job more difficult
7802 for a synchronization analysis pass. */
7804 static void
7805 lower_omp_single_copy (gomp_single *single_stmt, gimple_seq *pre_p,
7806 omp_context *ctx)
7808 tree ptr_type, t, l0, l1, l2, bfn_decl;
7809 gimple_seq copyin_seq;
7810 location_t loc = gimple_location (single_stmt);
7812 ctx->sender_decl = create_tmp_var (ctx->record_type, ".omp_copy_o");
7814 ptr_type = build_pointer_type (ctx->record_type);
7815 ctx->receiver_decl = create_tmp_var (ptr_type, ".omp_copy_i");
7817 l0 = create_artificial_label (loc);
7818 l1 = create_artificial_label (loc);
7819 l2 = create_artificial_label (loc);
7821 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_START);
7822 t = build_call_expr_loc (loc, bfn_decl, 0);
7823 t = fold_convert_loc (loc, ptr_type, t);
7824 gimplify_assign (ctx->receiver_decl, t, pre_p);
7826 t = build2 (EQ_EXPR, boolean_type_node, ctx->receiver_decl,
7827 build_int_cst (ptr_type, 0));
7828 t = build3 (COND_EXPR, void_type_node, t,
7829 build_and_jump (&l0), build_and_jump (&l1));
7830 gimplify_and_add (t, pre_p);
7832 gimple_seq_add_stmt (pre_p, gimple_build_label (l0));
7834 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
7836 copyin_seq = NULL;
7837 lower_copyprivate_clauses (gimple_omp_single_clauses (single_stmt), pre_p,
7838 &copyin_seq, ctx);
7840 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
7841 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_END);
7842 t = build_call_expr_loc (loc, bfn_decl, 1, t);
7843 gimplify_and_add (t, pre_p);
7845 t = build_and_jump (&l2);
7846 gimplify_and_add (t, pre_p);
7848 gimple_seq_add_stmt (pre_p, gimple_build_label (l1));
7850 gimple_seq_add_seq (pre_p, copyin_seq);
7852 gimple_seq_add_stmt (pre_p, gimple_build_label (l2));
7856 /* Expand code for an OpenMP single directive. */
7858 static void
7859 lower_omp_single (gimple_stmt_iterator *gsi_p, omp_context *ctx)
7861 tree block;
7862 gomp_single *single_stmt = as_a <gomp_single *> (gsi_stmt (*gsi_p));
7863 gbind *bind;
7864 gimple_seq bind_body, bind_body_tail = NULL, dlist;
7866 push_gimplify_context ();
7868 block = make_node (BLOCK);
7869 bind = gimple_build_bind (NULL, NULL, block);
7870 gsi_replace (gsi_p, bind, true);
7871 bind_body = NULL;
7872 dlist = NULL;
7873 lower_rec_input_clauses (gimple_omp_single_clauses (single_stmt),
7874 &bind_body, &dlist, ctx, NULL);
7875 lower_omp (gimple_omp_body_ptr (single_stmt), ctx);
7877 gimple_seq_add_stmt (&bind_body, single_stmt);
7879 if (ctx->record_type)
7880 lower_omp_single_copy (single_stmt, &bind_body, ctx);
7881 else
7882 lower_omp_single_simple (single_stmt, &bind_body);
7884 gimple_omp_set_body (single_stmt, NULL);
7886 gimple_seq_add_seq (&bind_body, dlist);
7888 bind_body = maybe_catch_exception (bind_body);
7890 bool nowait = omp_find_clause (gimple_omp_single_clauses (single_stmt),
7891 OMP_CLAUSE_NOWAIT) != NULL_TREE;
7892 gimple *g = gimple_build_omp_return (nowait);
7893 gimple_seq_add_stmt (&bind_body_tail, g);
7894 maybe_add_implicit_barrier_cancel (ctx, g, &bind_body_tail);
7895 if (ctx->record_type)
7897 gimple_stmt_iterator gsi = gsi_start (bind_body_tail);
7898 tree clobber = build_clobber (ctx->record_type);
7899 gsi_insert_after (&gsi, gimple_build_assign (ctx->sender_decl,
7900 clobber), GSI_SAME_STMT);
7902 gimple_seq_add_seq (&bind_body, bind_body_tail);
7903 gimple_bind_set_body (bind, bind_body);
7905 pop_gimplify_context (bind);
7907 gimple_bind_append_vars (bind, ctx->block_vars);
7908 BLOCK_VARS (block) = ctx->block_vars;
7909 if (BLOCK_VARS (block))
7910 TREE_USED (block) = 1;
7914 /* Expand code for an OpenMP master directive. */
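/* I.e. roughly

     if (omp_get_thread_num () == 0)
       BODY;

   with no implied barrier; the GIMPLE_OMP_RETURN built below is a
   nowait one.  */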
7916 static void
7917 lower_omp_master (gimple_stmt_iterator *gsi_p, omp_context *ctx)
7919 tree block, lab = NULL, x, bfn_decl;
7920 gimple *stmt = gsi_stmt (*gsi_p);
7921 gbind *bind;
7922 location_t loc = gimple_location (stmt);
7923 gimple_seq tseq;
7925 push_gimplify_context ();
7927 block = make_node (BLOCK);
7928 bind = gimple_build_bind (NULL, NULL, block);
7929 gsi_replace (gsi_p, bind, true);
7930 gimple_bind_add_stmt (bind, stmt);
7932 bfn_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
7933 x = build_call_expr_loc (loc, bfn_decl, 0);
7934 x = build2 (EQ_EXPR, boolean_type_node, x, integer_zero_node);
7935 x = build3 (COND_EXPR, void_type_node, x, NULL, build_and_jump (&lab));
7936 tseq = NULL;
7937 gimplify_and_add (x, &tseq);
7938 gimple_bind_add_seq (bind, tseq);
7940 lower_omp (gimple_omp_body_ptr (stmt), ctx);
7941 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
7942 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
7943 gimple_omp_set_body (stmt, NULL);
7945 gimple_bind_add_stmt (bind, gimple_build_label (lab));
7947 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
7949 pop_gimplify_context (bind);
7951 gimple_bind_append_vars (bind, ctx->block_vars);
7952 BLOCK_VARS (block) = ctx->block_vars;
7955 /* Helper function for lower_omp_task_reductions. For a specific PASS
7956 find out the next clause that should be processed, or return false
7957 if all have been processed already. */
7959 static inline bool
7960 omp_task_reduction_iterate (int pass, enum tree_code code,
7961 enum omp_clause_code ccode, tree *c, tree *decl,
7962 tree *type, tree *next)
7964 for (; *c; *c = omp_find_clause (OMP_CLAUSE_CHAIN (*c), ccode))
7966 if (ccode == OMP_CLAUSE_REDUCTION
7967 && code != OMP_TASKLOOP
7968 && !OMP_CLAUSE_REDUCTION_TASK (*c))
7969 continue;
7970 *decl = OMP_CLAUSE_DECL (*c);
7971 *type = TREE_TYPE (*decl);
7972 if (TREE_CODE (*decl) == MEM_REF)
7974 if (pass != 1)
7975 continue;
7977 else
7979 if (omp_is_reference (*decl))
7980 *type = TREE_TYPE (*type);
7981 if (pass != (!TREE_CONSTANT (TYPE_SIZE_UNIT (*type))))
7982 continue;
7984 *next = omp_find_clause (OMP_CLAUSE_CHAIN (*c), ccode);
7985 return true;
7987 *decl = NULL_TREE;
7988 *type = NULL_TREE;
7989 *next = NULL_TREE;
7990 return false;
7993 /* Lower task_reduction and reduction clauses (the latter only those
7994 with the task modifier, unless CODE is OMP_TASKLOOP). Register the
7995 mapping of those in the START sequence; reduce and unregister them in the END sequence. */
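/* For example, for

     #pragma omp taskgroup task_reduction (+:x)

   this registers X with GOMP_taskgroup_reduction_register in the
   START sequence, and in the END sequence walks every thread's
   private copy, merges it into X and unregisters the descriptor.  */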
7997 static void
7998 lower_omp_task_reductions (omp_context *ctx, enum tree_code code, tree clauses,
7999 gimple_seq *start, gimple_seq *end)
8001 enum omp_clause_code ccode
8002 = (code == OMP_TASKGROUP
8003 ? OMP_CLAUSE_TASK_REDUCTION : OMP_CLAUSE_REDUCTION);
8004 tree cancellable = NULL_TREE;
8005 clauses = omp_task_reductions_find_first (clauses, code, ccode);
8006 if (clauses == NULL_TREE)
8007 return;
8008 if (code == OMP_FOR || code == OMP_SECTIONS)
8010 for (omp_context *outer = ctx->outer; outer; outer = outer->outer)
8011 if (gimple_code (outer->stmt) == GIMPLE_OMP_PARALLEL
8012 && outer->cancellable)
8014 cancellable = error_mark_node;
8015 break;
8017 else if (gimple_code (outer->stmt) != GIMPLE_OMP_TASKGROUP)
8018 break;
8020 tree record_type = lang_hooks.types.make_type (RECORD_TYPE);
8021 tree *last = &TYPE_FIELDS (record_type);
8022 unsigned cnt = 0;
8023 if (cancellable)
8025 tree field = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
8026 ptr_type_node);
8027 tree ifield = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
8028 integer_type_node);
8029 *last = field;
8030 DECL_CHAIN (field) = ifield;
8031 last = &DECL_CHAIN (ifield);
8032 DECL_CONTEXT (field) = record_type;
8033 if (TYPE_ALIGN (record_type) < DECL_ALIGN (field))
8034 SET_TYPE_ALIGN (record_type, DECL_ALIGN (field));
8035 DECL_CONTEXT (ifield) = record_type;
8036 if (TYPE_ALIGN (record_type) < DECL_ALIGN (ifield))
8037 SET_TYPE_ALIGN (record_type, DECL_ALIGN (ifield));
8039 for (int pass = 0; pass < 2; pass++)
8041 tree decl, type, next;
8042 for (tree c = clauses;
8043 omp_task_reduction_iterate (pass, code, ccode,
8044 &c, &decl, &type, &next); c = next)
8046 ++cnt;
8047 tree new_type = type;
8048 if (ctx->outer)
8049 new_type = remap_type (type, &ctx->outer->cb);
8050 tree field
8051 = build_decl (OMP_CLAUSE_LOCATION (c), FIELD_DECL,
8052 DECL_P (decl) ? DECL_NAME (decl) : NULL_TREE,
8053 new_type);
8054 if (DECL_P (decl) && type == TREE_TYPE (decl))
8056 SET_DECL_ALIGN (field, DECL_ALIGN (decl));
8057 DECL_USER_ALIGN (field) = DECL_USER_ALIGN (decl);
8058 TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (decl);
8060 else
8061 SET_DECL_ALIGN (field, TYPE_ALIGN (type));
8062 DECL_CONTEXT (field) = record_type;
8063 if (TYPE_ALIGN (record_type) < DECL_ALIGN (field))
8064 SET_TYPE_ALIGN (record_type, DECL_ALIGN (field));
8065 *last = field;
8066 last = &DECL_CHAIN (field);
8067 tree bfield
8068 = build_decl (OMP_CLAUSE_LOCATION (c), FIELD_DECL, NULL_TREE,
8069 boolean_type_node);
8070 DECL_CONTEXT (bfield) = record_type;
8071 if (TYPE_ALIGN (record_type) < DECL_ALIGN (bfield))
8072 SET_TYPE_ALIGN (record_type, DECL_ALIGN (bfield));
8073 *last = bfield;
8074 last = &DECL_CHAIN (bfield);
8077 *last = NULL_TREE;
8078 layout_type (record_type);
8080 /* Build up an array which registers with the runtime all the reductions
8081 and deregisters them at the end. Format documented in libgomp/task.c. */
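/* As built below, the array starts out roughly as

     [0] number of reductions (CNT)
     [1] size of the per-thread block, rounded up to whole cache lines
     [2] MAX (alignment of the block, cache line size)
     [3] -1 and [4] 0 (slots [5] and [6] are left to the runtime)

   followed by one triple per reduction, of which this function fills
   the first two elements: the address of the original variable at
   [7 + 3*i] and the byte offset of its field at [7 + 3*i + 1].  */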
8082 tree atype = build_array_type_nelts (pointer_sized_int_node, 7 + cnt * 3);
8083 tree avar = create_tmp_var_raw (atype);
8084 gimple_add_tmp_var (avar);
8085 TREE_ADDRESSABLE (avar) = 1;
8086 tree r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_zero_node,
8087 NULL_TREE, NULL_TREE);
8088 tree t = build_int_cst (pointer_sized_int_node, cnt);
8089 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
8090 gimple_seq seq = NULL;
8091 tree sz = fold_convert (pointer_sized_int_node,
8092 TYPE_SIZE_UNIT (record_type));
8093 int cachesz = 64;
8094 sz = fold_build2 (PLUS_EXPR, pointer_sized_int_node, sz,
8095 build_int_cst (pointer_sized_int_node, cachesz - 1));
8096 sz = fold_build2 (BIT_AND_EXPR, pointer_sized_int_node, sz,
8097 build_int_cst (pointer_sized_int_node, ~(cachesz - 1)));
8098 ctx->task_reductions.create (1 + cnt);
8099 ctx->task_reduction_map = new hash_map<tree, unsigned>;
8100 ctx->task_reductions.quick_push (TREE_CODE (sz) == INTEGER_CST
8101 ? sz : NULL_TREE);
8102 sz = force_gimple_operand (sz, &seq, true, NULL_TREE);
8103 gimple_seq_add_seq (start, seq);
8104 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_one_node,
8105 NULL_TREE, NULL_TREE);
8106 gimple_seq_add_stmt (start, gimple_build_assign (r, sz));
8107 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (2),
8108 NULL_TREE, NULL_TREE);
8109 t = build_int_cst (pointer_sized_int_node,
8110 MAX (TYPE_ALIGN_UNIT (record_type), (unsigned) cachesz));
8111 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
8112 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (3),
8113 NULL_TREE, NULL_TREE);
8114 t = build_int_cst (pointer_sized_int_node, -1);
8115 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
8116 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (4),
8117 NULL_TREE, NULL_TREE);
8118 t = build_int_cst (pointer_sized_int_node, 0);
8119 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
8121 /* In end, build a loop that iterates from 0 to < omp_get_num_threads ()
8122 and for each task reduction checks a bool right after the private variable
8123 within that thread's chunk; if the bool is clear, it hasn't been
8124 initialized and thus isn't going to be reduced or destructed; otherwise
8125 reduce and destruct it. */
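/* I.e. roughly

     for (idx = 0; idx < num_thr_sz; idx++, data += sz)
       for each reduction clause C:
         if (<the bool following C's copy in the block at DATA>)
           {
             <merge that copy into the original variable>;
             <run its destructor, if any>;
           }

   where DATA is initialized from AVAR[2], which the runtime has
   pointed at the first per-thread block.  */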
8126 tree idx = create_tmp_var (size_type_node);
8127 gimple_seq_add_stmt (end, gimple_build_assign (idx, size_zero_node));
8128 tree num_thr_sz = create_tmp_var (size_type_node);
8129 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
8130 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
8131 tree lab3 = NULL_TREE;
8132 gimple *g;
8133 if (code == OMP_FOR || code == OMP_SECTIONS)
8135 /* For worksharing constructs, perform the reduction loop only in the
8136 master thread, with the exception of cancelled implicit barriers - then only handle
8137 the current thread. */
8138 tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
8139 t = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
8140 tree thr_num = create_tmp_var (integer_type_node);
8141 g = gimple_build_call (t, 0);
8142 gimple_call_set_lhs (g, thr_num);
8143 gimple_seq_add_stmt (end, g);
8144 if (cancellable)
8146 tree c;
8147 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
8148 tree lab6 = create_artificial_label (UNKNOWN_LOCATION);
8149 lab3 = create_artificial_label (UNKNOWN_LOCATION);
8150 if (code == OMP_FOR)
8151 c = gimple_omp_for_clauses (ctx->stmt);
8152 else /* if (code == OMP_SECTIONS) */
8153 c = gimple_omp_sections_clauses (ctx->stmt);
8154 c = OMP_CLAUSE_DECL (omp_find_clause (c, OMP_CLAUSE__REDUCTEMP_));
8155 cancellable = c;
8156 g = gimple_build_cond (NE_EXPR, c, build_zero_cst (TREE_TYPE (c)),
8157 lab5, lab6);
8158 gimple_seq_add_stmt (end, g);
8159 gimple_seq_add_stmt (end, gimple_build_label (lab5));
8160 g = gimple_build_assign (idx, NOP_EXPR, thr_num);
8161 gimple_seq_add_stmt (end, g);
8162 g = gimple_build_assign (num_thr_sz, PLUS_EXPR, idx,
8163 build_one_cst (TREE_TYPE (idx)));
8164 gimple_seq_add_stmt (end, g);
8165 gimple_seq_add_stmt (end, gimple_build_goto (lab3));
8166 gimple_seq_add_stmt (end, gimple_build_label (lab6));
8168 g = gimple_build_cond (NE_EXPR, thr_num, integer_zero_node, lab2, lab4);
8169 gimple_seq_add_stmt (end, g);
8170 gimple_seq_add_stmt (end, gimple_build_label (lab4));
8172 if (code != OMP_PARALLEL)
8174 t = builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS);
8175 tree num_thr = create_tmp_var (integer_type_node);
8176 g = gimple_build_call (t, 0);
8177 gimple_call_set_lhs (g, num_thr);
8178 gimple_seq_add_stmt (end, g);
8179 g = gimple_build_assign (num_thr_sz, NOP_EXPR, num_thr);
8180 gimple_seq_add_stmt (end, g);
8181 if (cancellable)
8182 gimple_seq_add_stmt (end, gimple_build_label (lab3));
8184 else
8186 tree c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
8187 OMP_CLAUSE__REDUCTEMP_);
8188 t = fold_convert (pointer_sized_int_node, OMP_CLAUSE_DECL (c));
8189 t = fold_convert (size_type_node, t);
8190 gimplify_assign (num_thr_sz, t, end);
8192 t = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (2),
8193 NULL_TREE, NULL_TREE);
8194 tree data = create_tmp_var (pointer_sized_int_node);
8195 gimple_seq_add_stmt (end, gimple_build_assign (data, t));
8196 gimple_seq_add_stmt (end, gimple_build_label (lab1));
8197 tree ptr;
8198 if (TREE_CODE (TYPE_SIZE_UNIT (record_type)) == INTEGER_CST)
8199 ptr = create_tmp_var (build_pointer_type (record_type));
8200 else
8201 ptr = create_tmp_var (ptr_type_node);
8202 gimple_seq_add_stmt (end, gimple_build_assign (ptr, NOP_EXPR, data));
8204 tree field = TYPE_FIELDS (record_type);
8205 cnt = 0;
8206 if (cancellable)
8207 field = DECL_CHAIN (DECL_CHAIN (field));
8208 for (int pass = 0; pass < 2; pass++)
8210 tree decl, type, next;
8211 for (tree c = clauses;
8212 omp_task_reduction_iterate (pass, code, ccode,
8213 &c, &decl, &type, &next); c = next)
8215 tree var = decl, ref;
8216 if (TREE_CODE (decl) == MEM_REF)
8218 var = TREE_OPERAND (var, 0);
8219 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
8220 var = TREE_OPERAND (var, 0);
8221 tree v = var;
8222 if (TREE_CODE (var) == ADDR_EXPR)
8223 var = TREE_OPERAND (var, 0);
8224 else if (TREE_CODE (var) == INDIRECT_REF)
8225 var = TREE_OPERAND (var, 0);
8226 tree orig_var = var;
8227 if (is_variable_sized (var))
8229 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
8230 var = DECL_VALUE_EXPR (var);
8231 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
8232 var = TREE_OPERAND (var, 0);
8233 gcc_assert (DECL_P (var));
8235 t = ref = maybe_lookup_decl_in_outer_ctx (var, ctx);
8236 if (orig_var != var)
8237 gcc_assert (TREE_CODE (v) == ADDR_EXPR);
8238 else if (TREE_CODE (v) == ADDR_EXPR)
8239 t = build_fold_addr_expr (t);
8240 else if (TREE_CODE (v) == INDIRECT_REF)
8241 t = build_fold_indirect_ref (t);
8242 if (TREE_CODE (TREE_OPERAND (decl, 0)) == POINTER_PLUS_EXPR)
8244 tree b = TREE_OPERAND (TREE_OPERAND (decl, 0), 1);
8245 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
8246 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, b);
8248 if (!integer_zerop (TREE_OPERAND (decl, 1)))
8249 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t,
8250 fold_convert (size_type_node,
8251 TREE_OPERAND (decl, 1)));
8253 else
8255 t = ref = maybe_lookup_decl_in_outer_ctx (var, ctx);
8256 if (!omp_is_reference (decl))
8257 t = build_fold_addr_expr (t);
8259 t = fold_convert (pointer_sized_int_node, t);
8260 seq = NULL;
8261 t = force_gimple_operand (t, &seq, true, NULL_TREE);
8262 gimple_seq_add_seq (start, seq);
8263 r = build4 (ARRAY_REF, pointer_sized_int_node, avar,
8264 size_int (7 + cnt * 3), NULL_TREE, NULL_TREE);
8265 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
8266 t = unshare_expr (byte_position (field));
8267 t = fold_convert (pointer_sized_int_node, t);
8268 ctx->task_reduction_map->put (c, cnt);
8269 ctx->task_reductions.quick_push (TREE_CODE (t) == INTEGER_CST
8270 ? t : NULL_TREE);
8271 seq = NULL;
8272 t = force_gimple_operand (t, &seq, true, NULL_TREE);
8273 gimple_seq_add_seq (start, seq);
8274 r = build4 (ARRAY_REF, pointer_sized_int_node, avar,
8275 size_int (7 + cnt * 3 + 1), NULL_TREE, NULL_TREE);
8276 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
8278 tree bfield = DECL_CHAIN (field);
8279 tree cond;
8280 if (code == OMP_PARALLEL || code == OMP_FOR || code == OMP_SECTIONS)
8281 /* In parallel or worksharing regions all threads unconditionally
8282 initialize all their task reduction private variables. */
8283 cond = boolean_true_node;
8284 else if (TREE_TYPE (ptr) == ptr_type_node)
8286 cond = build2 (POINTER_PLUS_EXPR, ptr_type_node, ptr,
8287 unshare_expr (byte_position (bfield)));
8288 seq = NULL;
8289 cond = force_gimple_operand (cond, &seq, true, NULL_TREE);
8290 gimple_seq_add_seq (end, seq);
8291 tree pbool = build_pointer_type (TREE_TYPE (bfield));
8292 cond = build2 (MEM_REF, TREE_TYPE (bfield), cond,
8293 build_int_cst (pbool, 0));
8295 else
8296 cond = build3 (COMPONENT_REF, TREE_TYPE (bfield),
8297 build_simple_mem_ref (ptr), bfield, NULL_TREE);
8298 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
8299 tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
8300 tree condv = create_tmp_var (boolean_type_node);
8301 gimple_seq_add_stmt (end, gimple_build_assign (condv, cond));
8302 g = gimple_build_cond (NE_EXPR, condv, boolean_false_node,
8303 lab3, lab4);
8304 gimple_seq_add_stmt (end, g);
8305 gimple_seq_add_stmt (end, gimple_build_label (lab3));
8306 if (cancellable && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) == NULL_TREE)
8308 /* If this reduction doesn't need destruction and parallel
8309 has been cancelled, there is nothing to do for this
8310 reduction, so jump around the merge operation. */
8311 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
8312 g = gimple_build_cond (NE_EXPR, cancellable,
8313 build_zero_cst (TREE_TYPE (cancellable)),
8314 lab4, lab5);
8315 gimple_seq_add_stmt (end, g);
8316 gimple_seq_add_stmt (end, gimple_build_label (lab5));
8319 tree new_var;
8320 if (TREE_TYPE (ptr) == ptr_type_node)
8322 new_var = build2 (POINTER_PLUS_EXPR, ptr_type_node, ptr,
8323 unshare_expr (byte_position (field)));
8324 seq = NULL;
8325 new_var = force_gimple_operand (new_var, &seq, true, NULL_TREE);
8326 gimple_seq_add_seq (end, seq);
8327 tree pbool = build_pointer_type (TREE_TYPE (field));
8328 new_var = build2 (MEM_REF, TREE_TYPE (field), new_var,
8329 build_int_cst (pbool, 0));
8331 else
8332 new_var = build3 (COMPONENT_REF, TREE_TYPE (field),
8333 build_simple_mem_ref (ptr), field, NULL_TREE);
8335 enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
8336 if (TREE_CODE (decl) != MEM_REF && omp_is_reference (decl))
8337 ref = build_simple_mem_ref (ref);
8338 /* reduction(-:var) sums up the partial results, so it acts
8339 identically to reduction(+:var). */
8340 if (rcode == MINUS_EXPR)
8341 rcode = PLUS_EXPR;
8342 if (TREE_CODE (decl) == MEM_REF)
8344 tree type = TREE_TYPE (new_var);
8345 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
8346 tree i = create_tmp_var (TREE_TYPE (v));
8347 tree ptype = build_pointer_type (TREE_TYPE (type));
8348 if (DECL_P (v))
8350 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
8351 tree vv = create_tmp_var (TREE_TYPE (v));
8352 gimplify_assign (vv, v, start);
8353 v = vv;
8355 ref = build4 (ARRAY_REF, pointer_sized_int_node, avar,
8356 size_int (7 + cnt * 3), NULL_TREE, NULL_TREE);
8357 new_var = build_fold_addr_expr (new_var);
8358 new_var = fold_convert (ptype, new_var);
8359 ref = fold_convert (ptype, ref);
8360 tree m = create_tmp_var (ptype);
8361 gimplify_assign (m, new_var, end);
8362 new_var = m;
8363 m = create_tmp_var (ptype);
8364 gimplify_assign (m, ref, end);
8365 ref = m;
8366 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), end);
8367 tree body = create_artificial_label (UNKNOWN_LOCATION);
8368 tree endl = create_artificial_label (UNKNOWN_LOCATION);
8369 gimple_seq_add_stmt (end, gimple_build_label (body));
8370 tree priv = build_simple_mem_ref (new_var);
8371 tree out = build_simple_mem_ref (ref);
8372 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
8374 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
8375 tree decl_placeholder
8376 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
8377 tree lab6 = NULL_TREE;
8378 if (cancellable)
8380 /* If this reduction needs destruction and parallel
8381 has been cancelled, jump around the merge operation
8382 to the destruction. */
8383 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
8384 lab6 = create_artificial_label (UNKNOWN_LOCATION);
8385 tree zero = build_zero_cst (TREE_TYPE (cancellable));
8386 g = gimple_build_cond (NE_EXPR, cancellable, zero,
8387 lab6, lab5);
8388 gimple_seq_add_stmt (end, g);
8389 gimple_seq_add_stmt (end, gimple_build_label (lab5));
8391 SET_DECL_VALUE_EXPR (placeholder, out);
8392 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
8393 SET_DECL_VALUE_EXPR (decl_placeholder, priv);
8394 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
8395 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
8396 gimple_seq_add_seq (end,
8397 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
8398 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
8399 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
8401 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
8402 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
8404 if (cancellable)
8405 gimple_seq_add_stmt (end, gimple_build_label (lab6));
8406 tree x = lang_hooks.decls.omp_clause_dtor (c, priv);
8407 if (x)
8409 gimple_seq tseq = NULL;
8410 gimplify_stmt (&x, &tseq);
8411 gimple_seq_add_seq (end, tseq);
8414 else
8416 tree x = build2 (rcode, TREE_TYPE (out), out, priv);
8417 out = unshare_expr (out);
8418 gimplify_assign (out, x, end);
8420 gimple *g
8421 = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
8422 TYPE_SIZE_UNIT (TREE_TYPE (type)));
8423 gimple_seq_add_stmt (end, g);
8424 g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
8425 TYPE_SIZE_UNIT (TREE_TYPE (type)));
8426 gimple_seq_add_stmt (end, g);
8427 g = gimple_build_assign (i, PLUS_EXPR, i,
8428 build_int_cst (TREE_TYPE (i), 1));
8429 gimple_seq_add_stmt (end, g);
8430 g = gimple_build_cond (LE_EXPR, i, v, body, endl);
8431 gimple_seq_add_stmt (end, g);
8432 gimple_seq_add_stmt (end, gimple_build_label (endl));
8434 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
8436 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
8437 tree oldv = NULL_TREE;
8438 tree lab6 = NULL_TREE;
8439 if (cancellable)
8441 /* If this reduction needs destruction and parallel
8442 has been cancelled, jump around the merge operation
8443 to the destruction. */
8444 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
8445 lab6 = create_artificial_label (UNKNOWN_LOCATION);
8446 tree zero = build_zero_cst (TREE_TYPE (cancellable));
8447 g = gimple_build_cond (NE_EXPR, cancellable, zero,
8448 lab6, lab5);
8449 gimple_seq_add_stmt (end, g);
8450 gimple_seq_add_stmt (end, gimple_build_label (lab5));
8452 if (omp_is_reference (decl)
8453 && !useless_type_conversion_p (TREE_TYPE (placeholder),
8454 TREE_TYPE (ref)))
8455 ref = build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c), ref);
8456 ref = build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c), ref);
8457 tree refv = create_tmp_var (TREE_TYPE (ref));
8458 gimplify_assign (refv, ref, end);
8459 ref = build_simple_mem_ref_loc (OMP_CLAUSE_LOCATION (c), refv);
8460 SET_DECL_VALUE_EXPR (placeholder, ref);
8461 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
8462 tree d = maybe_lookup_decl (decl, ctx);
8463 gcc_assert (d);
8464 if (DECL_HAS_VALUE_EXPR_P (d))
8465 oldv = DECL_VALUE_EXPR (d);
8466 if (omp_is_reference (var))
8468 tree v = fold_convert (TREE_TYPE (d),
8469 build_fold_addr_expr (new_var));
8470 SET_DECL_VALUE_EXPR (d, v);
8472 else
8473 SET_DECL_VALUE_EXPR (d, new_var);
8474 DECL_HAS_VALUE_EXPR_P (d) = 1;
8475 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
8476 if (oldv)
8477 SET_DECL_VALUE_EXPR (d, oldv);
8478 else
8480 SET_DECL_VALUE_EXPR (d, NULL_TREE);
8481 DECL_HAS_VALUE_EXPR_P (d) = 0;
8483 gimple_seq_add_seq (end, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
8484 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
8485 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
8486 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
8487 if (cancellable)
8488 gimple_seq_add_stmt (end, gimple_build_label (lab6));
8489 tree x = lang_hooks.decls.omp_clause_dtor (c, new_var);
8490 if (x)
8492 gimple_seq tseq = NULL;
8493 gimplify_stmt (&x, &tseq);
8494 gimple_seq_add_seq (end, tseq);
8497 else
8499 tree x = build2 (rcode, TREE_TYPE (ref), ref, new_var);
8500 ref = unshare_expr (ref);
8501 gimplify_assign (ref, x, end);
8503 gimple_seq_add_stmt (end, gimple_build_label (lab4));
8504 ++cnt;
8505 field = DECL_CHAIN (bfield);
8509 if (code == OMP_TASKGROUP)
8511 t = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_REGISTER);
8512 g = gimple_build_call (t, 1, build_fold_addr_expr (avar));
8513 gimple_seq_add_stmt (start, g);
8515 else
8517 tree c;
8518 if (code == OMP_FOR)
8519 c = gimple_omp_for_clauses (ctx->stmt);
8520 else if (code == OMP_SECTIONS)
8521 c = gimple_omp_sections_clauses (ctx->stmt);
8522 else
8523 c = gimple_omp_taskreg_clauses (ctx->stmt);
8524 c = omp_find_clause (c, OMP_CLAUSE__REDUCTEMP_);
8525 t = fold_convert (TREE_TYPE (OMP_CLAUSE_DECL (c)),
8526 build_fold_addr_expr (avar));
8527 gimplify_assign (OMP_CLAUSE_DECL (c), t, start);
8530 gimple_seq_add_stmt (end, gimple_build_assign (data, PLUS_EXPR, data, sz));
8531 gimple_seq_add_stmt (end, gimple_build_assign (idx, PLUS_EXPR, idx,
8532 size_one_node));
8533 g = gimple_build_cond (NE_EXPR, idx, num_thr_sz, lab1, lab2);
8534 gimple_seq_add_stmt (end, g);
8535 gimple_seq_add_stmt (end, gimple_build_label (lab2));
8536 if (code == OMP_FOR || code == OMP_SECTIONS)
8538 enum built_in_function bfn
8539 = BUILT_IN_GOMP_WORKSHARE_TASK_REDUCTION_UNREGISTER;
8540 t = builtin_decl_explicit (bfn);
8541 tree c_bool_type = TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (t)));
8542 tree arg;
8543 if (cancellable)
8545 arg = create_tmp_var (c_bool_type);
8546 gimple_seq_add_stmt (end, gimple_build_assign (arg, NOP_EXPR,
8547 cancellable));
8549 else
8550 arg = build_int_cst (c_bool_type, 0);
8551 g = gimple_build_call (t, 1, arg);
8553 else
8555 t = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_UNREGISTER);
8556 g = gimple_build_call (t, 1, build_fold_addr_expr (avar));
8558 gimple_seq_add_stmt (end, g);
8559 t = build_constructor (atype, NULL);
8560 TREE_THIS_VOLATILE (t) = 1;
8561 gimple_seq_add_stmt (end, gimple_build_assign (avar, t));
8564 /* Expand code for an OpenMP taskgroup directive. */
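/* I.e. roughly

     GOMP_taskgroup_start ();
     <register task reductions, if any>
     BODY;
     GIMPLE_OMP_RETURN
     <reduce and unregister task reductions>  */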
8566 static void
8567 lower_omp_taskgroup (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8569 gimple *stmt = gsi_stmt (*gsi_p);
8570 gcall *x;
8571 gbind *bind;
8572 gimple_seq dseq = NULL;
8573 tree block = make_node (BLOCK);
8575 bind = gimple_build_bind (NULL, NULL, block);
8576 gsi_replace (gsi_p, bind, true);
8577 gimple_bind_add_stmt (bind, stmt);
8579 push_gimplify_context ();
8581 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_START), 0);
8583 gimple_bind_add_stmt (bind, x);
8585 lower_omp_task_reductions (ctx, OMP_TASKGROUP,
8586 gimple_omp_taskgroup_clauses (stmt),
8587 gimple_bind_body_ptr (bind), &dseq);
8589 lower_omp (gimple_omp_body_ptr (stmt), ctx);
8590 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
8591 gimple_omp_set_body (stmt, NULL);
8593 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
8594 gimple_bind_add_seq (bind, dseq);
8596 pop_gimplify_context (bind);
8598 gimple_bind_append_vars (bind, ctx->block_vars);
8599 BLOCK_VARS (block) = ctx->block_vars;
8603 /* Fold the OMP_ORDERED_CLAUSES for the OMP_ORDERED in STMT if possible. */
8605 static void
8606 lower_omp_ordered_clauses (gimple_stmt_iterator *gsi_p, gomp_ordered *ord_stmt,
8607 omp_context *ctx)
8609 struct omp_for_data fd;
8610 if (!ctx->outer || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR)
8611 return;
8613 unsigned int len = gimple_omp_for_collapse (ctx->outer->stmt);
8614 struct omp_for_data_loop *loops = XALLOCAVEC (struct omp_for_data_loop, len);
8615 omp_extract_for_data (as_a <gomp_for *> (ctx->outer->stmt), &fd, loops);
8616 if (!fd.ordered)
8617 return;
8619 tree *list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
8620 tree c = gimple_omp_ordered_clauses (ord_stmt);
8621 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
8622 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
8624 /* Merge depend clauses from multiple adjacent
8625 #pragma omp ordered depend(sink:...) constructs
8626 into one #pragma omp ordered depend(sink:...), so that
8627 we can optimize them together. */
8628 gimple_stmt_iterator gsi = *gsi_p;
8629 gsi_next (&gsi);
8630 while (!gsi_end_p (gsi))
8632 gimple *stmt = gsi_stmt (gsi);
8633 if (is_gimple_debug (stmt)
8634 || gimple_code (stmt) == GIMPLE_NOP)
8636 gsi_next (&gsi);
8637 continue;
8639 if (gimple_code (stmt) != GIMPLE_OMP_ORDERED)
8640 break;
8641 gomp_ordered *ord_stmt2 = as_a <gomp_ordered *> (stmt);
8642 c = gimple_omp_ordered_clauses (ord_stmt2);
8643 if (c == NULL_TREE
8644 || OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND
8645 || OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
8646 break;
8647 while (*list_p)
8648 list_p = &OMP_CLAUSE_CHAIN (*list_p);
8649 *list_p = c;
8650 gsi_remove (&gsi, true);
8654 /* Canonicalize sink dependence clauses into one folded clause if
8655 possible.
8657 The basic algorithm is to create a sink vector whose first
8658 element is the GCD of all the first elements, and whose remaining
8659 elements are the minimum of the subsequent columns.
8661 We ignore dependence vectors whose first element is zero because
8662 such dependencies are known to be executed by the same thread.
8664 We take into account the direction of the loop, so a minimum
8665 becomes a maximum if the loop is iterating forwards. We also
8666 ignore sink clauses where the loop direction is unknown, or where
8667 the offsets are clearly invalid because they are not a multiple
8668 of the loop increment.
8670 For example:
8672 #pragma omp for ordered(2)
8673 for (i=0; i < N; ++i)
8674 for (j=0; j < M; ++j)
8676 #pragma omp ordered \
8677 depend(sink:i-8,j-2) \
8678 depend(sink:i,j-1) \ // Completely ignored because i+0.
8679 depend(sink:i-4,j-3) \
8680 depend(sink:i-6,j-4)
8681 #pragma omp ordered depend(source)
8684 Folded clause is:
8686 depend(sink:-gcd(8,4,6),-min(2,3,4))
8687 -or-
8688 depend(sink:-2,-2)
8691 /* FIXME: Computing GCDs where the first element is zero is
8692 non-trivial in the presence of collapsed loops. Do this later. */
8693 if (fd.collapse > 1)
8694 return;
8696 wide_int *folded_deps = XALLOCAVEC (wide_int, 2 * len - 1);
8698 /* wide_int is not a POD so it must be default-constructed. */
8699 for (unsigned i = 0; i != 2 * len - 1; ++i)
8700 new (static_cast<void*>(folded_deps + i)) wide_int ();
8702 tree folded_dep = NULL_TREE;
8703 /* TRUE if the first dimension's offset is negative. */
8704 bool neg_offset_p = false;
8706 list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
8707 unsigned int i;
8708 while ((c = *list_p) != NULL)
8710 bool remove = false;
8712 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND);
8713 if (OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
8714 goto next_ordered_clause;
8716 tree vec;
8717 for (vec = OMP_CLAUSE_DECL (c), i = 0;
8718 vec && TREE_CODE (vec) == TREE_LIST;
8719 vec = TREE_CHAIN (vec), ++i)
8721 gcc_assert (i < len);
8723 /* omp_extract_for_data has canonicalized the condition. */
8724 gcc_assert (fd.loops[i].cond_code == LT_EXPR
8725 || fd.loops[i].cond_code == GT_EXPR);
8726 bool forward = fd.loops[i].cond_code == LT_EXPR;
8727 bool maybe_lexically_later = true;
8729 /* While the committee makes up its mind, bail if we have any
8730 non-constant steps. */
8731 if (TREE_CODE (fd.loops[i].step) != INTEGER_CST)
8732 goto lower_omp_ordered_ret;
8734 tree itype = TREE_TYPE (TREE_VALUE (vec));
8735 if (POINTER_TYPE_P (itype))
8736 itype = sizetype;
8737 wide_int offset = wide_int::from (wi::to_wide (TREE_PURPOSE (vec)),
8738 TYPE_PRECISION (itype),
8739 TYPE_SIGN (itype));
8741 /* Ignore invalid offsets that are not multiples of the step. */
8742 if (!wi::multiple_of_p (wi::abs (offset),
8743 wi::abs (wi::to_wide (fd.loops[i].step)),
8744 UNSIGNED))
8746 warning_at (OMP_CLAUSE_LOCATION (c), 0,
8747 "ignoring sink clause with offset that is not "
8748 "a multiple of the loop step");
8749 remove = true;
8750 goto next_ordered_clause;
8753 /* Calculate the first dimension. The first dimension of
8754 the folded dependency vector is the GCD of the first
8755 elements, while ignoring any first elements whose offset
8756 is 0. */
8757 if (i == 0)
8759 /* Ignore dependence vectors whose first dimension is 0. */
8760 if (offset == 0)
8762 remove = true;
8763 goto next_ordered_clause;
8765 else
8767 if (!TYPE_UNSIGNED (itype) && (forward ^ wi::neg_p (offset)))
8769 error_at (OMP_CLAUSE_LOCATION (c),
8770 "first offset must be in opposite direction "
8771 "of loop iterations");
8772 goto lower_omp_ordered_ret;
8774 if (forward)
8775 offset = -offset;
8776 neg_offset_p = forward;
8777 /* Initialize the first time around. */
8778 if (folded_dep == NULL_TREE)
8780 folded_dep = c;
8781 folded_deps[0] = offset;
8783 else
8784 folded_deps[0] = wi::gcd (folded_deps[0],
8785 offset, UNSIGNED);
8788 /* Calculate minimum for the remaining dimensions. */
8789 else
8791 folded_deps[len + i - 1] = offset;
8792 if (folded_dep == c)
8793 folded_deps[i] = offset;
8794 else if (maybe_lexically_later
8795 && !wi::eq_p (folded_deps[i], offset))
8797 if (forward ^ wi::gts_p (folded_deps[i], offset))
8799 unsigned int j;
8800 folded_dep = c;
8801 for (j = 1; j <= i; j++)
8802 folded_deps[j] = folded_deps[len + j - 1];
8804 else
8805 maybe_lexically_later = false;
8809 gcc_assert (i == len);
8811 remove = true;
8813 next_ordered_clause:
8814 if (remove)
8815 *list_p = OMP_CLAUSE_CHAIN (c);
8816 else
8817 list_p = &OMP_CLAUSE_CHAIN (c);
8820 if (folded_dep)
8822 if (neg_offset_p)
8823 folded_deps[0] = -folded_deps[0];
8825 tree itype = TREE_TYPE (TREE_VALUE (OMP_CLAUSE_DECL (folded_dep)));
8826 if (POINTER_TYPE_P (itype))
8827 itype = sizetype;
8829 TREE_PURPOSE (OMP_CLAUSE_DECL (folded_dep))
8830 = wide_int_to_tree (itype, folded_deps[0]);
8831 OMP_CLAUSE_CHAIN (folded_dep) = gimple_omp_ordered_clauses (ord_stmt);
8832 *gimple_omp_ordered_clauses_ptr (ord_stmt) = folded_dep;
8835 lower_omp_ordered_ret:
8837 /* Ordered without clauses is equivalent to #pragma omp ordered threads,
8838 while we want a nop instead if we removed all clauses. */
8839 if (gimple_omp_ordered_clauses (ord_stmt) == NULL_TREE)
8840 gsi_replace (gsi_p, gimple_build_nop (), true);
8844 /* Expand code for an OpenMP ordered directive. */
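/* In the plain (non-depend) form this is roughly

     GOMP_ordered_start ();
     BODY;
     GOMP_ordered_end ();

   with the IFN_GOMP_SIMD_ORDERED_{START,END} internal functions used
   instead for simd, plus a per-lane serialization loop when SIMT
   execution is possible.  */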
8846 static void
8847 lower_omp_ordered (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8849 tree block;
8850 gimple *stmt = gsi_stmt (*gsi_p), *g;
8851 gomp_ordered *ord_stmt = as_a <gomp_ordered *> (stmt);
8852 gcall *x;
8853 gbind *bind;
8854 bool simd = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
8855 OMP_CLAUSE_SIMD);
8856 /* FIXME: this should check for the presence of OMP_CLAUSE__SIMT_ on the enclosing
8857 loop. */
8858 bool maybe_simt
8859 = simd && omp_maybe_offloaded_ctx (ctx) && omp_max_simt_vf () > 1;
8860 bool threads = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
8861 OMP_CLAUSE_THREADS);
8863 if (omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
8864 OMP_CLAUSE_DEPEND))
8866 /* FIXME: This needs to be moved to the expansion to verify various
8867 conditions only testable on cfg with dominators computed, and also
8868 all the depend clauses to be merged still might need to be available
8869 for the runtime checks. */
8870 if (0)
8871 lower_omp_ordered_clauses (gsi_p, ord_stmt, ctx);
8872 return;
8875 push_gimplify_context ();
8877 block = make_node (BLOCK);
8878 bind = gimple_build_bind (NULL, NULL, block);
8879 gsi_replace (gsi_p, bind, true);
8880 gimple_bind_add_stmt (bind, stmt);
8882 if (simd)
8884 x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_START, 1,
8885 build_int_cst (NULL_TREE, threads));
8886 cfun->has_simduid_loops = true;
8888 else
8889 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_START), 0);
8891 gimple_bind_add_stmt (bind, x);
8893 tree counter = NULL_TREE, test = NULL_TREE, body = NULL_TREE;
8894 if (maybe_simt)
8896 counter = create_tmp_var (integer_type_node);
8897 g = gimple_build_call_internal (IFN_GOMP_SIMT_LANE, 0);
8898 gimple_call_set_lhs (g, counter);
8899 gimple_bind_add_stmt (bind, g);
8901 body = create_artificial_label (UNKNOWN_LOCATION);
8902 test = create_artificial_label (UNKNOWN_LOCATION);
8903 gimple_bind_add_stmt (bind, gimple_build_label (body));
8905 tree simt_pred = create_tmp_var (integer_type_node);
8906 g = gimple_build_call_internal (IFN_GOMP_SIMT_ORDERED_PRED, 1, counter);
8907 gimple_call_set_lhs (g, simt_pred);
8908 gimple_bind_add_stmt (bind, g);
8910 tree t = create_artificial_label (UNKNOWN_LOCATION);
8911 g = gimple_build_cond (EQ_EXPR, simt_pred, integer_zero_node, t, test);
8912 gimple_bind_add_stmt (bind, g);
8914 gimple_bind_add_stmt (bind, gimple_build_label (t));
8916 lower_omp (gimple_omp_body_ptr (stmt), ctx);
8917 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
8918 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
8919 gimple_omp_set_body (stmt, NULL);
8921 if (maybe_simt)
8923 gimple_bind_add_stmt (bind, gimple_build_label (test));
8924 g = gimple_build_assign (counter, MINUS_EXPR, counter, integer_one_node);
8925 gimple_bind_add_stmt (bind, g);
8927 tree c = build2 (GE_EXPR, boolean_type_node, counter, integer_zero_node);
8928 tree nonneg = create_tmp_var (integer_type_node);
8929 gimple_seq tseq = NULL;
8930 gimplify_assign (nonneg, fold_convert (integer_type_node, c), &tseq);
8931 gimple_bind_add_seq (bind, tseq);
8933 g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY, 1, nonneg);
8934 gimple_call_set_lhs (g, nonneg);
8935 gimple_bind_add_stmt (bind, g);
8937 tree end = create_artificial_label (UNKNOWN_LOCATION);
8938 g = gimple_build_cond (NE_EXPR, nonneg, integer_zero_node, body, end);
8939 gimple_bind_add_stmt (bind, g);
8941 gimple_bind_add_stmt (bind, gimple_build_label (end));
8943 if (simd)
8944 x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_END, 1,
8945 build_int_cst (NULL_TREE, threads));
8946 else
8947 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_END), 0);
8949 gimple_bind_add_stmt (bind, x);
8951 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
8953 pop_gimplify_context (bind);
8955 gimple_bind_append_vars (bind, ctx->block_vars);
8956 BLOCK_VARS (block) = gimple_bind_vars (bind);
8960 /* Expand code for an OpenMP scan directive and the structured block
8961 before the scan directive. */
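/* For example, in an inclusive scan such as

     #pragma omp simd reduction (inscan, +:r)
     for (i = 0; i < n; i++)
       {
         r += a[i];
         #pragma omp scan inclusive (r)
         b[i] = r;
       }

   the statements before the scan directive form the input phase and
   the ones after it the scan phase, which are lowered differently.  */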
8963 static void
8964 lower_omp_scan (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8966 gimple *stmt = gsi_stmt (*gsi_p);
8967 bool has_clauses
8968 = gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt)) != NULL;
8969 tree lane = NULL_TREE;
8970 gimple_seq before = NULL;
8971 omp_context *octx = ctx->outer;
8972 gcc_assert (octx);
8973 if (octx->scan_exclusive && !has_clauses)
8975 gimple_stmt_iterator gsi2 = *gsi_p;
8976 gsi_next (&gsi2);
8977 gimple *stmt2 = gsi_stmt (gsi2);
8978 /* For exclusive scan, swap the GIMPLE_OMP_SCAN without clauses
8979 with the following GIMPLE_OMP_SCAN with clauses, so that the input
8980 phase, the one with the exclusive clause(s), comes first. */
8981 if (stmt2
8982 && gimple_code (stmt2) == GIMPLE_OMP_SCAN
8983 && gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt2)) != NULL)
8985 gsi_remove (gsi_p, false);
8986 gsi_insert_after (gsi_p, stmt, GSI_SAME_STMT);
8987 ctx = maybe_lookup_ctx (stmt2);
8988 gcc_assert (ctx);
8989 lower_omp_scan (gsi_p, ctx);
8990 return;
8994 bool input_phase = has_clauses ^ octx->scan_inclusive;
8995 bool is_simd = (gimple_code (octx->stmt) == GIMPLE_OMP_FOR
8996 && gimple_omp_for_kind (octx->stmt) == GF_OMP_FOR_KIND_SIMD);
8997 bool is_for = (gimple_code (octx->stmt) == GIMPLE_OMP_FOR
8998 && gimple_omp_for_kind (octx->stmt) == GF_OMP_FOR_KIND_FOR
8999 && !gimple_omp_for_combined_p (octx->stmt));
9000 bool is_for_simd = is_simd && gimple_omp_for_combined_into_p (octx->stmt);
9001 if (is_for_simd && octx->for_simd_scan_phase)
9002 is_simd = false;
9003 if (is_simd)
9004 if (tree c = omp_find_clause (gimple_omp_for_clauses (octx->stmt),
9005 OMP_CLAUSE__SIMDUID_))
9007 tree uid = OMP_CLAUSE__SIMDUID__DECL (c);
9008 lane = create_tmp_var (unsigned_type_node);
9009 tree t = build_int_cst (integer_type_node,
9010 input_phase ? 1
9011 : octx->scan_inclusive ? 2 : 3);
9012 gimple *g
9013 = gimple_build_call_internal (IFN_GOMP_SIMD_LANE, 2, uid, t);
9014 gimple_call_set_lhs (g, lane);
9015 gimple_seq_add_stmt (&before, g);
9018 if (is_simd || is_for)
9020 for (tree c = gimple_omp_for_clauses (octx->stmt);
9021 c; c = OMP_CLAUSE_CHAIN (c))
9022 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
9023 && OMP_CLAUSE_REDUCTION_INSCAN (c))
9025 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
9026 tree var = OMP_CLAUSE_DECL (c);
9027 tree new_var = lookup_decl (var, octx);
9028 tree val = new_var;
9029 tree var2 = NULL_TREE;
9030 tree var3 = NULL_TREE;
9031 tree var4 = NULL_TREE;
9032 tree lane0 = NULL_TREE;
9033 tree new_vard = new_var;
9034 if (omp_is_reference (var))
9036 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
9037 val = new_var;
9039 if (DECL_HAS_VALUE_EXPR_P (new_vard))
9041 val = DECL_VALUE_EXPR (new_vard);
9042 if (new_vard != new_var)
9044 gcc_assert (TREE_CODE (val) == ADDR_EXPR);
9045 val = TREE_OPERAND (val, 0);
9047 if (TREE_CODE (val) == ARRAY_REF
9048 && VAR_P (TREE_OPERAND (val, 0)))
9050 tree v = TREE_OPERAND (val, 0);
9051 if (lookup_attribute ("omp simd array",
9052 DECL_ATTRIBUTES (v)))
9054 val = unshare_expr (val);
9055 lane0 = TREE_OPERAND (val, 1);
9056 TREE_OPERAND (val, 1) = lane;
9057 var2 = lookup_decl (v, octx);
9058 if (octx->scan_exclusive)
9059 var4 = lookup_decl (var2, octx);
9060 if (input_phase
9061 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
9062 var3 = maybe_lookup_decl (var4 ? var4 : var2, octx);
9063 if (!input_phase)
9065 var2 = build4 (ARRAY_REF, TREE_TYPE (val),
9066 var2, lane, NULL_TREE, NULL_TREE);
9067 TREE_THIS_NOTRAP (var2) = 1;
9068 if (octx->scan_exclusive)
9070 var4 = build4 (ARRAY_REF, TREE_TYPE (val),
9071 var4, lane, NULL_TREE,
9072 NULL_TREE);
9073 TREE_THIS_NOTRAP (var4) = 1;
9076 else
9077 var2 = val;
9080 gcc_assert (var2);
9082 else
9084 var2 = build_outer_var_ref (var, octx);
9085 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
9087 var3 = maybe_lookup_decl (new_vard, octx);
9088 if (var3 == new_vard || var3 == NULL_TREE)
9089 var3 = NULL_TREE;
9090 else if (is_simd && octx->scan_exclusive && !input_phase)
9092 var4 = maybe_lookup_decl (var3, octx);
9093 if (var4 == var3 || var4 == NULL_TREE)
9095 if (TREE_ADDRESSABLE (TREE_TYPE (new_var)))
9097 var4 = var3;
9098 var3 = NULL_TREE;
9100 else
9101 var4 = NULL_TREE;
9105 if (is_simd
9106 && octx->scan_exclusive
9107 && !input_phase
9108 && var4 == NULL_TREE)
9109 var4 = create_tmp_var (TREE_TYPE (val));
9111 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
9113 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
9114 if (input_phase)
9116 if (var3)
9118 /* If we've added a separate identity element
9119 variable, copy it over into val. */
9120 tree x = lang_hooks.decls.omp_clause_assign_op (c, val,
9121 var3);
9122 gimplify_and_add (x, &before);
9124 else if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
9126 /* Otherwise, assign to it the identity element. */
9127 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
9128 if (is_for)
9129 tseq = copy_gimple_seq_and_replace_locals (tseq);
9130 tree ref = build_outer_var_ref (var, octx);
9131 tree x = (DECL_HAS_VALUE_EXPR_P (new_vard)
9132 ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
9133 if (x)
9135 if (new_vard != new_var)
9136 val = build_fold_addr_expr_loc (clause_loc, val);
9137 SET_DECL_VALUE_EXPR (new_vard, val);
9139 SET_DECL_VALUE_EXPR (placeholder, ref);
9140 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
9141 lower_omp (&tseq, octx);
9142 if (x)
9143 SET_DECL_VALUE_EXPR (new_vard, x);
9144 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
9145 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
9146 gimple_seq_add_seq (&before, tseq);
9147 if (is_simd)
9148 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
9151 else if (is_simd)
9153 tree x;
9154 if (octx->scan_exclusive)
9156 tree v4 = unshare_expr (var4);
9157 tree v2 = unshare_expr (var2);
9158 x = lang_hooks.decls.omp_clause_assign_op (c, v4, v2);
9159 gimplify_and_add (x, &before);
9161 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
9162 x = (DECL_HAS_VALUE_EXPR_P (new_vard)
9163 ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
9164 tree vexpr = val;
9165 if (x && new_vard != new_var)
9166 vexpr = build_fold_addr_expr_loc (clause_loc, val);
9167 if (x)
9168 SET_DECL_VALUE_EXPR (new_vard, vexpr);
9169 SET_DECL_VALUE_EXPR (placeholder, var2);
9170 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
9171 lower_omp (&tseq, octx);
9172 gimple_seq_add_seq (&before, tseq);
9173 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
9174 if (x)
9175 SET_DECL_VALUE_EXPR (new_vard, x);
9176 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
9177 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
9178 if (octx->scan_inclusive)
9180 x = lang_hooks.decls.omp_clause_assign_op (c, val,
9181 var2);
9182 gimplify_and_add (x, &before);
9184 else if (lane0 == NULL_TREE)
9186 x = lang_hooks.decls.omp_clause_assign_op (c, val,
9187 var4);
9188 gimplify_and_add (x, &before);
9192 else
9194 if (input_phase)
9196 /* Input phase. Set VAL to the initializer before
9197 the body. */
9198 tree x = omp_reduction_init (c, TREE_TYPE (new_var));
9199 gimplify_assign (val, x, &before);
9201 else if (is_simd)
9203 /* Scan phase. */
9204 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
9205 if (code == MINUS_EXPR)
9206 code = PLUS_EXPR;
9208 tree x = build2 (code, TREE_TYPE (var2),
9209 unshare_expr (var2), unshare_expr (val));
9210 if (octx->scan_inclusive)
9212 gimplify_assign (unshare_expr (var2), x, &before);
9213 gimplify_assign (val, var2, &before);
9215 else
9217 gimplify_assign (unshare_expr (var4),
9218 unshare_expr (var2), &before);
9219 gimplify_assign (var2, x, &before);
9220 if (lane0 == NULL_TREE)
9221 gimplify_assign (val, var4, &before);
9225 if (octx->scan_exclusive && !input_phase && lane0)
9227 tree vexpr = unshare_expr (var4);
9228 TREE_OPERAND (vexpr, 1) = lane0;
9229 if (new_vard != new_var)
9230 vexpr = build_fold_addr_expr_loc (clause_loc, vexpr);
9231 SET_DECL_VALUE_EXPR (new_vard, vexpr);
9235 if (is_simd && !is_for_simd)
9237 gsi_insert_seq_after (gsi_p, gimple_omp_body (stmt), GSI_SAME_STMT);
9238 gsi_insert_seq_after (gsi_p, before, GSI_SAME_STMT);
9239 gsi_replace (gsi_p, gimple_build_nop (), true);
9240 return;
9242 lower_omp (gimple_omp_body_ptr (stmt), octx);
9243 if (before)
9245 gimple_stmt_iterator gsi = gsi_start_1 (gimple_omp_body_ptr (stmt));
9246 gsi_insert_seq_before (&gsi, before, GSI_SAME_STMT);
9251 /* Lower a GIMPLE_OMP_CRITICAL statement. This is a relatively simple
9252 substitution of a couple of function calls. But in the NAMED case,
9253 it requires that the languages coordinate a symbol name. It is
9254 therefore best put here in common code. */
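/* For illustration only (a sketch, not the exact emitted GIMPLE), a
   construct such as

     #pragma omp critical (foo)
     x++;

   is lowered to approximately

     GOMP_critical_name_start (&.gomp_critical_user_foo);
     x++;
     GOMP_critical_name_end (&.gomp_critical_user_foo);

   while the unnamed '#pragma omp critical' uses the argument-less
   GOMP_critical_start/GOMP_critical_end entry points instead. */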
9256 static GTY(()) hash_map<tree, tree> *critical_name_mutexes;
9258 static void
9259 lower_omp_critical (gimple_stmt_iterator *gsi_p, omp_context *ctx)
9261 tree block;
9262 tree name, lock, unlock;
9263 gomp_critical *stmt = as_a <gomp_critical *> (gsi_stmt (*gsi_p));
9264 gbind *bind;
9265 location_t loc = gimple_location (stmt);
9266 gimple_seq tbody;
9268 name = gimple_omp_critical_name (stmt);
9269 if (name)
9271 tree decl;
9273 if (!critical_name_mutexes)
9274 critical_name_mutexes = hash_map<tree, tree>::create_ggc (10);
9276 tree *n = critical_name_mutexes->get (name);
9277 if (n == NULL)
9279 char *new_str;
9281 decl = create_tmp_var_raw (ptr_type_node);
9283 new_str = ACONCAT ((".gomp_critical_user_",
9284 IDENTIFIER_POINTER (name), NULL));
9285 DECL_NAME (decl) = get_identifier (new_str);
9286 TREE_PUBLIC (decl) = 1;
9287 TREE_STATIC (decl) = 1;
9288 DECL_COMMON (decl) = 1;
9289 DECL_ARTIFICIAL (decl) = 1;
9290 DECL_IGNORED_P (decl) = 1;
9292 varpool_node::finalize_decl (decl);
9294 critical_name_mutexes->put (name, decl);
9296 else
9297 decl = *n;
9299 /* If '#pragma omp critical' is inside an offloaded region or
9300 inside a function marked as offloadable, the symbol must be
9301 marked as offloadable too. */
9302 omp_context *octx;
9303 if (cgraph_node::get (current_function_decl)->offloadable)
9304 varpool_node::get_create (decl)->offloadable = 1;
9305 else
9306 for (octx = ctx->outer; octx; octx = octx->outer)
9307 if (is_gimple_omp_offloaded (octx->stmt))
9309 varpool_node::get_create (decl)->offloadable = 1;
9310 break;
9313 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_START);
9314 lock = build_call_expr_loc (loc, lock, 1,
9315 build_fold_addr_expr_loc (loc, decl));
9317 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_END);
9318 unlock = build_call_expr_loc (loc, unlock, 1,
9319 build_fold_addr_expr_loc (loc, decl));
9321 else
9323 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_START);
9324 lock = build_call_expr_loc (loc, lock, 0);
9326 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_END);
9327 unlock = build_call_expr_loc (loc, unlock, 0);
9330 push_gimplify_context ();
9332 block = make_node (BLOCK);
9333 bind = gimple_build_bind (NULL, NULL, block);
9334 gsi_replace (gsi_p, bind, true);
9335 gimple_bind_add_stmt (bind, stmt);
9337 tbody = gimple_bind_body (bind);
9338 gimplify_and_add (lock, &tbody);
9339 gimple_bind_set_body (bind, tbody);
9341 lower_omp (gimple_omp_body_ptr (stmt), ctx);
9342 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
9343 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
9344 gimple_omp_set_body (stmt, NULL);
9346 tbody = gimple_bind_body (bind);
9347 gimplify_and_add (unlock, &tbody);
9348 gimple_bind_set_body (bind, tbody);
9350 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
9352 pop_gimplify_context (bind);
9353 gimple_bind_append_vars (bind, ctx->block_vars);
9354 BLOCK_VARS (block) = gimple_bind_vars (bind);
9357 /* A subroutine of lower_omp_for. Generate code to emit the predicate
9358 for a lastprivate clause. Given a loop control predicate of (V
9359 cond N2), we gate the clause on (!(V cond N2)). The lowered form
9360 is appended to *DLIST; iterator initialization is appended to
9361 *BODY_P. *CLIST is for lastprivate(conditional:) code that needs
9362 to be emitted in a critical section. */
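/* For instance (illustrative only), given

     #pragma omp for lastprivate (x)
     for (i = 0; i < n; i++)
       ...

   the copy-out of x is guarded by !(i < n), i.e. (i >= n), so it is
   performed only by the thread that ran the sequentially last
   iteration; with a step of 1 or -1 this is further reduced below to
   the cheaper (i == n). */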
9364 static void
9365 lower_omp_for_lastprivate (struct omp_for_data *fd, gimple_seq *body_p,
9366 gimple_seq *dlist, gimple_seq *clist,
9367 struct omp_context *ctx)
9369 tree clauses, cond, vinit;
9370 enum tree_code cond_code;
9371 gimple_seq stmts;
9373 cond_code = fd->loop.cond_code;
9374 cond_code = cond_code == LT_EXPR ? GE_EXPR : LE_EXPR;
9376 /* When possible, use a strict equality expression. This can let
9377 VRP-style optimizations deduce the value and remove a copy. */
9378 if (tree_fits_shwi_p (fd->loop.step))
9380 HOST_WIDE_INT step = tree_to_shwi (fd->loop.step);
9381 if (step == 1 || step == -1)
9382 cond_code = EQ_EXPR;
9385 if (gimple_omp_for_kind (fd->for_stmt) == GF_OMP_FOR_KIND_GRID_LOOP
9386 || gimple_omp_for_grid_phony (fd->for_stmt))
9387 cond = omp_grid_lastprivate_predicate (fd);
9388 else
9390 tree n2 = fd->loop.n2;
9391 if (fd->collapse > 1
9392 && TREE_CODE (n2) != INTEGER_CST
9393 && gimple_omp_for_combined_into_p (fd->for_stmt))
9395 struct omp_context *taskreg_ctx = NULL;
9396 if (gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
9398 gomp_for *gfor = as_a <gomp_for *> (ctx->outer->stmt);
9399 if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_FOR
9400 || gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_DISTRIBUTE)
9402 if (gimple_omp_for_combined_into_p (gfor))
9404 gcc_assert (ctx->outer->outer
9405 && is_parallel_ctx (ctx->outer->outer));
9406 taskreg_ctx = ctx->outer->outer;
9408 else
9410 struct omp_for_data outer_fd;
9411 omp_extract_for_data (gfor, &outer_fd, NULL);
9412 n2 = fold_convert (TREE_TYPE (n2), outer_fd.loop.n2);
9415 else if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_TASKLOOP)
9416 taskreg_ctx = ctx->outer->outer;
9418 else if (is_taskreg_ctx (ctx->outer))
9419 taskreg_ctx = ctx->outer;
9420 if (taskreg_ctx)
9422 int i;
9423 tree taskreg_clauses
9424 = gimple_omp_taskreg_clauses (taskreg_ctx->stmt);
9425 tree innerc = omp_find_clause (taskreg_clauses,
9426 OMP_CLAUSE__LOOPTEMP_);
9427 gcc_assert (innerc);
9428 for (i = 0; i < fd->collapse; i++)
9430 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
9431 OMP_CLAUSE__LOOPTEMP_);
9432 gcc_assert (innerc);
9434 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
9435 OMP_CLAUSE__LOOPTEMP_);
9436 if (innerc)
9437 n2 = fold_convert (TREE_TYPE (n2),
9438 lookup_decl (OMP_CLAUSE_DECL (innerc),
9439 taskreg_ctx));
9442 cond = build2 (cond_code, boolean_type_node, fd->loop.v, n2);
9445 clauses = gimple_omp_for_clauses (fd->for_stmt);
9446 stmts = NULL;
9447 lower_lastprivate_clauses (clauses, cond, body_p, &stmts, clist, ctx);
9448 if (!gimple_seq_empty_p (stmts))
9450 gimple_seq_add_seq (&stmts, *dlist);
9451 *dlist = stmts;
9453 /* Optimize: v = 0; is usually cheaper than v = some_other_constant. */
9454 vinit = fd->loop.n1;
9455 if (cond_code == EQ_EXPR
9456 && tree_fits_shwi_p (fd->loop.n2)
9457 && ! integer_zerop (fd->loop.n2))
9458 vinit = build_int_cst (TREE_TYPE (fd->loop.v), 0);
9459 else
9460 vinit = unshare_expr (vinit);
9462 /* Initialize the iterator variable, so that threads that don't execute
9463 any iterations don't execute the lastprivate clauses by accident. */
9464 gimplify_assign (fd->loop.v, vinit, body_p);
9468 /* Callback for walk_gimple_seq. Find #pragma omp scan statement. */
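/* On success the iterator pointing at the GIMPLE_OMP_SCAN statement is
   stored in WI->INFO and a non-NULL value is returned to stop the
   walk; of GIMPLE_OMP_FOR statements only combined simd loops are
   walked into. */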
9470 static tree
9471 omp_find_scan (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
9472 struct walk_stmt_info *wi)
9474 gimple *stmt = gsi_stmt (*gsi_p);
9476 *handled_ops_p = true;
9477 switch (gimple_code (stmt))
9479 WALK_SUBSTMTS;
9481 case GIMPLE_OMP_FOR:
9482 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_SIMD
9483 && gimple_omp_for_combined_into_p (stmt))
9484 *handled_ops_p = false;
9485 break;
9487 case GIMPLE_OMP_SCAN:
9488 *(gimple_stmt_iterator *) (wi->info) = *gsi_p;
9489 return integer_zero_node;
9490 default:
9491 break;
9493 return NULL;
9496 /* Helper function for lower_omp_for: add transformations for a worksharing
9497 loop with scan directives inside it.
9498 For a worksharing loop not combined with simd, transform:
9499 #pragma omp for reduction(inscan,+:r) private(i)
9500 for (i = 0; i < n; i = i + 1)
9503 update (r);
9505 #pragma omp scan inclusive(r)
9507 use (r);
9511 into two worksharing loops + code to merge results:
9513 num_threads = omp_get_num_threads ();
9514 thread_num = omp_get_thread_num ();
9515 if (thread_num == 0) goto <D.2099>; else goto <D.2100>;
9516 <D.2099>:
9517 var2 = r;
9518 goto <D.2101>;
9519 <D.2100>:
9520 // For UDRs this is UDR init, or if ctors are needed, copy from
9521 // var3 that has been constructed to contain the neutral element.
9522 var2 = 0;
9523 <D.2101>:
9524 ivar = 0;
9525 // The _scantemp_ clauses will arrange for rpriva to be initialized to
9526 // a shared array with num_threads elements and rprivb to a local array
9527 // with a number of elements equal to the number of (contiguous) iterations
9528 // the current thread will perform. The controlb and controlp variables
9529 // are temporaries to handle deallocation of rprivb at the end of the second
9530 // GOMP_FOR.
9531 #pragma omp for _scantemp_(rpriva) _scantemp_(rprivb) _scantemp_(controlb) \
9532 _scantemp_(controlp) reduction(inscan,+:r) private(i) nowait
9533 for (i = 0; i < n; i = i + 1)
9536 // For UDRs this is UDR init or copy from var3.
9537 r = 0;
9538 // This is the input phase from user code.
9539 update (r);
9542 // For UDRs this is UDR merge.
9543 var2 = var2 + r;
9544 // Rather than handing it over to the user, save it to the local
9545 // thread's array.
9546 rprivb[ivar] = var2;
9547 // For exclusive scan, the above two statements are swapped.
9548 ivar = ivar + 1;
9551 // And remember this thread's final value in the shared
9552 // rpriva array.
9553 rpriva[(sizetype) thread_num] = var2;
9554 // If there is more than one thread, compute the inclusive parallel scan
9555 // of the rpriva array using a work-efficient prefix sum.
9556 if (num_threads > 1) goto <D.2102>; else goto <D.2103>;
9557 <D.2102>:
9558 GOMP_barrier ();
9559 down = 0;
9560 k = 1;
9561 num_threadsu = (unsigned int) num_threads;
9562 thread_nump1 = (unsigned int) thread_num + 1;
9563 <D.2108>:
9564 twok = k << 1;
9565 if (twok > num_threadsu) goto <D.2110>; else goto <D.2111>;
9566 <D.2110>:
9567 down = 4294967295;
9568 k = k >> 1;
9569 if (k == num_threadsu) goto <D.2112>; else goto <D.2111>;
9570 <D.2112>:
9571 k = k >> 1;
9572 <D.2111>:
9573 twok = k << 1;
9574 cplx = .MUL_OVERFLOW (thread_nump1, twok);
9575 mul = REALPART_EXPR <cplx>;
9576 ovf = IMAGPART_EXPR <cplx>;
9577 if (ovf == 0) goto <D.2116>; else goto <D.2117>;
9578 <D.2116>:
9579 andv = k & down;
9580 andvm1 = andv + 4294967295;
9581 l = mul + andvm1;
9582 if (l < num_threadsu) goto <D.2120>; else goto <D.2117>;
9583 <D.2120>:
9584 // For UDRs this is UDR merge, performed using var2 variable as temporary,
9585 // i.e. var2 = rpriva[l - k]; UDR merge (var2, rpriva[l]); rpriva[l] = var2;
9586 rpriva[l] = rpriva[l - k] + rpriva[l];
9587 <D.2117>:
9588 if (down == 0) goto <D.2121>; else goto <D.2122>;
9589 <D.2121>:
9590 k = k << 1;
9591 goto <D.2123>;
9592 <D.2122>:
9593 k = k >> 1;
9594 <D.2123>:
9595 GOMP_barrier ();
9596 if (k != 0) goto <D.2108>; else goto <D.2103>;
9597 <D.2103>:
9598 if (thread_num == 0) goto <D.2124>; else goto <D.2125>;
9599 <D.2124>:
9600 // For UDRs this is UDR init or copy from var3.
9601 var2 = 0;
9602 goto <D.2126>;
9603 <D.2125>:
9604 var2 = rpriva[thread_num - 1];
9605 <D.2126>:
9606 ivar = 0;
9607 #pragma omp for _scantemp_(controlb) _scantemp_(controlp) \
9608 reduction(inscan,+:r) private(i)
9609 for (i = 0; i < n; i = i + 1)
9612 // For UDRs, this is r = var2; UDR merge (r, rprivb[ivar]);
9613 r = var2 + rprivb[ivar];
9616 // This is the scan phase from user code.
9617 use (r);
9618 // Plus a bump of the iterator.
9619 ivar = ivar + 1;
9621 } */
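/* A minimal user-level example (for illustration) of code that is
   subject to the transformation sketched above:

     int r = 0;
     #pragma omp parallel for reduction (inscan, +:r)
     for (i = 0; i < n; i++)
       {
         r += a[i];                    // the input phase
         #pragma omp scan inclusive (r)
         b[i] = r;                     // the scan phase
       }
*/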
9623 static void
9624 lower_omp_for_scan (gimple_seq *body_p, gimple_seq *dlist, gomp_for *stmt,
9625 struct omp_for_data *fd, omp_context *ctx)
9627 bool is_for_simd = gimple_omp_for_combined_p (stmt);
9628 gcc_assert (ctx->scan_inclusive || ctx->scan_exclusive);
9630 gimple_seq body = gimple_omp_body (stmt);
9631 gimple_stmt_iterator input1_gsi = gsi_none ();
9632 struct walk_stmt_info wi;
9633 memset (&wi, 0, sizeof (wi));
9634 wi.val_only = true;
9635 wi.info = (void *) &input1_gsi;
9636 walk_gimple_seq_mod (&body, omp_find_scan, NULL, &wi);
9637 gcc_assert (!gsi_end_p (input1_gsi));
9639 gimple *input_stmt1 = gsi_stmt (input1_gsi);
9640 gimple_stmt_iterator gsi = input1_gsi;
9641 gsi_next (&gsi);
9642 gimple_stmt_iterator scan1_gsi = gsi;
9643 gimple *scan_stmt1 = gsi_stmt (gsi);
9644 gcc_assert (scan_stmt1 && gimple_code (scan_stmt1) == GIMPLE_OMP_SCAN);
9646 gimple_seq input_body = gimple_omp_body (input_stmt1);
9647 gimple_seq scan_body = gimple_omp_body (scan_stmt1);
9648 gimple_omp_set_body (input_stmt1, NULL);
9649 gimple_omp_set_body (scan_stmt1, NULL);
9650 gimple_omp_set_body (stmt, NULL);
9652 gomp_for *new_stmt = as_a <gomp_for *> (gimple_copy (stmt));
9653 gimple_seq new_body = copy_gimple_seq_and_replace_locals (body);
9654 gimple_omp_set_body (stmt, body);
9655 gimple_omp_set_body (input_stmt1, input_body);
9657 gimple_stmt_iterator input2_gsi = gsi_none ();
9658 memset (&wi, 0, sizeof (wi));
9659 wi.val_only = true;
9660 wi.info = (void *) &input2_gsi;
9661 walk_gimple_seq_mod (&new_body, omp_find_scan, NULL, &wi);
9662 gcc_assert (!gsi_end_p (input2_gsi));
9664 gimple *input_stmt2 = gsi_stmt (input2_gsi);
9665 gsi = input2_gsi;
9666 gsi_next (&gsi);
9667 gimple_stmt_iterator scan2_gsi = gsi;
9668 gimple *scan_stmt2 = gsi_stmt (gsi);
9669 gcc_assert (scan_stmt2 && gimple_code (scan_stmt2) == GIMPLE_OMP_SCAN);
9670 gimple_omp_set_body (scan_stmt2, scan_body);
9672 gimple_stmt_iterator input3_gsi = gsi_none ();
9673 gimple_stmt_iterator scan3_gsi = gsi_none ();
9674 gimple_stmt_iterator input4_gsi = gsi_none ();
9675 gimple_stmt_iterator scan4_gsi = gsi_none ();
9676 gimple *input_stmt3 = NULL, *scan_stmt3 = NULL;
9677 gimple *input_stmt4 = NULL, *scan_stmt4 = NULL;
9678 omp_context *input_simd_ctx = NULL, *scan_simd_ctx = NULL;
9679 if (is_for_simd)
9681 memset (&wi, 0, sizeof (wi));
9682 wi.val_only = true;
9683 wi.info = (void *) &input3_gsi;
9684 walk_gimple_seq_mod (&input_body, omp_find_scan, NULL, &wi);
9685 gcc_assert (!gsi_end_p (input3_gsi));
9687 input_stmt3 = gsi_stmt (input3_gsi);
9688 gsi = input3_gsi;
9689 gsi_next (&gsi);
9690 scan3_gsi = gsi;
9691 scan_stmt3 = gsi_stmt (gsi);
9692 gcc_assert (scan_stmt3 && gimple_code (scan_stmt3) == GIMPLE_OMP_SCAN);
9694 memset (&wi, 0, sizeof (wi));
9695 wi.val_only = true;
9696 wi.info = (void *) &input4_gsi;
9697 walk_gimple_seq_mod (&scan_body, omp_find_scan, NULL, &wi);
9698 gcc_assert (!gsi_end_p (input4_gsi));
9700 input_stmt4 = gsi_stmt (input4_gsi);
9701 gsi = input4_gsi;
9702 gsi_next (&gsi);
9703 scan4_gsi = gsi;
9704 scan_stmt4 = gsi_stmt (gsi);
9705 gcc_assert (scan_stmt4 && gimple_code (scan_stmt4) == GIMPLE_OMP_SCAN);
9707 input_simd_ctx = maybe_lookup_ctx (input_stmt3)->outer;
9708 scan_simd_ctx = maybe_lookup_ctx (input_stmt4)->outer;
9711 tree num_threads = create_tmp_var (integer_type_node);
9712 tree thread_num = create_tmp_var (integer_type_node);
9713 tree nthreads_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS);
9714 tree threadnum_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
9715 gimple *g = gimple_build_call (nthreads_decl, 0);
9716 gimple_call_set_lhs (g, num_threads);
9717 gimple_seq_add_stmt (body_p, g);
9718 g = gimple_build_call (threadnum_decl, 0);
9719 gimple_call_set_lhs (g, thread_num);
9720 gimple_seq_add_stmt (body_p, g);
9722 tree ivar = create_tmp_var (sizetype);
9723 tree new_clauses1 = NULL_TREE, new_clauses2 = NULL_TREE;
9724 tree *cp1 = &new_clauses1, *cp2 = &new_clauses2;
9725 tree k = create_tmp_var (unsigned_type_node);
9726 tree l = create_tmp_var (unsigned_type_node);
9728 gimple_seq clist = NULL, mdlist = NULL;
9729 gimple_seq thr01_list = NULL, thrn1_list = NULL;
9730 gimple_seq thr02_list = NULL, thrn2_list = NULL;
9731 gimple_seq scan1_list = NULL, input2_list = NULL;
9732 gimple_seq last_list = NULL, reduc_list = NULL;
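/* A road map for the sequences filled in the loop below, summarizing
   where the code further down emits them: CLIST collects constructor
   calls and is emitted first; THR01_LIST/THRN1_LIST initialize the
   accumulator on thread 0 resp. the remaining threads before the first
   GOMP_FOR, THR02_LIST/THRN2_LIST do the same before the second one;
   SCAN1_LIST becomes the scan body of the first loop, INPUT2_LIST the
   input body of the second; MDLIST stores each thread's final value
   into rpriva; REDUC_LIST is one combining step of the prefix-sum over
   rpriva; LAST_LIST copies the overall result back out and is executed
   by the last thread only. */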
9733 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
9734 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
9735 && OMP_CLAUSE_REDUCTION_INSCAN (c))
9737 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
9738 tree var = OMP_CLAUSE_DECL (c);
9739 tree new_var = lookup_decl (var, ctx);
9740 tree var3 = NULL_TREE;
9741 tree new_vard = new_var;
9742 if (omp_is_reference (var))
9743 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
9744 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
9746 var3 = maybe_lookup_decl (new_vard, ctx);
9747 if (var3 == new_vard)
9748 var3 = NULL_TREE;
9751 tree ptype = build_pointer_type (TREE_TYPE (new_var));
9752 tree rpriva = create_tmp_var (ptype);
9753 tree nc = build_omp_clause (clause_loc, OMP_CLAUSE__SCANTEMP_);
9754 OMP_CLAUSE_DECL (nc) = rpriva;
9755 *cp1 = nc;
9756 cp1 = &OMP_CLAUSE_CHAIN (nc);
9758 tree rprivb = create_tmp_var (ptype);
9759 nc = build_omp_clause (clause_loc, OMP_CLAUSE__SCANTEMP_);
9760 OMP_CLAUSE_DECL (nc) = rprivb;
9761 OMP_CLAUSE__SCANTEMP__ALLOC (nc) = 1;
9762 *cp1 = nc;
9763 cp1 = &OMP_CLAUSE_CHAIN (nc);
9765 tree var2 = create_tmp_var_raw (TREE_TYPE (new_var));
9766 if (new_vard != new_var)
9767 TREE_ADDRESSABLE (var2) = 1;
9768 gimple_add_tmp_var (var2);
9770 tree x = fold_convert_loc (clause_loc, sizetype, thread_num);
9771 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
9772 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
9773 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
9774 tree rpriva_ref = build_simple_mem_ref_loc (clause_loc, x);
9776 x = fold_build2_loc (clause_loc, PLUS_EXPR, integer_type_node,
9777 thread_num, integer_minus_one_node);
9778 x = fold_convert_loc (clause_loc, sizetype, x);
9779 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
9780 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
9781 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
9782 tree rprivam1_ref = build_simple_mem_ref_loc (clause_loc, x);
9784 x = fold_convert_loc (clause_loc, sizetype, l);
9785 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
9786 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
9787 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
9788 tree rprival_ref = build_simple_mem_ref_loc (clause_loc, x);
9790 x = fold_build2_loc (clause_loc, MINUS_EXPR, unsigned_type_node, l, k);
9791 x = fold_convert_loc (clause_loc, sizetype, x);
9792 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
9793 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
9794 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
9795 tree rprivalmk_ref = build_simple_mem_ref_loc (clause_loc, x);
9797 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, ivar,
9798 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
9799 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rprivb), rprivb, x);
9800 tree rprivb_ref = build_simple_mem_ref_loc (clause_loc, x);
9802 tree var4 = is_for_simd ? new_var : var2;
9803 tree var5 = NULL_TREE, var6 = NULL_TREE;
9804 if (is_for_simd)
9806 var5 = lookup_decl (var, input_simd_ctx);
9807 var6 = lookup_decl (var, scan_simd_ctx);
9808 if (new_vard != new_var)
9810 var5 = build_simple_mem_ref_loc (clause_loc, var5);
9811 var6 = build_simple_mem_ref_loc (clause_loc, var6);
9814 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
9816 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
9817 tree val = var2;
9819 x = lang_hooks.decls.omp_clause_default_ctor
9820 (c, var2, build_outer_var_ref (var, ctx));
9821 if (x)
9822 gimplify_and_add (x, &clist);
9824 x = build_outer_var_ref (var, ctx);
9825 x = lang_hooks.decls.omp_clause_assign_op (c, unshare_expr (var4),
9827 gimplify_and_add (x, &thr01_list);
9829 tree y = (DECL_HAS_VALUE_EXPR_P (new_vard)
9830 ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
9831 if (var3)
9833 x = unshare_expr (var4);
9834 x = lang_hooks.decls.omp_clause_assign_op (c, x, var3);
9835 gimplify_and_add (x, &thrn1_list);
9836 x = unshare_expr (var4);
9837 x = lang_hooks.decls.omp_clause_assign_op (c, x, var3);
9838 gimplify_and_add (x, &thr02_list);
9840 else if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
9842 /* Otherwise, assign to it the identity element. */
9843 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
9844 tseq = copy_gimple_seq_and_replace_locals (tseq);
9845 if (!is_for_simd)
9847 if (new_vard != new_var)
9848 val = build_fold_addr_expr_loc (clause_loc, val);
9849 SET_DECL_VALUE_EXPR (new_vard, val);
9850 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
9852 SET_DECL_VALUE_EXPR (placeholder, error_mark_node);
9853 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
9854 lower_omp (&tseq, ctx);
9855 gimple_seq_add_seq (&thrn1_list, tseq);
9856 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
9857 lower_omp (&tseq, ctx);
9858 gimple_seq_add_seq (&thr02_list, tseq);
9859 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
9860 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
9861 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
9862 if (y)
9863 SET_DECL_VALUE_EXPR (new_vard, y);
9864 else
9866 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
9867 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
9871 x = unshare_expr (var4);
9872 x = lang_hooks.decls.omp_clause_assign_op (c, x, rprivam1_ref);
9873 gimplify_and_add (x, &thrn2_list);
9875 if (is_for_simd)
9877 x = unshare_expr (rprivb_ref);
9878 x = lang_hooks.decls.omp_clause_assign_op (c, x, var5);
9879 gimplify_and_add (x, &scan1_list);
9881 else
9883 if (ctx->scan_exclusive)
9885 x = unshare_expr (rprivb_ref);
9886 x = lang_hooks.decls.omp_clause_assign_op (c, x, var2);
9887 gimplify_and_add (x, &scan1_list);
9890 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
9891 tseq = copy_gimple_seq_and_replace_locals (tseq);
9892 SET_DECL_VALUE_EXPR (placeholder, var2);
9893 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
9894 lower_omp (&tseq, ctx);
9895 gimple_seq_add_seq (&scan1_list, tseq);
9897 if (ctx->scan_inclusive)
9899 x = unshare_expr (rprivb_ref);
9900 x = lang_hooks.decls.omp_clause_assign_op (c, x, var2);
9901 gimplify_and_add (x, &scan1_list);
9905 x = unshare_expr (rpriva_ref);
9906 x = lang_hooks.decls.omp_clause_assign_op (c, x,
9907 unshare_expr (var4));
9908 gimplify_and_add (x, &mdlist);
9910 x = unshare_expr (is_for_simd ? var6 : new_var);
9911 x = lang_hooks.decls.omp_clause_assign_op (c, x, var4);
9912 gimplify_and_add (x, &input2_list);
9914 val = rprivb_ref;
9915 if (new_vard != new_var)
9916 val = build_fold_addr_expr_loc (clause_loc, val);
9918 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
9919 tseq = copy_gimple_seq_and_replace_locals (tseq);
9920 SET_DECL_VALUE_EXPR (new_vard, val);
9921 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
9922 if (is_for_simd)
9924 SET_DECL_VALUE_EXPR (placeholder, var6);
9925 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
9927 else
9928 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
9929 lower_omp (&tseq, ctx);
9930 if (y)
9931 SET_DECL_VALUE_EXPR (new_vard, y);
9932 else
9934 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
9935 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
9937 if (!is_for_simd)
9939 SET_DECL_VALUE_EXPR (placeholder, new_var);
9940 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
9941 lower_omp (&tseq, ctx);
9943 gimple_seq_add_seq (&input2_list, tseq);
9945 x = build_outer_var_ref (var, ctx);
9946 x = lang_hooks.decls.omp_clause_assign_op (c, x, rpriva_ref);
9947 gimplify_and_add (x, &last_list);
9949 x = lang_hooks.decls.omp_clause_assign_op (c, var2, rprivalmk_ref);
9950 gimplify_and_add (x, &reduc_list);
9951 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
9952 tseq = copy_gimple_seq_and_replace_locals (tseq);
9953 val = rprival_ref;
9954 if (new_vard != new_var)
9955 val = build_fold_addr_expr_loc (clause_loc, val);
9956 SET_DECL_VALUE_EXPR (new_vard, val);
9957 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
9958 SET_DECL_VALUE_EXPR (placeholder, var2);
9959 lower_omp (&tseq, ctx);
9960 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
9961 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
9962 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
9963 if (y)
9964 SET_DECL_VALUE_EXPR (new_vard, y);
9965 else
9967 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
9968 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
9970 gimple_seq_add_seq (&reduc_list, tseq);
9971 x = lang_hooks.decls.omp_clause_assign_op (c, rprival_ref, var2);
9972 gimplify_and_add (x, &reduc_list);
9974 x = lang_hooks.decls.omp_clause_dtor (c, var2);
9975 if (x)
9976 gimplify_and_add (x, dlist);
9978 else
9980 x = build_outer_var_ref (var, ctx);
9981 gimplify_assign (unshare_expr (var4), x, &thr01_list);
9983 x = omp_reduction_init (c, TREE_TYPE (new_var));
9984 gimplify_assign (unshare_expr (var4), unshare_expr (x),
9985 &thrn1_list);
9986 gimplify_assign (unshare_expr (var4), x, &thr02_list);
9988 gimplify_assign (unshare_expr (var4), rprivam1_ref, &thrn2_list);
9990 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
9991 if (code == MINUS_EXPR)
9992 code = PLUS_EXPR;
9994 if (is_for_simd)
9995 gimplify_assign (unshare_expr (rprivb_ref), var5, &scan1_list);
9996 else
9998 if (ctx->scan_exclusive)
9999 gimplify_assign (unshare_expr (rprivb_ref), var2,
10000 &scan1_list);
10001 x = build2 (code, TREE_TYPE (new_var), var2, new_var);
10002 gimplify_assign (var2, x, &scan1_list);
10003 if (ctx->scan_inclusive)
10004 gimplify_assign (unshare_expr (rprivb_ref), var2,
10005 &scan1_list);
10008 gimplify_assign (unshare_expr (rpriva_ref), unshare_expr (var4),
10009 &mdlist);
10011 x = build2 (code, TREE_TYPE (new_var), var4, rprivb_ref);
10012 gimplify_assign (is_for_simd ? var6 : new_var, x, &input2_list);
10014 gimplify_assign (build_outer_var_ref (var, ctx), rpriva_ref,
10015 &last_list);
10017 x = build2 (code, TREE_TYPE (new_var), rprivalmk_ref,
10018 unshare_expr (rprival_ref));
10019 gimplify_assign (rprival_ref, x, &reduc_list);
10023 g = gimple_build_assign (ivar, PLUS_EXPR, ivar, size_one_node);
10024 gimple_seq_add_stmt (&scan1_list, g);
10025 g = gimple_build_assign (ivar, PLUS_EXPR, ivar, size_one_node);
10026 gimple_seq_add_stmt (gimple_omp_body_ptr (is_for_simd
10027 ? scan_stmt4 : scan_stmt2), g);
10029 tree controlb = create_tmp_var (boolean_type_node);
10030 tree controlp = create_tmp_var (ptr_type_node);
10031 tree nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
10032 OMP_CLAUSE_DECL (nc) = controlb;
10033 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
10034 *cp1 = nc;
10035 cp1 = &OMP_CLAUSE_CHAIN (nc);
10036 nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
10037 OMP_CLAUSE_DECL (nc) = controlp;
10038 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
10039 *cp1 = nc;
10040 cp1 = &OMP_CLAUSE_CHAIN (nc);
10041 nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
10042 OMP_CLAUSE_DECL (nc) = controlb;
10043 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
10044 *cp2 = nc;
10045 cp2 = &OMP_CLAUSE_CHAIN (nc);
10046 nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
10047 OMP_CLAUSE_DECL (nc) = controlp;
10048 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
10049 *cp2 = nc;
10050 cp2 = &OMP_CLAUSE_CHAIN (nc);
10052 *cp1 = gimple_omp_for_clauses (stmt);
10053 gimple_omp_for_set_clauses (stmt, new_clauses1);
10054 *cp2 = gimple_omp_for_clauses (new_stmt);
10055 gimple_omp_for_set_clauses (new_stmt, new_clauses2);
10057 if (is_for_simd)
10059 gimple_seq_add_seq (gimple_omp_body_ptr (scan_stmt3), scan1_list);
10060 gimple_seq_add_seq (gimple_omp_body_ptr (input_stmt4), input2_list);
10062 gsi_insert_seq_after (&input3_gsi, gimple_omp_body (input_stmt3),
10063 GSI_SAME_STMT);
10064 gsi_remove (&input3_gsi, true);
10065 gsi_insert_seq_after (&scan3_gsi, gimple_omp_body (scan_stmt3),
10066 GSI_SAME_STMT);
10067 gsi_remove (&scan3_gsi, true);
10068 gsi_insert_seq_after (&input4_gsi, gimple_omp_body (input_stmt4),
10069 GSI_SAME_STMT);
10070 gsi_remove (&input4_gsi, true);
10071 gsi_insert_seq_after (&scan4_gsi, gimple_omp_body (scan_stmt4),
10072 GSI_SAME_STMT);
10073 gsi_remove (&scan4_gsi, true);
10075 else
10077 gimple_omp_set_body (scan_stmt1, scan1_list);
10078 gimple_omp_set_body (input_stmt2, input2_list);
10081 gsi_insert_seq_after (&input1_gsi, gimple_omp_body (input_stmt1),
10082 GSI_SAME_STMT);
10083 gsi_remove (&input1_gsi, true);
10084 gsi_insert_seq_after (&scan1_gsi, gimple_omp_body (scan_stmt1),
10085 GSI_SAME_STMT);
10086 gsi_remove (&scan1_gsi, true);
10087 gsi_insert_seq_after (&input2_gsi, gimple_omp_body (input_stmt2),
10088 GSI_SAME_STMT);
10089 gsi_remove (&input2_gsi, true);
10090 gsi_insert_seq_after (&scan2_gsi, gimple_omp_body (scan_stmt2),
10091 GSI_SAME_STMT);
10092 gsi_remove (&scan2_gsi, true);
10094 gimple_seq_add_seq (body_p, clist);
10096 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
10097 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
10098 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
10099 g = gimple_build_cond (EQ_EXPR, thread_num, integer_zero_node, lab1, lab2);
10100 gimple_seq_add_stmt (body_p, g);
10101 g = gimple_build_label (lab1);
10102 gimple_seq_add_stmt (body_p, g);
10103 gimple_seq_add_seq (body_p, thr01_list);
10104 g = gimple_build_goto (lab3);
10105 gimple_seq_add_stmt (body_p, g);
10106 g = gimple_build_label (lab2);
10107 gimple_seq_add_stmt (body_p, g);
10108 gimple_seq_add_seq (body_p, thrn1_list);
10109 g = gimple_build_label (lab3);
10110 gimple_seq_add_stmt (body_p, g);
10112 g = gimple_build_assign (ivar, size_zero_node);
10113 gimple_seq_add_stmt (body_p, g);
10115 gimple_seq_add_stmt (body_p, stmt);
10116 gimple_seq_add_seq (body_p, body);
10117 gimple_seq_add_stmt (body_p, gimple_build_omp_continue (fd->loop.v,
10118 fd->loop.v));
10120 g = gimple_build_omp_return (true);
10121 gimple_seq_add_stmt (body_p, g);
10122 gimple_seq_add_seq (body_p, mdlist);
10124 lab1 = create_artificial_label (UNKNOWN_LOCATION);
10125 lab2 = create_artificial_label (UNKNOWN_LOCATION);
10126 g = gimple_build_cond (GT_EXPR, num_threads, integer_one_node, lab1, lab2);
10127 gimple_seq_add_stmt (body_p, g);
10128 g = gimple_build_label (lab1);
10129 gimple_seq_add_stmt (body_p, g);
10131 g = omp_build_barrier (NULL);
10132 gimple_seq_add_stmt (body_p, g);
10134 tree down = create_tmp_var (unsigned_type_node);
10135 g = gimple_build_assign (down, build_zero_cst (unsigned_type_node));
10136 gimple_seq_add_stmt (body_p, g);
10138 g = gimple_build_assign (k, build_one_cst (unsigned_type_node));
10139 gimple_seq_add_stmt (body_p, g);
10141 tree num_threadsu = create_tmp_var (unsigned_type_node);
10142 g = gimple_build_assign (num_threadsu, NOP_EXPR, num_threads);
10143 gimple_seq_add_stmt (body_p, g);
10145 tree thread_numu = create_tmp_var (unsigned_type_node);
10146 g = gimple_build_assign (thread_numu, NOP_EXPR, thread_num);
10147 gimple_seq_add_stmt (body_p, g);
10149 tree thread_nump1 = create_tmp_var (unsigned_type_node);
10150 g = gimple_build_assign (thread_nump1, PLUS_EXPR, thread_numu,
10151 build_int_cst (unsigned_type_node, 1));
10152 gimple_seq_add_stmt (body_p, g);
10154 lab3 = create_artificial_label (UNKNOWN_LOCATION);
10155 g = gimple_build_label (lab3);
10156 gimple_seq_add_stmt (body_p, g);
10158 tree twok = create_tmp_var (unsigned_type_node);
10159 g = gimple_build_assign (twok, LSHIFT_EXPR, k, integer_one_node);
10160 gimple_seq_add_stmt (body_p, g);
10162 tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
10163 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
10164 tree lab6 = create_artificial_label (UNKNOWN_LOCATION);
10165 g = gimple_build_cond (GT_EXPR, twok, num_threadsu, lab4, lab5);
10166 gimple_seq_add_stmt (body_p, g);
10167 g = gimple_build_label (lab4);
10168 gimple_seq_add_stmt (body_p, g);
10169 g = gimple_build_assign (down, build_all_ones_cst (unsigned_type_node));
10170 gimple_seq_add_stmt (body_p, g);
10171 g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
10172 gimple_seq_add_stmt (body_p, g);
10174 g = gimple_build_cond (EQ_EXPR, k, num_threadsu, lab6, lab5);
10175 gimple_seq_add_stmt (body_p, g);
10176 g = gimple_build_label (lab6);
10177 gimple_seq_add_stmt (body_p, g);
10179 g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
10180 gimple_seq_add_stmt (body_p, g);
10182 g = gimple_build_label (lab5);
10183 gimple_seq_add_stmt (body_p, g);
10185 g = gimple_build_assign (twok, LSHIFT_EXPR, k, integer_one_node);
10186 gimple_seq_add_stmt (body_p, g);
10188 tree cplx = create_tmp_var (build_complex_type (unsigned_type_node, false));
10189 DECL_GIMPLE_REG_P (cplx) = 1;
10190 g = gimple_build_call_internal (IFN_MUL_OVERFLOW, 2, thread_nump1, twok);
10191 gimple_call_set_lhs (g, cplx);
10192 gimple_seq_add_stmt (body_p, g);
10193 tree mul = create_tmp_var (unsigned_type_node);
10194 g = gimple_build_assign (mul, REALPART_EXPR,
10195 build1 (REALPART_EXPR, unsigned_type_node, cplx));
10196 gimple_seq_add_stmt (body_p, g);
10197 tree ovf = create_tmp_var (unsigned_type_node);
10198 g = gimple_build_assign (ovf, IMAGPART_EXPR,
10199 build1 (IMAGPART_EXPR, unsigned_type_node, cplx));
10200 gimple_seq_add_stmt (body_p, g);
10202 tree lab7 = create_artificial_label (UNKNOWN_LOCATION);
10203 tree lab8 = create_artificial_label (UNKNOWN_LOCATION);
10204 g = gimple_build_cond (EQ_EXPR, ovf, build_zero_cst (unsigned_type_node),
10205 lab7, lab8);
10206 gimple_seq_add_stmt (body_p, g);
10207 g = gimple_build_label (lab7);
10208 gimple_seq_add_stmt (body_p, g);
10210 tree andv = create_tmp_var (unsigned_type_node);
10211 g = gimple_build_assign (andv, BIT_AND_EXPR, k, down);
10212 gimple_seq_add_stmt (body_p, g);
10213 tree andvm1 = create_tmp_var (unsigned_type_node);
10214 g = gimple_build_assign (andvm1, PLUS_EXPR, andv,
10215 build_minus_one_cst (unsigned_type_node));
10216 gimple_seq_add_stmt (body_p, g);
10218 g = gimple_build_assign (l, PLUS_EXPR, mul, andvm1);
10219 gimple_seq_add_stmt (body_p, g);
10221 tree lab9 = create_artificial_label (UNKNOWN_LOCATION);
10222 g = gimple_build_cond (LT_EXPR, l, num_threadsu, lab9, lab8);
10223 gimple_seq_add_stmt (body_p, g);
10224 g = gimple_build_label (lab9);
10225 gimple_seq_add_stmt (body_p, g);
10226 gimple_seq_add_seq (body_p, reduc_list);
10227 g = gimple_build_label (lab8);
10228 gimple_seq_add_stmt (body_p, g);
10230 tree lab10 = create_artificial_label (UNKNOWN_LOCATION);
10231 tree lab11 = create_artificial_label (UNKNOWN_LOCATION);
10232 tree lab12 = create_artificial_label (UNKNOWN_LOCATION);
10233 g = gimple_build_cond (EQ_EXPR, down, build_zero_cst (unsigned_type_node),
10234 lab10, lab11);
10235 gimple_seq_add_stmt (body_p, g);
10236 g = gimple_build_label (lab10);
10237 gimple_seq_add_stmt (body_p, g);
10238 g = gimple_build_assign (k, LSHIFT_EXPR, k, integer_one_node);
10239 gimple_seq_add_stmt (body_p, g);
10240 g = gimple_build_goto (lab12);
10241 gimple_seq_add_stmt (body_p, g);
10242 g = gimple_build_label (lab11);
10243 gimple_seq_add_stmt (body_p, g);
10244 g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
10245 gimple_seq_add_stmt (body_p, g);
10246 g = gimple_build_label (lab12);
10247 gimple_seq_add_stmt (body_p, g);
10249 g = omp_build_barrier (NULL);
10250 gimple_seq_add_stmt (body_p, g);
10252 g = gimple_build_cond (NE_EXPR, k, build_zero_cst (unsigned_type_node),
10253 lab3, lab2);
10254 gimple_seq_add_stmt (body_p, g);
10256 g = gimple_build_label (lab2);
10257 gimple_seq_add_stmt (body_p, g);
10259 lab1 = create_artificial_label (UNKNOWN_LOCATION);
10260 lab2 = create_artificial_label (UNKNOWN_LOCATION);
10261 lab3 = create_artificial_label (UNKNOWN_LOCATION);
10262 g = gimple_build_cond (EQ_EXPR, thread_num, integer_zero_node, lab1, lab2);
10263 gimple_seq_add_stmt (body_p, g);
10264 g = gimple_build_label (lab1);
10265 gimple_seq_add_stmt (body_p, g);
10266 gimple_seq_add_seq (body_p, thr02_list);
10267 g = gimple_build_goto (lab3);
10268 gimple_seq_add_stmt (body_p, g);
10269 g = gimple_build_label (lab2);
10270 gimple_seq_add_stmt (body_p, g);
10271 gimple_seq_add_seq (body_p, thrn2_list);
10272 g = gimple_build_label (lab3);
10273 gimple_seq_add_stmt (body_p, g);
10275 g = gimple_build_assign (ivar, size_zero_node);
10276 gimple_seq_add_stmt (body_p, g);
10277 gimple_seq_add_stmt (body_p, new_stmt);
10278 gimple_seq_add_seq (body_p, new_body);
10280 gimple_seq new_dlist = NULL;
10281 lab1 = create_artificial_label (UNKNOWN_LOCATION);
10282 lab2 = create_artificial_label (UNKNOWN_LOCATION);
10283 tree num_threadsm1 = create_tmp_var (integer_type_node);
10284 g = gimple_build_assign (num_threadsm1, PLUS_EXPR, num_threads,
10285 integer_minus_one_node);
10286 gimple_seq_add_stmt (&new_dlist, g);
10287 g = gimple_build_cond (EQ_EXPR, thread_num, num_threadsm1, lab1, lab2);
10288 gimple_seq_add_stmt (&new_dlist, g);
10289 g = gimple_build_label (lab1);
10290 gimple_seq_add_stmt (&new_dlist, g);
10291 gimple_seq_add_seq (&new_dlist, last_list);
10292 g = gimple_build_label (lab2);
10293 gimple_seq_add_stmt (&new_dlist, g);
10294 gimple_seq_add_seq (&new_dlist, *dlist);
10295 *dlist = new_dlist;
10298 /* Lower code for an OMP loop directive. */
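/* In outline (a summary of the steps below, not additional
   semantics): lower the pre-body, create _looptemp_ temporaries for
   combined constructs, expand the data-sharing clauses, force the
   loop header expressions into temporaries, add the lastprivate
   predicate and the reduction and exit handling, and finally wrap
   everything in a new GIMPLE_BIND that replaces the original
   statement at *GSI_P. */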
10300 static void
10301 lower_omp_for (gimple_stmt_iterator *gsi_p, omp_context *ctx)
10303 tree *rhs_p, block;
10304 struct omp_for_data fd, *fdp = NULL;
10305 gomp_for *stmt = as_a <gomp_for *> (gsi_stmt (*gsi_p));
10306 gbind *new_stmt;
10307 gimple_seq omp_for_body, body, dlist, tred_ilist = NULL, tred_dlist = NULL;
10308 gimple_seq cnt_list = NULL, clist = NULL;
10309 gimple_seq oacc_head = NULL, oacc_tail = NULL;
10310 size_t i;
10312 push_gimplify_context ();
10314 lower_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
10316 block = make_node (BLOCK);
10317 new_stmt = gimple_build_bind (NULL, NULL, block);
10318 /* Replace at gsi right away, so that 'stmt' is no longer a member
10319 of a sequence, as we're going to add it to a different
10320 one below. */
10321 gsi_replace (gsi_p, new_stmt, true);
10323 /* Move the declarations of temporaries in the loop body out before we
10324 make it go away. */
10325 omp_for_body = gimple_omp_body (stmt);
10326 if (!gimple_seq_empty_p (omp_for_body)
10327 && gimple_code (gimple_seq_first_stmt (omp_for_body)) == GIMPLE_BIND)
10329 gbind *inner_bind
10330 = as_a <gbind *> (gimple_seq_first_stmt (omp_for_body));
10331 tree vars = gimple_bind_vars (inner_bind);
10332 gimple_bind_append_vars (new_stmt, vars);
10333 /* bind_vars/BLOCK_VARS are being moved to new_stmt/block; don't
10334 keep them on the inner_bind and its block. */
10335 gimple_bind_set_vars (inner_bind, NULL_TREE);
10336 if (gimple_bind_block (inner_bind))
10337 BLOCK_VARS (gimple_bind_block (inner_bind)) = NULL_TREE;
10340 if (gimple_omp_for_combined_into_p (stmt))
10342 omp_extract_for_data (stmt, &fd, NULL);
10343 fdp = &fd;
10345 /* We need two temporaries with fd.loop.v type (istart/iend)
10346 and then (fd.collapse - 1) temporaries with the same
10347 type for the count2 ... countN-1 vars if they are not constant. */
10348 size_t count = 2;
10349 tree type = fd.iter_type;
10350 if (fd.collapse > 1
10351 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
10352 count += fd.collapse - 1;
10353 bool taskreg_for
10354 = (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR
10355 || gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP);
10356 tree outerc = NULL, *pc = gimple_omp_for_clauses_ptr (stmt);
10357 tree simtc = NULL;
10358 tree clauses = *pc;
10359 if (taskreg_for)
10360 outerc
10361 = omp_find_clause (gimple_omp_taskreg_clauses (ctx->outer->stmt),
10362 OMP_CLAUSE__LOOPTEMP_);
10363 if (ctx->simt_stmt)
10364 simtc = omp_find_clause (gimple_omp_for_clauses (ctx->simt_stmt),
10365 OMP_CLAUSE__LOOPTEMP_);
10366 for (i = 0; i < count; i++)
10368 tree temp;
10369 if (taskreg_for)
10371 gcc_assert (outerc);
10372 temp = lookup_decl (OMP_CLAUSE_DECL (outerc), ctx->outer);
10373 outerc = omp_find_clause (OMP_CLAUSE_CHAIN (outerc),
10374 OMP_CLAUSE__LOOPTEMP_);
10376 else
10378 /* If there are 2 adjacent SIMD stmts, one with a _simt_
10379 clause, another without, make sure they have the same
10380 decls in _looptemp_ clauses, because the outer stmt
10381 they are combined into will look up just one inner_stmt. */
10382 if (ctx->simt_stmt)
10383 temp = OMP_CLAUSE_DECL (simtc);
10384 else
10385 temp = create_tmp_var (type);
10386 insert_decl_map (&ctx->outer->cb, temp, temp);
10388 *pc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
10389 OMP_CLAUSE_DECL (*pc) = temp;
10390 pc = &OMP_CLAUSE_CHAIN (*pc);
10391 if (ctx->simt_stmt)
10392 simtc = omp_find_clause (OMP_CLAUSE_CHAIN (simtc),
10393 OMP_CLAUSE__LOOPTEMP_);
10395 *pc = clauses;
10398 /* The pre-body and input clauses go before the lowered GIMPLE_OMP_FOR. */
10399 dlist = NULL;
10400 body = NULL;
10401 tree rclauses
10402 = omp_task_reductions_find_first (gimple_omp_for_clauses (stmt), OMP_FOR,
10403 OMP_CLAUSE_REDUCTION);
10404 tree rtmp = NULL_TREE;
10405 if (rclauses)
10407 tree type = build_pointer_type (pointer_sized_int_node);
10408 tree temp = create_tmp_var (type);
10409 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
10410 OMP_CLAUSE_DECL (c) = temp;
10411 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (stmt);
10412 gimple_omp_for_set_clauses (stmt, c);
10413 lower_omp_task_reductions (ctx, OMP_FOR,
10414 gimple_omp_for_clauses (stmt),
10415 &tred_ilist, &tred_dlist);
10416 rclauses = c;
10417 rtmp = make_ssa_name (type);
10418 gimple_seq_add_stmt (&body, gimple_build_assign (rtmp, temp));
10421 lower_lastprivate_conditional_clauses (gimple_omp_for_clauses_ptr (stmt),
10422 ctx);
10424 lower_rec_input_clauses (gimple_omp_for_clauses (stmt), &body, &dlist, ctx,
10425 fdp);
10426 gimple_seq_add_seq (rclauses ? &tred_ilist : &body,
10427 gimple_omp_for_pre_body (stmt));
10429 lower_omp (gimple_omp_body_ptr (stmt), ctx);
10431 /* Lower the header expressions. At this point, we can assume that
10432 the header is of the form:
10434 #pragma omp for (V = VAL1; V {<|>|<=|>=} VAL2; V = V [+-] VAL3)
10436 We just need to make sure that VAL1, VAL2 and VAL3 are lowered
10437 using the .omp_data_s mapping, if needed. */
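/* E.g. (illustrative), with a bound that is not a gimple invariant,
   as in

     #pragma omp for
     for (i = 0; i < n * 2; i++)

   the expression 'n * 2' is replaced by a formal temporary whose
   initialization is emitted into CNT_LIST ahead of the loop. */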
10438 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
10440 rhs_p = gimple_omp_for_initial_ptr (stmt, i);
10441 if (!is_gimple_min_invariant (*rhs_p))
10442 *rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
10443 else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
10444 recompute_tree_invariant_for_addr_expr (*rhs_p);
10446 rhs_p = gimple_omp_for_final_ptr (stmt, i);
10447 if (!is_gimple_min_invariant (*rhs_p))
10448 *rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
10449 else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
10450 recompute_tree_invariant_for_addr_expr (*rhs_p);
10452 rhs_p = &TREE_OPERAND (gimple_omp_for_incr (stmt, i), 1);
10453 if (!is_gimple_min_invariant (*rhs_p))
10454 *rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
10456 if (rclauses)
10457 gimple_seq_add_seq (&tred_ilist, cnt_list);
10458 else
10459 gimple_seq_add_seq (&body, cnt_list);
10461 /* Once lowered, extract the bounds and clauses. */
10462 omp_extract_for_data (stmt, &fd, NULL);
10464 if (is_gimple_omp_oacc (ctx->stmt)
10465 && !ctx_in_oacc_kernels_region (ctx))
10466 lower_oacc_head_tail (gimple_location (stmt),
10467 gimple_omp_for_clauses (stmt),
10468 &oacc_head, &oacc_tail, ctx);
10470 /* Add OpenACC partitioning and reduction markers just before the loop. */
10471 if (oacc_head)
10472 gimple_seq_add_seq (&body, oacc_head);
10474 lower_omp_for_lastprivate (&fd, &body, &dlist, &clist, ctx);
10476 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
10477 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
10478 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
10479 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
10481 OMP_CLAUSE_DECL (c) = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
10482 if (DECL_P (OMP_CLAUSE_LINEAR_STEP (c)))
10483 OMP_CLAUSE_LINEAR_STEP (c)
10484 = maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_LINEAR_STEP (c),
10485 ctx);
10488 bool phony_loop = (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_GRID_LOOP
10489 && gimple_omp_for_grid_phony (stmt));
10490 if ((ctx->scan_inclusive || ctx->scan_exclusive)
10491 && gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
10493 gcc_assert (!phony_loop);
10494 lower_omp_for_scan (&body, &dlist, stmt, &fd, ctx);
10496 else
10498 if (!phony_loop)
10499 gimple_seq_add_stmt (&body, stmt);
10500 gimple_seq_add_seq (&body, gimple_omp_body (stmt));
10503 if (!phony_loop)
10504 gimple_seq_add_stmt (&body, gimple_build_omp_continue (fd.loop.v,
10505 fd.loop.v));
10507 /* After the loop, add exit clauses. */
10508 lower_reduction_clauses (gimple_omp_for_clauses (stmt), &body, &clist, ctx);
10510 if (clist)
10512 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START);
10513 gcall *g = gimple_build_call (fndecl, 0);
10514 gimple_seq_add_stmt (&body, g);
10515 gimple_seq_add_seq (&body, clist);
10516 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END);
10517 g = gimple_build_call (fndecl, 0);
10518 gimple_seq_add_stmt (&body, g);
10521 if (ctx->cancellable)
10522 gimple_seq_add_stmt (&body, gimple_build_label (ctx->cancel_label));
10524 gimple_seq_add_seq (&body, dlist);
10526 if (rclauses)
10528 gimple_seq_add_seq (&tred_ilist, body);
10529 body = tred_ilist;
10532 body = maybe_catch_exception (body);
10534 if (!phony_loop)
10536 /* Region exit marker goes at the end of the loop body. */
10537 gimple *g = gimple_build_omp_return (fd.have_nowait);
10538 gimple_seq_add_stmt (&body, g);
10540 gimple_seq_add_seq (&body, tred_dlist);
10542 maybe_add_implicit_barrier_cancel (ctx, g, &body);
10544 if (rclauses)
10545 OMP_CLAUSE_DECL (rclauses) = rtmp;
10548 /* Add OpenACC joining and reduction markers just after the loop. */
10549 if (oacc_tail)
10550 gimple_seq_add_seq (&body, oacc_tail);
10552 pop_gimplify_context (new_stmt);
10554 gimple_bind_append_vars (new_stmt, ctx->block_vars);
10555 maybe_remove_omp_member_access_dummy_vars (new_stmt);
10556 BLOCK_VARS (block) = gimple_bind_vars (new_stmt);
10557 if (BLOCK_VARS (block))
10558 TREE_USED (block) = 1;
10560 gimple_bind_set_body (new_stmt, body);
10561 gimple_omp_set_body (stmt, NULL);
10562 gimple_omp_for_set_pre_body (stmt, NULL);
10565 /* Callback for walk_stmts. Check whether the statements walked contain
10566 exactly one GIMPLE_OMP_FOR or GIMPLE_OMP_SECTIONS and nothing else. */
10568 static tree
10569 check_combined_parallel (gimple_stmt_iterator *gsi_p,
10570 bool *handled_ops_p,
10571 struct walk_stmt_info *wi)
10573 int *info = (int *) wi->info;
10574 gimple *stmt = gsi_stmt (*gsi_p);
10576 *handled_ops_p = true;
10577 switch (gimple_code (stmt))
10579 WALK_SUBSTMTS;
10581 case GIMPLE_DEBUG:
10582 break;
10583 case GIMPLE_OMP_FOR:
10584 case GIMPLE_OMP_SECTIONS:
10585 *info = *info == 0 ? 1 : -1;
10586 break;
10587 default:
10588 *info = -1;
10589 break;
10591 return NULL;
10594 struct omp_taskcopy_context
10596 /* This field must be at the beginning, as we do "inheritance": Some
10597 callback functions for tree-inline.c (e.g., omp_copy_decl)
10598 receive a copy_body_data pointer that is up-casted to an
10599 omp_context pointer. */
10600 copy_body_data cb;
10601 omp_context *ctx;
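/* Callback for the copy_body machinery: a variable that has a field in
   the task's srecord_type is remapped to a fresh temporary of the same
   type; any other variable is returned unchanged. */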
10604 static tree
10605 task_copyfn_copy_decl (tree var, copy_body_data *cb)
10607 struct omp_taskcopy_context *tcctx = (struct omp_taskcopy_context *) cb;
10609 if (splay_tree_lookup (tcctx->ctx->sfield_map, (splay_tree_key) var))
10610 return create_tmp_var (TREE_TYPE (var));
10612 return var;
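/* Build a remapped variant of the variably modified record ORIG_TYPE:
   each field's type, size and offset are copied through TCCTX->cb and
   the old-field -> new-field mapping is recorded in TCCTX's decl map. */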
10615 static tree
10616 task_copyfn_remap_type (struct omp_taskcopy_context *tcctx, tree orig_type)
10618 tree name, new_fields = NULL, type, f;
10620 type = lang_hooks.types.make_type (RECORD_TYPE);
10621 name = DECL_NAME (TYPE_NAME (orig_type));
10622 name = build_decl (gimple_location (tcctx->ctx->stmt),
10623 TYPE_DECL, name, type);
10624 TYPE_NAME (type) = name;
10626 for (f = TYPE_FIELDS (orig_type); f ; f = TREE_CHAIN (f))
10628 tree new_f = copy_node (f);
10629 DECL_CONTEXT (new_f) = type;
10630 TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &tcctx->cb);
10631 TREE_CHAIN (new_f) = new_fields;
10632 walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &tcctx->cb, NULL);
10633 walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r, &tcctx->cb, NULL);
10634 walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
10635 &tcctx->cb, NULL);
10636 new_fields = new_f;
10637 tcctx->cb.decl_map->put (f, new_f);
10639 TYPE_FIELDS (type) = nreverse (new_fields);
10640 layout_type (type);
10641 return type;
10644 /* Create task copyfn. */
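/* Conceptually (an illustrative sketch with made-up member names, not
   the emitted GIMPLE), the generated function has the shape

     void task_copyfn (struct .omp_data_s *dst, struct .omp_data_s1 *src)
     {
       dst->some_shared_var = src->some_shared_var;  // pointer copy
       copy_ctor (&dst->some_fp_var, &src->some_fp_var);
     }

   where DST corresponds to ARG/record_type, SRC to SARG/srecord_type
   below, and firstprivate fields are copy-constructed rather than
   plainly assigned. */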
10646 static void
10647 create_task_copyfn (gomp_task *task_stmt, omp_context *ctx)
10649 struct function *child_cfun;
10650 tree child_fn, t, c, src, dst, f, sf, arg, sarg, decl;
10651 tree record_type, srecord_type, bind, list;
10652 bool record_needs_remap = false, srecord_needs_remap = false;
10653 splay_tree_node n;
10654 struct omp_taskcopy_context tcctx;
10655 location_t loc = gimple_location (task_stmt);
10656 size_t looptempno = 0;
10658 child_fn = gimple_omp_task_copy_fn (task_stmt);
10659 child_cfun = DECL_STRUCT_FUNCTION (child_fn);
10660 gcc_assert (child_cfun->cfg == NULL);
10661 DECL_SAVED_TREE (child_fn) = alloc_stmt_list ();
10663 /* Reset DECL_CONTEXT on function arguments. */
10664 for (t = DECL_ARGUMENTS (child_fn); t; t = DECL_CHAIN (t))
10665 DECL_CONTEXT (t) = child_fn;
10667 /* Populate the function. */
10668 push_gimplify_context ();
10669 push_cfun (child_cfun);
10671 bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
10672 TREE_SIDE_EFFECTS (bind) = 1;
10673 list = NULL;
10674 DECL_SAVED_TREE (child_fn) = bind;
10675 DECL_SOURCE_LOCATION (child_fn) = gimple_location (task_stmt);
10677 /* Remap src and dst argument types if needed. */
10678 record_type = ctx->record_type;
10679 srecord_type = ctx->srecord_type;
10680 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
10681 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
10683 record_needs_remap = true;
10684 break;
10686 for (f = TYPE_FIELDS (srecord_type); f ; f = DECL_CHAIN (f))
10687 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
10689 srecord_needs_remap = true;
10690 break;
10693 if (record_needs_remap || srecord_needs_remap)
10695 memset (&tcctx, '\0', sizeof (tcctx));
10696 tcctx.cb.src_fn = ctx->cb.src_fn;
10697 tcctx.cb.dst_fn = child_fn;
10698 tcctx.cb.src_node = cgraph_node::get (tcctx.cb.src_fn);
10699 gcc_checking_assert (tcctx.cb.src_node);
10700 tcctx.cb.dst_node = tcctx.cb.src_node;
10701 tcctx.cb.src_cfun = ctx->cb.src_cfun;
10702 tcctx.cb.copy_decl = task_copyfn_copy_decl;
10703 tcctx.cb.eh_lp_nr = 0;
10704 tcctx.cb.transform_call_graph_edges = CB_CGE_MOVE;
10705 tcctx.cb.decl_map = new hash_map<tree, tree>;
10706 tcctx.ctx = ctx;
10708 if (record_needs_remap)
10709 record_type = task_copyfn_remap_type (&tcctx, record_type);
10710 if (srecord_needs_remap)
10711 srecord_type = task_copyfn_remap_type (&tcctx, srecord_type);
10713 else
10714 tcctx.cb.decl_map = NULL;
10716 arg = DECL_ARGUMENTS (child_fn);
10717 TREE_TYPE (arg) = build_pointer_type (record_type);
10718 sarg = DECL_CHAIN (arg);
10719 TREE_TYPE (sarg) = build_pointer_type (srecord_type);
10721 /* First pass: initialize temporaries used in record_type and srecord_type
10722 sizes and field offsets. */
10723 if (tcctx.cb.decl_map)
10724 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
10725 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
10727 tree *p;
10729 decl = OMP_CLAUSE_DECL (c);
10730 p = tcctx.cb.decl_map->get (decl);
10731 if (p == NULL)
10732 continue;
10733 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
10734 sf = (tree) n->value;
10735 sf = *tcctx.cb.decl_map->get (sf);
10736 src = build_simple_mem_ref_loc (loc, sarg);
10737 src = omp_build_component_ref (src, sf);
10738 t = build2 (MODIFY_EXPR, TREE_TYPE (*p), *p, src);
10739 append_to_statement_list (t, &list);
10742 /* Second pass: copy shared var pointers and copy-construct non-VLA
10743 firstprivate vars. */
10744 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
10745 switch (OMP_CLAUSE_CODE (c))
10747 splay_tree_key key;
10748 case OMP_CLAUSE_SHARED:
10749 decl = OMP_CLAUSE_DECL (c);
10750 key = (splay_tree_key) decl;
10751 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
10752 key = (splay_tree_key) &DECL_UID (decl);
10753 n = splay_tree_lookup (ctx->field_map, key);
10754 if (n == NULL)
10755 break;
10756 f = (tree) n->value;
10757 if (tcctx.cb.decl_map)
10758 f = *tcctx.cb.decl_map->get (f);
10759 n = splay_tree_lookup (ctx->sfield_map, key);
10760 sf = (tree) n->value;
10761 if (tcctx.cb.decl_map)
10762 sf = *tcctx.cb.decl_map->get (sf);
10763 src = build_simple_mem_ref_loc (loc, sarg);
10764 src = omp_build_component_ref (src, sf);
10765 dst = build_simple_mem_ref_loc (loc, arg);
10766 dst = omp_build_component_ref (dst, f);
10767 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
10768 append_to_statement_list (t, &list);
10769 break;
10770 case OMP_CLAUSE_REDUCTION:
10771 case OMP_CLAUSE_IN_REDUCTION:
10772 decl = OMP_CLAUSE_DECL (c);
10773 if (TREE_CODE (decl) == MEM_REF)
10775 decl = TREE_OPERAND (decl, 0);
10776 if (TREE_CODE (decl) == POINTER_PLUS_EXPR)
10777 decl = TREE_OPERAND (decl, 0);
10778 if (TREE_CODE (decl) == INDIRECT_REF
10779 || TREE_CODE (decl) == ADDR_EXPR)
10780 decl = TREE_OPERAND (decl, 0);
10782 key = (splay_tree_key) decl;
10783 n = splay_tree_lookup (ctx->field_map, key);
10784 if (n == NULL)
10785 break;
10786 f = (tree) n->value;
10787 if (tcctx.cb.decl_map)
10788 f = *tcctx.cb.decl_map->get (f);
10789 n = splay_tree_lookup (ctx->sfield_map, key);
10790 sf = (tree) n->value;
10791 if (tcctx.cb.decl_map)
10792 sf = *tcctx.cb.decl_map->get (sf);
10793 src = build_simple_mem_ref_loc (loc, sarg);
10794 src = omp_build_component_ref (src, sf);
10795 if (decl != OMP_CLAUSE_DECL (c)
10796 && TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
10797 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
10798 src = build_simple_mem_ref_loc (loc, src);
10799 dst = build_simple_mem_ref_loc (loc, arg);
10800 dst = omp_build_component_ref (dst, f);
10801 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
10802 append_to_statement_list (t, &list);
10803 break;
10804 case OMP_CLAUSE__LOOPTEMP_:
10805 /* Fields for the first two _looptemp_ clauses are initialized by
10806 GOMP_taskloop*; the rest are handled like firstprivate. */
10807 if (looptempno < 2)
10809 looptempno++;
10810 break;
10812 /* FALLTHRU */
10813 case OMP_CLAUSE__REDUCTEMP_:
10814 case OMP_CLAUSE_FIRSTPRIVATE:
10815 decl = OMP_CLAUSE_DECL (c);
10816 if (is_variable_sized (decl))
10817 break;
10818 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
10819 if (n == NULL)
10820 break;
10821 f = (tree) n->value;
10822 if (tcctx.cb.decl_map)
10823 f = *tcctx.cb.decl_map->get (f);
10824 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
10825 if (n != NULL)
10827 sf = (tree) n->value;
10828 if (tcctx.cb.decl_map)
10829 sf = *tcctx.cb.decl_map->get (sf);
10830 src = build_simple_mem_ref_loc (loc, sarg);
10831 src = omp_build_component_ref (src, sf);
10832 if (use_pointer_for_field (decl, NULL) || omp_is_reference (decl))
10833 src = build_simple_mem_ref_loc (loc, src);
10835 else
10836 src = decl;
10837 dst = build_simple_mem_ref_loc (loc, arg);
10838 dst = omp_build_component_ref (dst, f);
10839 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
10840 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
10841 else
10842 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
10843 append_to_statement_list (t, &list);
10844 break;
10845 case OMP_CLAUSE_PRIVATE:
10846 if (! OMP_CLAUSE_PRIVATE_OUTER_REF (c))
10847 break;
10848 decl = OMP_CLAUSE_DECL (c);
10849 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
10850 f = (tree) n->value;
10851 if (tcctx.cb.decl_map)
10852 f = *tcctx.cb.decl_map->get (f);
10853 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
10854 if (n != NULL)
10856 sf = (tree) n->value;
10857 if (tcctx.cb.decl_map)
10858 sf = *tcctx.cb.decl_map->get (sf);
10859 src = build_simple_mem_ref_loc (loc, sarg);
10860 src = omp_build_component_ref (src, sf);
10861 if (use_pointer_for_field (decl, NULL))
10862 src = build_simple_mem_ref_loc (loc, src);
10864 else
10865 src = decl;
10866 dst = build_simple_mem_ref_loc (loc, arg);
10867 dst = omp_build_component_ref (dst, f);
10868 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
10869 append_to_statement_list (t, &list);
10870 break;
10871 default:
10872 break;
10873 }
10875 /* Last pass: handle VLA firstprivates.  */
10876 if (tcctx.cb.decl_map)
10877 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
10878 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
10879 {
10880 tree ind, ptr, df;
10882 decl = OMP_CLAUSE_DECL (c);
10883 if (!is_variable_sized (decl))
10884 continue;
10885 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
10886 if (n == NULL)
10887 continue;
10888 f = (tree) n->value;
10889 f = *tcctx.cb.decl_map->get (f);
10890 gcc_assert (DECL_HAS_VALUE_EXPR_P (decl));
10891 ind = DECL_VALUE_EXPR (decl);
10892 gcc_assert (TREE_CODE (ind) == INDIRECT_REF);
10893 gcc_assert (DECL_P (TREE_OPERAND (ind, 0)));
10894 n = splay_tree_lookup (ctx->sfield_map,
10895 (splay_tree_key) TREE_OPERAND (ind, 0));
10896 sf = (tree) n->value;
10897 sf = *tcctx.cb.decl_map->get (sf);
10898 src = build_simple_mem_ref_loc (loc, sarg);
10899 src = omp_build_component_ref (src, sf);
10900 src = build_simple_mem_ref_loc (loc, src);
10901 dst = build_simple_mem_ref_loc (loc, arg);
10902 dst = omp_build_component_ref (dst, f);
10903 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
10904 append_to_statement_list (t, &list);
10905 n = splay_tree_lookup (ctx->field_map,
10906 (splay_tree_key) TREE_OPERAND (ind, 0));
10907 df = (tree) n->value;
10908 df = *tcctx.cb.decl_map->get (df);
10909 ptr = build_simple_mem_ref_loc (loc, arg);
10910 ptr = omp_build_component_ref (ptr, df);
10911 t = build2 (MODIFY_EXPR, TREE_TYPE (ptr), ptr,
10912 build_fold_addr_expr_loc (loc, dst));
10913 append_to_statement_list (t, &list);
10914 }
10916 t = build1 (RETURN_EXPR, void_type_node, NULL);
10917 append_to_statement_list (t, &list);
10919 if (tcctx.cb.decl_map)
10920 delete tcctx.cb.decl_map;
10921 pop_gimplify_context (NULL);
10922 BIND_EXPR_BODY (bind) = list;
10923 pop_cfun ();
10924 }
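/* For illustration only (a sketch; the record and field names are made
   up): for a construct such as

     #pragma omp task firstprivate (x) shared (s)

   the copy function built above behaves roughly like

     void task_copyfn (struct omp_data_t *dst, struct omp_data_s *src)
     {
       dst->x = src->x;   // firstprivate: value copied (or copy-ctor'ed)
       dst->s = src->s;   // shared: only the pointer is forwarded
     }

   and is invoked by libgomp when the task's data block is set up.  */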
10926 static void
10927 lower_depend_clauses (tree *pclauses, gimple_seq *iseq, gimple_seq *oseq)
10928 {
10929 tree c, clauses;
10930 gimple *g;
10931 size_t cnt[4] = { 0, 0, 0, 0 }, idx = 2, i;
10933 clauses = omp_find_clause (*pclauses, OMP_CLAUSE_DEPEND);
10934 gcc_assert (clauses);
10935 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
10936 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
10937 switch (OMP_CLAUSE_DEPEND_KIND (c))
10938 {
10939 case OMP_CLAUSE_DEPEND_LAST:
10940 /* Lowering already done at gimplification.  */
10941 return;
10942 case OMP_CLAUSE_DEPEND_IN:
10943 cnt[2]++;
10944 break;
10945 case OMP_CLAUSE_DEPEND_OUT:
10946 case OMP_CLAUSE_DEPEND_INOUT:
10947 cnt[0]++;
10948 break;
10949 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
10950 cnt[1]++;
10951 break;
10952 case OMP_CLAUSE_DEPEND_DEPOBJ:
10953 cnt[3]++;
10954 break;
10955 case OMP_CLAUSE_DEPEND_SOURCE:
10956 case OMP_CLAUSE_DEPEND_SINK:
10957 /* FALLTHRU */
10958 default:
10959 gcc_unreachable ();
10960 }
10961 if (cnt[1] || cnt[3])
10962 idx = 5;
10963 size_t total = cnt[0] + cnt[1] + cnt[2] + cnt[3];
10964 tree type = build_array_type_nelts (ptr_type_node, total + idx);
10965 tree array = create_tmp_var (type);
10966 TREE_ADDRESSABLE (array) = 1;
10967 tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
10968 NULL_TREE);
10969 if (idx == 5)
10970 {
10971 g = gimple_build_assign (r, build_int_cst (ptr_type_node, 0));
10972 gimple_seq_add_stmt (iseq, g);
10973 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
10974 NULL_TREE);
10975 }
10976 g = gimple_build_assign (r, build_int_cst (ptr_type_node, total));
10977 gimple_seq_add_stmt (iseq, g);
10978 for (i = 0; i < (idx == 5 ? 3 : 1); i++)
10979 {
10980 r = build4 (ARRAY_REF, ptr_type_node, array,
10981 size_int (i + 1 + (idx == 5)), NULL_TREE, NULL_TREE);
10982 g = gimple_build_assign (r, build_int_cst (ptr_type_node, cnt[i]));
10983 gimple_seq_add_stmt (iseq, g);
10984 }
10985 for (i = 0; i < 4; i++)
10986 {
10987 if (cnt[i] == 0)
10988 continue;
10989 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
10990 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
10991 continue;
10992 else
10993 {
10994 switch (OMP_CLAUSE_DEPEND_KIND (c))
10995 {
10996 case OMP_CLAUSE_DEPEND_IN:
10997 if (i != 2)
10998 continue;
10999 break;
11000 case OMP_CLAUSE_DEPEND_OUT:
11001 case OMP_CLAUSE_DEPEND_INOUT:
11002 if (i != 0)
11003 continue;
11004 break;
11005 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
11006 if (i != 1)
11007 continue;
11008 break;
11009 case OMP_CLAUSE_DEPEND_DEPOBJ:
11010 if (i != 3)
11011 continue;
11012 break;
11013 default:
11014 gcc_unreachable ();
11015 }
11016 tree t = OMP_CLAUSE_DECL (c);
11017 t = fold_convert (ptr_type_node, t);
11018 gimplify_expr (&t, iseq, NULL, is_gimple_val, fb_rvalue);
11019 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (idx++),
11020 NULL_TREE, NULL_TREE);
11021 g = gimple_build_assign (r, t);
11022 gimple_seq_add_stmt (iseq, g);
11023 }
11024 }
11025 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
11026 OMP_CLAUSE_DEPEND_KIND (c) = OMP_CLAUSE_DEPEND_LAST;
11027 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
11028 OMP_CLAUSE_CHAIN (c) = *pclauses;
11029 *pclauses = c;
11030 tree clobber = build_clobber (type);
11031 g = gimple_build_assign (array, clobber);
11032 gimple_seq_add_stmt (oseq, g);
11033 }
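/* For illustration (a sketch derived from the counts above): given

     #pragma omp task depend(in: a) depend(out: b) depend(mutexinoutset: c)

   cnt[] is { 1, 1, 1, 0 }, idx starts at 5 because a mutexinoutset
   entry is present, and the array is filled as

     { 0, 3, 1, 1, 1, &b, &c, &a }

   i.e. a zero header slot, the total count, the out/inout count, the
   mutexinoutset count, the in count, then the addresses grouped in that
   order.  With only in/out/inout entries the short form
   { total, out/inout-count, addresses... } is used instead.  */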
11035 /* Lower the OpenMP parallel or task directive in the current statement
11036 in GSI_P. CTX holds context information for the directive. */
11038 static void
11039 lower_omp_taskreg (gimple_stmt_iterator *gsi_p, omp_context *ctx)
11040 {
11041 tree clauses;
11042 tree child_fn, t;
11043 gimple *stmt = gsi_stmt (*gsi_p);
11044 gbind *par_bind, *bind, *dep_bind = NULL;
11045 gimple_seq par_body;
11046 location_t loc = gimple_location (stmt);
11048 clauses = gimple_omp_taskreg_clauses (stmt);
11049 if (gimple_code (stmt) == GIMPLE_OMP_TASK
11050 && gimple_omp_task_taskwait_p (stmt))
11051 {
11052 par_bind = NULL;
11053 par_body = NULL;
11054 }
11055 else
11056 {
11057 par_bind
11058 = as_a <gbind *> (gimple_seq_first_stmt (gimple_omp_body (stmt)));
11059 par_body = gimple_bind_body (par_bind);
11060 }
11061 child_fn = ctx->cb.dst_fn;
11062 if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL
11063 && !gimple_omp_parallel_combined_p (stmt))
11064 {
11065 struct walk_stmt_info wi;
11066 int ws_num = 0;
11068 memset (&wi, 0, sizeof (wi));
11069 wi.info = &ws_num;
11070 wi.val_only = true;
11071 walk_gimple_seq (par_body, check_combined_parallel, NULL, &wi);
11072 if (ws_num == 1)
11073 gimple_omp_parallel_set_combined_p (stmt, true);
11074 }
11075 gimple_seq dep_ilist = NULL;
11076 gimple_seq dep_olist = NULL;
11077 if (gimple_code (stmt) == GIMPLE_OMP_TASK
11078 && omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
11079 {
11080 push_gimplify_context ();
11081 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
11082 lower_depend_clauses (gimple_omp_task_clauses_ptr (stmt),
11083 &dep_ilist, &dep_olist);
11084 }
11086 if (gimple_code (stmt) == GIMPLE_OMP_TASK
11087 && gimple_omp_task_taskwait_p (stmt))
11088 {
11089 if (dep_bind)
11090 {
11091 gsi_replace (gsi_p, dep_bind, true);
11092 gimple_bind_add_seq (dep_bind, dep_ilist);
11093 gimple_bind_add_stmt (dep_bind, stmt);
11094 gimple_bind_add_seq (dep_bind, dep_olist);
11095 pop_gimplify_context (dep_bind);
11096 }
11097 return;
11098 }
11100 if (ctx->srecord_type)
11101 create_task_copyfn (as_a <gomp_task *> (stmt), ctx);
11103 gimple_seq tskred_ilist = NULL;
11104 gimple_seq tskred_olist = NULL;
11105 if ((is_task_ctx (ctx)
11106 && gimple_omp_task_taskloop_p (ctx->stmt)
11107 && omp_find_clause (gimple_omp_task_clauses (ctx->stmt),
11108 OMP_CLAUSE_REDUCTION))
11109 || (is_parallel_ctx (ctx)
11110 && omp_find_clause (gimple_omp_parallel_clauses (stmt),
11111 OMP_CLAUSE__REDUCTEMP_)))
11112 {
11113 if (dep_bind == NULL)
11114 {
11115 push_gimplify_context ();
11116 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
11117 }
11118 lower_omp_task_reductions (ctx, is_task_ctx (ctx) ? OMP_TASKLOOP
11119 : OMP_PARALLEL,
11120 gimple_omp_taskreg_clauses (ctx->stmt),
11121 &tskred_ilist, &tskred_olist);
11122 }
11124 push_gimplify_context ();
11126 gimple_seq par_olist = NULL;
11127 gimple_seq par_ilist = NULL;
11128 gimple_seq par_rlist = NULL;
11129 bool phony_construct = gimple_code (stmt) == GIMPLE_OMP_PARALLEL
11130 && gimple_omp_parallel_grid_phony (as_a <gomp_parallel *> (stmt));
11131 if (phony_construct && ctx->record_type)
11132 {
11133 gcc_checking_assert (!ctx->receiver_decl);
11134 ctx->receiver_decl = create_tmp_var
11135 (build_reference_type (ctx->record_type), ".omp_rec");
11136 }
11137 lower_rec_input_clauses (clauses, &par_ilist, &par_olist, ctx, NULL);
11138 lower_omp (&par_body, ctx);
11139 if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL)
11140 lower_reduction_clauses (clauses, &par_rlist, NULL, ctx);
11142 /* Declare all the variables created by mapping and the variables
11143 declared in the scope of the parallel body. */
11144 record_vars_into (ctx->block_vars, child_fn);
11145 maybe_remove_omp_member_access_dummy_vars (par_bind);
11146 record_vars_into (gimple_bind_vars (par_bind), child_fn);
11148 if (ctx->record_type)
11149 {
11150 ctx->sender_decl
11151 = create_tmp_var (ctx->srecord_type ? ctx->srecord_type
11152 : ctx->record_type, ".omp_data_o");
11153 DECL_NAMELESS (ctx->sender_decl) = 1;
11154 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
11155 gimple_omp_taskreg_set_data_arg (stmt, ctx->sender_decl);
11156 }
11158 gimple_seq olist = NULL;
11159 gimple_seq ilist = NULL;
11160 lower_send_clauses (clauses, &ilist, &olist, ctx);
11161 lower_send_shared_vars (&ilist, &olist, ctx);
11163 if (ctx->record_type)
11164 {
11165 tree clobber = build_clobber (TREE_TYPE (ctx->sender_decl));
11166 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
11167 clobber));
11168 }
11170 /* Once all the expansions are done, sequence all the different
11171 fragments inside gimple_omp_body. */
11173 gimple_seq new_body = NULL;
11175 if (ctx->record_type)
11176 {
11177 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
11178 /* fixup_child_record_type might have changed receiver_decl's type.  */
11179 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
11180 gimple_seq_add_stmt (&new_body,
11181 gimple_build_assign (ctx->receiver_decl, t));
11182 }
11184 gimple_seq_add_seq (&new_body, par_ilist);
11185 gimple_seq_add_seq (&new_body, par_body);
11186 gimple_seq_add_seq (&new_body, par_rlist);
11187 if (ctx->cancellable)
11188 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
11189 gimple_seq_add_seq (&new_body, par_olist);
11190 new_body = maybe_catch_exception (new_body);
11191 if (gimple_code (stmt) == GIMPLE_OMP_TASK)
11192 gimple_seq_add_stmt (&new_body,
11193 gimple_build_omp_continue (integer_zero_node,
11194 integer_zero_node));
11195 if (!phony_construct)
11196 {
11197 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
11198 gimple_omp_set_body (stmt, new_body);
11199 }
11201 if (dep_bind && gimple_bind_block (par_bind) == NULL_TREE)
11202 bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
11203 else
11204 bind = gimple_build_bind (NULL, NULL, gimple_bind_block (par_bind));
11205 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
11206 gimple_bind_add_seq (bind, ilist);
11207 if (!phony_construct)
11208 gimple_bind_add_stmt (bind, stmt);
11209 else
11210 gimple_bind_add_seq (bind, new_body);
11211 gimple_bind_add_seq (bind, olist);
11213 pop_gimplify_context (NULL);
11215 if (dep_bind)
11216 {
11217 gimple_bind_add_seq (dep_bind, dep_ilist);
11218 gimple_bind_add_seq (dep_bind, tskred_ilist);
11219 gimple_bind_add_stmt (dep_bind, bind);
11220 gimple_bind_add_seq (dep_bind, tskred_olist);
11221 gimple_bind_add_seq (dep_bind, dep_olist);
11222 pop_gimplify_context (dep_bind);
11223 }
11224 }
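/* For illustration (a sketch of the nesting built above, not verbatim
   dump output): a task with depend and task-reduction clauses ends up as

     dep_bind {
       dep_ilist      // initialize the depend address array
       tskred_ilist   // set up task reductions
       bind {
         ilist        // fill .omp_data_o from the send clauses
         GIMPLE_OMP_TASK ...
         olist        // clobber .omp_data_o afterwards
       }
       tskred_olist
       dep_olist      // clobber the depend array
     }  */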
11226 /* Lower the GIMPLE_OMP_TARGET in the current statement
11227 in GSI_P. CTX holds context information for the directive. */
11229 static void
11230 lower_omp_target (gimple_stmt_iterator *gsi_p, omp_context *ctx)
11231 {
11232 tree clauses;
11233 tree child_fn, t, c;
11234 gomp_target *stmt = as_a <gomp_target *> (gsi_stmt (*gsi_p));
11235 gbind *tgt_bind, *bind, *dep_bind = NULL;
11236 gimple_seq tgt_body, olist, ilist, fplist, new_body;
11237 location_t loc = gimple_location (stmt);
11238 bool offloaded, data_region;
11239 unsigned int map_cnt = 0;
11241 offloaded = is_gimple_omp_offloaded (stmt);
11242 switch (gimple_omp_target_kind (stmt))
11243 {
11244 case GF_OMP_TARGET_KIND_REGION:
11245 case GF_OMP_TARGET_KIND_UPDATE:
11246 case GF_OMP_TARGET_KIND_ENTER_DATA:
11247 case GF_OMP_TARGET_KIND_EXIT_DATA:
11248 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
11249 case GF_OMP_TARGET_KIND_OACC_KERNELS:
11250 case GF_OMP_TARGET_KIND_OACC_UPDATE:
11251 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
11252 case GF_OMP_TARGET_KIND_OACC_DECLARE:
11253 data_region = false;
11254 break;
11255 case GF_OMP_TARGET_KIND_DATA:
11256 case GF_OMP_TARGET_KIND_OACC_DATA:
11257 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
11258 data_region = true;
11259 break;
11260 default:
11261 gcc_unreachable ();
11262 }
11264 clauses = gimple_omp_target_clauses (stmt);
11266 gimple_seq dep_ilist = NULL;
11267 gimple_seq dep_olist = NULL;
11268 if (omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
11269 {
11270 push_gimplify_context ();
11271 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
11272 lower_depend_clauses (gimple_omp_target_clauses_ptr (stmt),
11273 &dep_ilist, &dep_olist);
11274 }
11276 tgt_bind = NULL;
11277 tgt_body = NULL;
11278 if (offloaded)
11279 {
11280 tgt_bind = gimple_seq_first_stmt_as_a_bind (gimple_omp_body (stmt));
11281 tgt_body = gimple_bind_body (tgt_bind);
11282 }
11283 else if (data_region)
11284 tgt_body = gimple_omp_body (stmt);
11285 child_fn = ctx->cb.dst_fn;
11287 push_gimplify_context ();
11288 fplist = NULL;
11290 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
11291 switch (OMP_CLAUSE_CODE (c))
11293 tree var, x;
11295 default:
11296 break;
11297 case OMP_CLAUSE_MAP:
11298 #if CHECKING_P
11299 /* First check what we're prepared to handle in the following. */
11300 switch (OMP_CLAUSE_MAP_KIND (c))
11302 case GOMP_MAP_ALLOC:
11303 case GOMP_MAP_TO:
11304 case GOMP_MAP_FROM:
11305 case GOMP_MAP_TOFROM:
11306 case GOMP_MAP_POINTER:
11307 case GOMP_MAP_TO_PSET:
11308 case GOMP_MAP_DELETE:
11309 case GOMP_MAP_RELEASE:
11310 case GOMP_MAP_ALWAYS_TO:
11311 case GOMP_MAP_ALWAYS_FROM:
11312 case GOMP_MAP_ALWAYS_TOFROM:
11313 case GOMP_MAP_FIRSTPRIVATE_POINTER:
11314 case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
11315 case GOMP_MAP_STRUCT:
11316 case GOMP_MAP_ALWAYS_POINTER:
11317 break;
11318 case GOMP_MAP_FORCE_ALLOC:
11319 case GOMP_MAP_FORCE_TO:
11320 case GOMP_MAP_FORCE_FROM:
11321 case GOMP_MAP_FORCE_TOFROM:
11322 case GOMP_MAP_FORCE_PRESENT:
11323 case GOMP_MAP_FORCE_DEVICEPTR:
11324 case GOMP_MAP_DEVICE_RESIDENT:
11325 case GOMP_MAP_LINK:
11326 gcc_assert (is_gimple_omp_oacc (stmt));
11327 break;
11328 default:
11329 gcc_unreachable ();
11331 #endif
11332 /* FALLTHRU */
11333 case OMP_CLAUSE_TO:
11334 case OMP_CLAUSE_FROM:
11335 oacc_firstprivate:
11336 var = OMP_CLAUSE_DECL (c);
11337 if (!DECL_P (var))
11338 {
11339 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP
11340 || (!OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
11341 && (OMP_CLAUSE_MAP_KIND (c)
11342 != GOMP_MAP_FIRSTPRIVATE_POINTER)))
11343 map_cnt++;
11344 continue;
11345 }
11347 if (DECL_SIZE (var)
11348 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
11349 {
11350 tree var2 = DECL_VALUE_EXPR (var);
11351 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
11352 var2 = TREE_OPERAND (var2, 0);
11353 gcc_assert (DECL_P (var2));
11354 var = var2;
11355 }
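/* For illustration (a sketch; the temporary's name is made up): for a
   VLA such as "int a[n]", the gimplifier has already given the decl a
   DECL_VALUE_EXPR of the form "*a.0" where a.0 is an artificial
   pointer; the block above peels off the INDIRECT_REF so the mapping
   machinery operates on a.0 itself.  */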
11357 if (offloaded
11358 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
11359 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
11360 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
11362 if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
11364 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx))
11365 && varpool_node::get_create (var)->offloadable)
11366 continue;
11368 tree type = build_pointer_type (TREE_TYPE (var));
11369 tree new_var = lookup_decl (var, ctx);
11370 x = create_tmp_var_raw (type, get_name (new_var));
11371 gimple_add_tmp_var (x);
11372 x = build_simple_mem_ref (x);
11373 SET_DECL_VALUE_EXPR (new_var, x);
11374 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
11376 continue;
11379 if (!maybe_lookup_field (var, ctx))
11380 continue;
11382 /* Don't remap oacc parallel reduction variables, because the
11383 intermediate result must be local to each gang. */
11384 if (offloaded && !(OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
11385 && OMP_CLAUSE_MAP_IN_REDUCTION (c)))
11387 x = build_receiver_ref (var, true, ctx);
11388 tree new_var = lookup_decl (var, ctx);
11390 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
11391 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
11392 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
11393 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
11394 x = build_simple_mem_ref (x);
11395 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
11397 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
11398 if (omp_is_reference (new_var)
11399 && (TREE_CODE (TREE_TYPE (new_var)) != POINTER_TYPE
11400 || DECL_BY_REFERENCE (var)))
11402 /* Create a local object to hold the instance
11403 value. */
11404 tree type = TREE_TYPE (TREE_TYPE (new_var));
11405 const char *id = IDENTIFIER_POINTER (DECL_NAME (new_var));
11406 tree inst = create_tmp_var (type, id);
11407 gimplify_assign (inst, fold_indirect_ref (x), &fplist);
11408 x = build_fold_addr_expr (inst);
11410 gimplify_assign (new_var, x, &fplist);
11412 else if (DECL_P (new_var))
11414 SET_DECL_VALUE_EXPR (new_var, x);
11415 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
11417 else
11418 gcc_unreachable ();
11420 map_cnt++;
11421 break;
11423 case OMP_CLAUSE_FIRSTPRIVATE:
11424 if (is_oacc_parallel (ctx))
11425 goto oacc_firstprivate;
11426 map_cnt++;
11427 var = OMP_CLAUSE_DECL (c);
11428 if (!omp_is_reference (var)
11429 && !is_gimple_reg_type (TREE_TYPE (var)))
11431 tree new_var = lookup_decl (var, ctx);
11432 if (is_variable_sized (var))
11434 tree pvar = DECL_VALUE_EXPR (var);
11435 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
11436 pvar = TREE_OPERAND (pvar, 0);
11437 gcc_assert (DECL_P (pvar));
11438 tree new_pvar = lookup_decl (pvar, ctx);
11439 x = build_fold_indirect_ref (new_pvar);
11440 TREE_THIS_NOTRAP (x) = 1;
11442 else
11443 x = build_receiver_ref (var, true, ctx);
11444 SET_DECL_VALUE_EXPR (new_var, x);
11445 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
11447 break;
11449 case OMP_CLAUSE_PRIVATE:
11450 if (is_gimple_omp_oacc (ctx->stmt))
11451 break;
11452 var = OMP_CLAUSE_DECL (c);
11453 if (is_variable_sized (var))
11455 tree new_var = lookup_decl (var, ctx);
11456 tree pvar = DECL_VALUE_EXPR (var);
11457 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
11458 pvar = TREE_OPERAND (pvar, 0);
11459 gcc_assert (DECL_P (pvar));
11460 tree new_pvar = lookup_decl (pvar, ctx);
11461 x = build_fold_indirect_ref (new_pvar);
11462 TREE_THIS_NOTRAP (x) = 1;
11463 SET_DECL_VALUE_EXPR (new_var, x);
11464 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
11466 break;
11468 case OMP_CLAUSE_USE_DEVICE_PTR:
11469 case OMP_CLAUSE_USE_DEVICE_ADDR:
11470 case OMP_CLAUSE_IS_DEVICE_PTR:
11471 var = OMP_CLAUSE_DECL (c);
11472 map_cnt++;
11473 if (is_variable_sized (var))
11475 tree new_var = lookup_decl (var, ctx);
11476 tree pvar = DECL_VALUE_EXPR (var);
11477 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
11478 pvar = TREE_OPERAND (pvar, 0);
11479 gcc_assert (DECL_P (pvar));
11480 tree new_pvar = lookup_decl (pvar, ctx);
11481 x = build_fold_indirect_ref (new_pvar);
11482 TREE_THIS_NOTRAP (x) = 1;
11483 SET_DECL_VALUE_EXPR (new_var, x);
11484 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
11486 else if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
11487 && !omp_is_reference (var)
11488 && !omp_is_allocatable_or_ptr (var))
11489 || TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
11491 tree new_var = lookup_decl (var, ctx);
11492 tree type = build_pointer_type (TREE_TYPE (var));
11493 x = create_tmp_var_raw (type, get_name (new_var));
11494 gimple_add_tmp_var (x);
11495 x = build_simple_mem_ref (x);
11496 SET_DECL_VALUE_EXPR (new_var, x);
11497 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
11499 else
11501 tree new_var = lookup_decl (var, ctx);
11502 x = create_tmp_var_raw (TREE_TYPE (new_var), get_name (new_var));
11503 gimple_add_tmp_var (x);
11504 SET_DECL_VALUE_EXPR (new_var, x);
11505 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
11507 break;
11510 if (offloaded)
11511 {
11512 target_nesting_level++;
11513 lower_omp (&tgt_body, ctx);
11514 target_nesting_level--;
11515 }
11516 else if (data_region)
11517 lower_omp (&tgt_body, ctx);
11519 if (offloaded)
11520 {
11521 /* Declare all the variables created by mapping and the variables
11522 declared in the scope of the target body.  */
11523 record_vars_into (ctx->block_vars, child_fn);
11524 maybe_remove_omp_member_access_dummy_vars (tgt_bind);
11525 record_vars_into (gimple_bind_vars (tgt_bind), child_fn);
11526 }
11528 olist = NULL;
11529 ilist = NULL;
11530 if (ctx->record_type)
11531 {
11532 ctx->sender_decl
11533 = create_tmp_var (ctx->record_type, ".omp_data_arr");
11534 DECL_NAMELESS (ctx->sender_decl) = 1;
11535 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
11536 t = make_tree_vec (3);
11537 TREE_VEC_ELT (t, 0) = ctx->sender_decl;
11538 TREE_VEC_ELT (t, 1)
11539 = create_tmp_var (build_array_type_nelts (size_type_node, map_cnt),
11540 ".omp_data_sizes");
11541 DECL_NAMELESS (TREE_VEC_ELT (t, 1)) = 1;
11542 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 1)) = 1;
11543 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 1;
11544 tree tkind_type = short_unsigned_type_node;
11545 int talign_shift = 8;
11546 TREE_VEC_ELT (t, 2)
11547 = create_tmp_var (build_array_type_nelts (tkind_type, map_cnt),
11548 ".omp_data_kinds");
11549 DECL_NAMELESS (TREE_VEC_ELT (t, 2)) = 1;
11550 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 2)) = 1;
11551 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 1;
11552 gimple_omp_target_set_data_arg (stmt, t);
11554 vec<constructor_elt, va_gc> *vsize;
11555 vec<constructor_elt, va_gc> *vkind;
11556 vec_alloc (vsize, map_cnt);
11557 vec_alloc (vkind, map_cnt);
11558 unsigned int map_idx = 0;
11560 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
11561 switch (OMP_CLAUSE_CODE (c))
11563 tree ovar, nc, s, purpose, var, x, type;
11564 unsigned int talign;
11566 default:
11567 break;
11569 case OMP_CLAUSE_MAP:
11570 case OMP_CLAUSE_TO:
11571 case OMP_CLAUSE_FROM:
11572 oacc_firstprivate_map:
11573 nc = c;
11574 ovar = OMP_CLAUSE_DECL (c);
11575 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
11576 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
11577 || (OMP_CLAUSE_MAP_KIND (c)
11578 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
11579 break;
11580 if (!DECL_P (ovar))
11582 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
11583 && OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
11585 gcc_checking_assert (OMP_CLAUSE_DECL (OMP_CLAUSE_CHAIN (c))
11586 == get_base_address (ovar));
11587 nc = OMP_CLAUSE_CHAIN (c);
11588 ovar = OMP_CLAUSE_DECL (nc);
11590 else
11592 tree x = build_sender_ref (ovar, ctx);
11593 tree v
11594 = build_fold_addr_expr_with_type (ovar, ptr_type_node);
11595 gimplify_assign (x, v, &ilist);
11596 nc = NULL_TREE;
11599 else
11601 if (DECL_SIZE (ovar)
11602 && TREE_CODE (DECL_SIZE (ovar)) != INTEGER_CST)
11604 tree ovar2 = DECL_VALUE_EXPR (ovar);
11605 gcc_assert (TREE_CODE (ovar2) == INDIRECT_REF);
11606 ovar2 = TREE_OPERAND (ovar2, 0);
11607 gcc_assert (DECL_P (ovar2));
11608 ovar = ovar2;
11610 if (!maybe_lookup_field (ovar, ctx))
11611 continue;
11614 talign = TYPE_ALIGN_UNIT (TREE_TYPE (ovar));
11615 if (DECL_P (ovar) && DECL_ALIGN_UNIT (ovar) > talign)
11616 talign = DECL_ALIGN_UNIT (ovar);
11617 if (nc)
11619 var = lookup_decl_in_outer_ctx (ovar, ctx);
11620 x = build_sender_ref (ovar, ctx);
11622 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
11623 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
11624 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
11625 && TREE_CODE (TREE_TYPE (ovar)) == ARRAY_TYPE)
11627 gcc_assert (offloaded);
11628 tree avar
11629 = create_tmp_var (TREE_TYPE (TREE_TYPE (x)));
11630 mark_addressable (avar);
11631 gimplify_assign (avar, build_fold_addr_expr (var), &ilist);
11632 talign = DECL_ALIGN_UNIT (avar);
11633 avar = build_fold_addr_expr (avar);
11634 gimplify_assign (x, avar, &ilist);
11636 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
11638 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
11639 if (!omp_is_reference (var))
11641 if (is_gimple_reg (var)
11642 && OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
11643 TREE_NO_WARNING (var) = 1;
11644 var = build_fold_addr_expr (var);
11646 else
11647 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
11648 gimplify_assign (x, var, &ilist);
11650 else if (is_gimple_reg (var))
11652 gcc_assert (offloaded);
11653 tree avar = create_tmp_var (TREE_TYPE (var));
11654 mark_addressable (avar);
11655 enum gomp_map_kind map_kind = OMP_CLAUSE_MAP_KIND (c);
11656 if (GOMP_MAP_COPY_TO_P (map_kind)
11657 || map_kind == GOMP_MAP_POINTER
11658 || map_kind == GOMP_MAP_TO_PSET
11659 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
11660 {
11661 /* If we need to initialize a temporary
11662 with VAR because it is not addressable, and
11663 the variable hasn't been initialized yet, then
11664 we'll get a warning for the store to avar.
11665 Don't warn in that case; the mapping might
11666 be implicit.  */
11667 TREE_NO_WARNING (var) = 1;
11668 gimplify_assign (avar, var, &ilist);
11669 }
11670 avar = build_fold_addr_expr (avar);
11671 gimplify_assign (x, avar, &ilist);
11672 if ((GOMP_MAP_COPY_FROM_P (map_kind)
11673 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
11674 && !TYPE_READONLY (TREE_TYPE (var)))
11676 x = unshare_expr (x);
11677 x = build_simple_mem_ref (x);
11678 gimplify_assign (var, x, &olist);
11681 else
11682 {
11683 /* While MAP is handled explicitly by the FE,
11684 for 'target update', only the identified variable is passed.  */
11685 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FROM
11686 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TO)
11687 && (omp_is_allocatable_or_ptr (var)
11688 && omp_is_optional_argument (var)))
11689 var = build_fold_indirect_ref (var);
11690 else if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FROM
11691 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_TO)
11692 || (!omp_is_allocatable_or_ptr (var)
11693 && !omp_is_optional_argument (var)))
11694 var = build_fold_addr_expr (var);
11695 gimplify_assign (x, var, &ilist);
11696 }
11698 s = NULL_TREE;
11699 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
11701 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
11702 s = TREE_TYPE (ovar);
11703 if (TREE_CODE (s) == REFERENCE_TYPE)
11704 s = TREE_TYPE (s);
11705 s = TYPE_SIZE_UNIT (s);
11707 else
11708 s = OMP_CLAUSE_SIZE (c);
11709 if (s == NULL_TREE)
11710 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
11711 s = fold_convert (size_type_node, s);
11712 purpose = size_int (map_idx++);
11713 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
11714 if (TREE_CODE (s) != INTEGER_CST)
11715 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
11717 unsigned HOST_WIDE_INT tkind, tkind_zero;
11718 switch (OMP_CLAUSE_CODE (c))
11720 case OMP_CLAUSE_MAP:
11721 tkind = OMP_CLAUSE_MAP_KIND (c);
11722 tkind_zero = tkind;
11723 if (OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c))
11724 switch (tkind)
11726 case GOMP_MAP_ALLOC:
11727 case GOMP_MAP_TO:
11728 case GOMP_MAP_FROM:
11729 case GOMP_MAP_TOFROM:
11730 case GOMP_MAP_ALWAYS_TO:
11731 case GOMP_MAP_ALWAYS_FROM:
11732 case GOMP_MAP_ALWAYS_TOFROM:
11733 case GOMP_MAP_RELEASE:
11734 case GOMP_MAP_FORCE_TO:
11735 case GOMP_MAP_FORCE_FROM:
11736 case GOMP_MAP_FORCE_TOFROM:
11737 case GOMP_MAP_FORCE_PRESENT:
11738 tkind_zero = GOMP_MAP_ZERO_LEN_ARRAY_SECTION;
11739 break;
11740 case GOMP_MAP_DELETE:
11741 tkind_zero = GOMP_MAP_DELETE_ZERO_LEN_ARRAY_SECTION;
11742 default:
11743 break;
11745 if (tkind_zero != tkind)
11746 {
11747 if (integer_zerop (s))
11748 tkind = tkind_zero;
11749 else if (integer_nonzerop (s))
11750 tkind_zero = tkind;
11751 }
11752 break;
11753 case OMP_CLAUSE_FIRSTPRIVATE:
11754 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
11755 tkind = GOMP_MAP_TO;
11756 tkind_zero = tkind;
11757 break;
11758 case OMP_CLAUSE_TO:
11759 tkind = GOMP_MAP_TO;
11760 tkind_zero = tkind;
11761 break;
11762 case OMP_CLAUSE_FROM:
11763 tkind = GOMP_MAP_FROM;
11764 tkind_zero = tkind;
11765 break;
11766 default:
11767 gcc_unreachable ();
11769 gcc_checking_assert (tkind
11770 < (HOST_WIDE_INT_C (1U) << talign_shift));
11771 gcc_checking_assert (tkind_zero
11772 < (HOST_WIDE_INT_C (1U) << talign_shift));
11773 talign = ceil_log2 (talign);
11774 tkind |= talign << talign_shift;
11775 tkind_zero |= talign << talign_shift;
11776 gcc_checking_assert (tkind
11777 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
11778 gcc_checking_assert (tkind_zero
11779 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
11780 if (tkind == tkind_zero)
11781 x = build_int_cstu (tkind_type, tkind);
11782 else
11784 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 0;
11785 x = build3 (COND_EXPR, tkind_type,
11786 fold_build2 (EQ_EXPR, boolean_type_node,
11787 unshare_expr (s), size_zero_node),
11788 build_int_cstu (tkind_type, tkind_zero),
11789 build_int_cstu (tkind_type, tkind));
11791 CONSTRUCTOR_APPEND_ELT (vkind, purpose, x);
11792 if (nc && nc != c)
11793 c = nc;
11794 break;
11796 case OMP_CLAUSE_FIRSTPRIVATE:
11797 if (is_oacc_parallel (ctx))
11798 goto oacc_firstprivate_map;
11799 ovar = OMP_CLAUSE_DECL (c);
11800 if (omp_is_reference (ovar))
11801 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
11802 else
11803 talign = DECL_ALIGN_UNIT (ovar);
11804 var = lookup_decl_in_outer_ctx (ovar, ctx);
11805 x = build_sender_ref (ovar, ctx);
11806 tkind = GOMP_MAP_FIRSTPRIVATE;
11807 type = TREE_TYPE (ovar);
11808 if (omp_is_reference (ovar))
11809 type = TREE_TYPE (type);
11810 if ((INTEGRAL_TYPE_P (type)
11811 && TYPE_PRECISION (type) <= POINTER_SIZE)
11812 || TREE_CODE (type) == POINTER_TYPE)
11814 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
11815 tree t = var;
11816 if (omp_is_reference (var))
11817 t = build_simple_mem_ref (var);
11818 else if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
11819 TREE_NO_WARNING (var) = 1;
11820 if (TREE_CODE (type) != POINTER_TYPE)
11821 t = fold_convert (pointer_sized_int_node, t);
11822 t = fold_convert (TREE_TYPE (x), t);
11823 gimplify_assign (x, t, &ilist);
11825 else if (omp_is_reference (var))
11826 gimplify_assign (x, var, &ilist);
11827 else if (is_gimple_reg (var))
11829 tree avar = create_tmp_var (TREE_TYPE (var));
11830 mark_addressable (avar);
11831 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
11832 TREE_NO_WARNING (var) = 1;
11833 gimplify_assign (avar, var, &ilist);
11834 avar = build_fold_addr_expr (avar);
11835 gimplify_assign (x, avar, &ilist);
11837 else
11839 var = build_fold_addr_expr (var);
11840 gimplify_assign (x, var, &ilist);
11842 if (tkind == GOMP_MAP_FIRSTPRIVATE_INT)
11843 s = size_int (0);
11844 else if (omp_is_reference (ovar))
11845 s = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
11846 else
11847 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
11848 s = fold_convert (size_type_node, s);
11849 purpose = size_int (map_idx++);
11850 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
11851 if (TREE_CODE (s) != INTEGER_CST)
11852 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
11854 gcc_checking_assert (tkind
11855 < (HOST_WIDE_INT_C (1U) << talign_shift));
11856 talign = ceil_log2 (talign);
11857 tkind |= talign << talign_shift;
11858 gcc_checking_assert (tkind
11859 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
11860 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
11861 build_int_cstu (tkind_type, tkind));
11862 break;
11864 case OMP_CLAUSE_USE_DEVICE_PTR:
11865 case OMP_CLAUSE_USE_DEVICE_ADDR:
11866 case OMP_CLAUSE_IS_DEVICE_PTR:
11867 ovar = OMP_CLAUSE_DECL (c);
11868 var = lookup_decl_in_outer_ctx (ovar, ctx);
11869 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR)
11871 tkind = GOMP_MAP_USE_DEVICE_PTR;
11872 x = build_sender_ref ((splay_tree_key) &DECL_UID (ovar), ctx);
11874 else
11876 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
11877 x = build_sender_ref (ovar, ctx);
11879 type = TREE_TYPE (ovar);
11880 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
11881 && !omp_is_reference (ovar)
11882 && !omp_is_allocatable_or_ptr (ovar))
11883 || TREE_CODE (type) == ARRAY_TYPE)
11884 var = build_fold_addr_expr (var);
11885 else
11887 if (omp_is_reference (ovar)
11888 || omp_is_optional_argument (ovar)
11889 || omp_is_allocatable_or_ptr (ovar))
11891 type = TREE_TYPE (type);
11892 if (TREE_CODE (type) != ARRAY_TYPE
11893 && ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_USE_DEVICE_ADDR
11894 && !omp_is_allocatable_or_ptr (ovar))
11895 || (omp_is_reference (ovar)
11896 && omp_is_allocatable_or_ptr (ovar))))
11897 var = build_simple_mem_ref (var);
11898 var = fold_convert (TREE_TYPE (x), var);
11901 gimplify_assign (x, var, &ilist);
11902 s = size_int (0);
11903 purpose = size_int (map_idx++);
11904 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
11905 gcc_checking_assert (tkind
11906 < (HOST_WIDE_INT_C (1U) << talign_shift));
11907 gcc_checking_assert (tkind
11908 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
11909 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
11910 build_int_cstu (tkind_type, tkind));
11911 break;
11914 gcc_assert (map_idx == map_cnt);
11916 DECL_INITIAL (TREE_VEC_ELT (t, 1))
11917 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 1)), vsize);
11918 DECL_INITIAL (TREE_VEC_ELT (t, 2))
11919 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 2)), vkind);
11920 for (int i = 1; i <= 2; i++)
11921 if (!TREE_STATIC (TREE_VEC_ELT (t, i)))
11922 {
11923 gimple_seq initlist = NULL;
11924 force_gimple_operand (build1 (DECL_EXPR, void_type_node,
11925 TREE_VEC_ELT (t, i)),
11926 &initlist, true, NULL_TREE);
11927 gimple_seq_add_seq (&ilist, initlist);
11929 tree clobber = build_clobber (TREE_TYPE (TREE_VEC_ELT (t, i)));
11930 gimple_seq_add_stmt (&olist,
11931 gimple_build_assign (TREE_VEC_ELT (t, i),
11932 clobber));
11933 }
11935 tree clobber = build_clobber (ctx->record_type);
11936 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
11937 clobber));
11938 }
11940 /* Once all the expansions are done, sequence all the different
11941 fragments inside gimple_omp_body. */
11943 new_body = NULL;
11945 if (offloaded
11946 && ctx->record_type)
11947 {
11948 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
11949 /* fixup_child_record_type might have changed receiver_decl's type.  */
11950 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
11951 gimple_seq_add_stmt (&new_body,
11952 gimple_build_assign (ctx->receiver_decl, t));
11953 }
11954 gimple_seq_add_seq (&new_body, fplist);
11956 if (offloaded || data_region)
11957 {
11958 tree prev = NULL_TREE;
11959 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
11960 switch (OMP_CLAUSE_CODE (c))
11962 tree var, x;
11963 default:
11964 break;
11965 case OMP_CLAUSE_FIRSTPRIVATE:
11966 if (is_gimple_omp_oacc (ctx->stmt))
11967 break;
11968 var = OMP_CLAUSE_DECL (c);
11969 if (omp_is_reference (var)
11970 || is_gimple_reg_type (TREE_TYPE (var)))
11972 tree new_var = lookup_decl (var, ctx);
11973 tree type;
11974 type = TREE_TYPE (var);
11975 if (omp_is_reference (var))
11976 type = TREE_TYPE (type);
11977 if ((INTEGRAL_TYPE_P (type)
11978 && TYPE_PRECISION (type) <= POINTER_SIZE)
11979 || TREE_CODE (type) == POINTER_TYPE)
11981 x = build_receiver_ref (var, false, ctx);
11982 if (TREE_CODE (type) != POINTER_TYPE)
11983 x = fold_convert (pointer_sized_int_node, x);
11984 x = fold_convert (type, x);
11985 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
11986 fb_rvalue);
11987 if (omp_is_reference (var))
11989 tree v = create_tmp_var_raw (type, get_name (var));
11990 gimple_add_tmp_var (v);
11991 TREE_ADDRESSABLE (v) = 1;
11992 gimple_seq_add_stmt (&new_body,
11993 gimple_build_assign (v, x));
11994 x = build_fold_addr_expr (v);
11996 gimple_seq_add_stmt (&new_body,
11997 gimple_build_assign (new_var, x));
11999 else
12001 x = build_receiver_ref (var, !omp_is_reference (var), ctx);
12002 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
12003 fb_rvalue);
12004 gimple_seq_add_stmt (&new_body,
12005 gimple_build_assign (new_var, x));
12008 else if (is_variable_sized (var))
12010 tree pvar = DECL_VALUE_EXPR (var);
12011 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
12012 pvar = TREE_OPERAND (pvar, 0);
12013 gcc_assert (DECL_P (pvar));
12014 tree new_var = lookup_decl (pvar, ctx);
12015 x = build_receiver_ref (var, false, ctx);
12016 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
12017 gimple_seq_add_stmt (&new_body,
12018 gimple_build_assign (new_var, x));
12020 break;
12021 case OMP_CLAUSE_PRIVATE:
12022 if (is_gimple_omp_oacc (ctx->stmt))
12023 break;
12024 var = OMP_CLAUSE_DECL (c);
12025 if (omp_is_reference (var))
12027 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
12028 tree new_var = lookup_decl (var, ctx);
12029 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
12030 if (TREE_CONSTANT (x))
12032 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
12033 get_name (var));
12034 gimple_add_tmp_var (x);
12035 TREE_ADDRESSABLE (x) = 1;
12036 x = build_fold_addr_expr_loc (clause_loc, x);
12038 else
12039 break;
12041 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
12042 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
12043 gimple_seq_add_stmt (&new_body,
12044 gimple_build_assign (new_var, x));
12046 break;
12047 case OMP_CLAUSE_USE_DEVICE_PTR:
12048 case OMP_CLAUSE_USE_DEVICE_ADDR:
12049 case OMP_CLAUSE_IS_DEVICE_PTR:
12050 var = OMP_CLAUSE_DECL (c);
12051 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR)
12052 x = build_sender_ref ((splay_tree_key) &DECL_UID (var), ctx);
12053 else
12054 x = build_receiver_ref (var, false, ctx);
12055 if (is_variable_sized (var))
12057 tree pvar = DECL_VALUE_EXPR (var);
12058 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
12059 pvar = TREE_OPERAND (pvar, 0);
12060 gcc_assert (DECL_P (pvar));
12061 tree new_var = lookup_decl (pvar, ctx);
12062 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
12063 gimple_seq_add_stmt (&new_body,
12064 gimple_build_assign (new_var, x));
12066 else if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
12067 && !omp_is_reference (var)
12068 && !omp_is_allocatable_or_ptr (var))
12069 || TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
12071 tree new_var = lookup_decl (var, ctx);
12072 new_var = DECL_VALUE_EXPR (new_var);
12073 gcc_assert (TREE_CODE (new_var) == MEM_REF);
12074 new_var = TREE_OPERAND (new_var, 0);
12075 gcc_assert (DECL_P (new_var));
12076 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
12077 gimple_seq_add_stmt (&new_body,
12078 gimple_build_assign (new_var, x));
12080 else
12082 tree type = TREE_TYPE (var);
12083 tree new_var = lookup_decl (var, ctx);
12084 if (omp_is_reference (var))
12086 type = TREE_TYPE (type);
12087 if (TREE_CODE (type) != ARRAY_TYPE
12088 && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_USE_DEVICE_ADDR
12089 || (omp_is_reference (var)
12090 && omp_is_allocatable_or_ptr (var))))
12092 tree v = create_tmp_var_raw (type, get_name (var));
12093 gimple_add_tmp_var (v);
12094 TREE_ADDRESSABLE (v) = 1;
12095 x = fold_convert (type, x);
12096 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
12097 fb_rvalue);
12098 gimple_seq_add_stmt (&new_body,
12099 gimple_build_assign (v, x));
12100 x = build_fold_addr_expr (v);
12103 new_var = DECL_VALUE_EXPR (new_var);
12104 x = fold_convert (TREE_TYPE (new_var), x);
12105 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
12106 gimple_seq_add_stmt (&new_body,
12107 gimple_build_assign (new_var, x));
12109 break;
12111 /* Handle GOMP_MAP_FIRSTPRIVATE_{POINTER,REFERENCE} in a second pass,
12112 so that the firstprivate vars that, where needed, hold their
12113 OMP_CLAUSE_SIZE have already been handled.  Similarly
12114 OMP_CLAUSE_PRIVATE for VLAs or references to VLAs.  */
12115 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
12116 switch (OMP_CLAUSE_CODE (c))
12118 tree var;
12119 default:
12120 break;
12121 case OMP_CLAUSE_MAP:
12122 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
12123 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
12125 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
12126 poly_int64 offset = 0;
12127 gcc_assert (prev);
12128 var = OMP_CLAUSE_DECL (c);
12129 if (DECL_P (var)
12130 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
12131 && is_global_var (maybe_lookup_decl_in_outer_ctx (var,
12132 ctx))
12133 && varpool_node::get_create (var)->offloadable)
12134 break;
12135 if (TREE_CODE (var) == INDIRECT_REF
12136 && TREE_CODE (TREE_OPERAND (var, 0)) == COMPONENT_REF)
12137 var = TREE_OPERAND (var, 0);
12138 if (TREE_CODE (var) == COMPONENT_REF)
12140 var = get_addr_base_and_unit_offset (var, &offset);
12141 gcc_assert (var != NULL_TREE && DECL_P (var));
12143 else if (DECL_SIZE (var)
12144 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
12146 tree var2 = DECL_VALUE_EXPR (var);
12147 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
12148 var2 = TREE_OPERAND (var2, 0);
12149 gcc_assert (DECL_P (var2));
12150 var = var2;
12152 tree new_var = lookup_decl (var, ctx), x;
12153 tree type = TREE_TYPE (new_var);
12154 bool is_ref;
12155 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == INDIRECT_REF
12156 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
12157 == COMPONENT_REF))
12159 type = TREE_TYPE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0));
12160 is_ref = true;
12161 new_var = build2 (MEM_REF, type,
12162 build_fold_addr_expr (new_var),
12163 build_int_cst (build_pointer_type (type),
12164 offset));
12166 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPONENT_REF)
12168 type = TREE_TYPE (OMP_CLAUSE_DECL (c));
12169 is_ref = TREE_CODE (type) == REFERENCE_TYPE;
12170 new_var = build2 (MEM_REF, type,
12171 build_fold_addr_expr (new_var),
12172 build_int_cst (build_pointer_type (type),
12173 offset));
12175 else
12176 is_ref = omp_is_reference (var);
12177 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
12178 is_ref = false;
12179 bool ref_to_array = false;
12180 if (is_ref)
12182 type = TREE_TYPE (type);
12183 if (TREE_CODE (type) == ARRAY_TYPE)
12185 type = build_pointer_type (type);
12186 ref_to_array = true;
12189 else if (TREE_CODE (type) == ARRAY_TYPE)
12191 tree decl2 = DECL_VALUE_EXPR (new_var);
12192 gcc_assert (TREE_CODE (decl2) == MEM_REF);
12193 decl2 = TREE_OPERAND (decl2, 0);
12194 gcc_assert (DECL_P (decl2));
12195 new_var = decl2;
12196 type = TREE_TYPE (new_var);
12198 x = build_receiver_ref (OMP_CLAUSE_DECL (prev), false, ctx);
12199 x = fold_convert_loc (clause_loc, type, x);
12200 if (!integer_zerop (OMP_CLAUSE_SIZE (c)))
12202 tree bias = OMP_CLAUSE_SIZE (c);
12203 if (DECL_P (bias))
12204 bias = lookup_decl (bias, ctx);
12205 bias = fold_convert_loc (clause_loc, sizetype, bias);
12206 bias = fold_build1_loc (clause_loc, NEGATE_EXPR, sizetype,
12207 bias);
12208 x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
12209 TREE_TYPE (x), x, bias);
12211 if (ref_to_array)
12212 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
12213 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
12214 if (is_ref && !ref_to_array)
12216 tree t = create_tmp_var_raw (type, get_name (var));
12217 gimple_add_tmp_var (t);
12218 TREE_ADDRESSABLE (t) = 1;
12219 gimple_seq_add_stmt (&new_body,
12220 gimple_build_assign (t, x));
12221 x = build_fold_addr_expr_loc (clause_loc, t);
12223 gimple_seq_add_stmt (&new_body,
12224 gimple_build_assign (new_var, x));
12225 prev = NULL_TREE;
12227 else if (OMP_CLAUSE_CHAIN (c)
12228 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c))
12229 == OMP_CLAUSE_MAP
12230 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
12231 == GOMP_MAP_FIRSTPRIVATE_POINTER
12232 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
12233 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
12234 prev = c;
12235 break;
12236 case OMP_CLAUSE_PRIVATE:
12237 var = OMP_CLAUSE_DECL (c);
12238 if (is_variable_sized (var))
12240 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
12241 tree new_var = lookup_decl (var, ctx);
12242 tree pvar = DECL_VALUE_EXPR (var);
12243 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
12244 pvar = TREE_OPERAND (pvar, 0);
12245 gcc_assert (DECL_P (pvar));
12246 tree new_pvar = lookup_decl (pvar, ctx);
12247 tree atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
12248 tree al = size_int (DECL_ALIGN (var));
12249 tree x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
12250 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
12251 x = fold_convert_loc (clause_loc, TREE_TYPE (new_pvar), x);
12252 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
12253 gimple_seq_add_stmt (&new_body,
12254 gimple_build_assign (new_pvar, x));
12256 else if (omp_is_reference (var) && !is_gimple_omp_oacc (ctx->stmt))
12258 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
12259 tree new_var = lookup_decl (var, ctx);
12260 tree x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
12261 if (TREE_CONSTANT (x))
12262 break;
12263 else
12265 tree atmp
12266 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
12267 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
12268 tree al = size_int (TYPE_ALIGN (rtype));
12269 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
12272 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
12273 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
12274 gimple_seq_add_stmt (&new_body,
12275 gimple_build_assign (new_var, x));
12277 break;
12280 gimple_seq fork_seq = NULL;
12281 gimple_seq join_seq = NULL;
12283 if (is_oacc_parallel (ctx))
12284 {
12285 /* If there are reductions on the offloaded region itself, treat
12286 them as a dummy GANG loop.  */
12287 tree level = build_int_cst (integer_type_node, GOMP_DIM_GANG);
12289 lower_oacc_reductions (gimple_location (ctx->stmt), clauses, level,
12290 false, NULL, NULL, &fork_seq, &join_seq, ctx);
12291 }
12293 gimple_seq_add_seq (&new_body, fork_seq);
12294 gimple_seq_add_seq (&new_body, tgt_body);
12295 gimple_seq_add_seq (&new_body, join_seq);
12297 if (offloaded)
12298 new_body = maybe_catch_exception (new_body);
12300 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
12301 gimple_omp_set_body (stmt, new_body);
12302 }
12304 bind = gimple_build_bind (NULL, NULL,
12305 tgt_bind ? gimple_bind_block (tgt_bind)
12306 : NULL_TREE);
12307 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
12308 gimple_bind_add_seq (bind, ilist);
12309 gimple_bind_add_stmt (bind, stmt);
12310 gimple_bind_add_seq (bind, olist);
12312 pop_gimplify_context (NULL);
12314 if (dep_bind)
12315 {
12316 gimple_bind_add_seq (dep_bind, dep_ilist);
12317 gimple_bind_add_stmt (dep_bind, bind);
12318 gimple_bind_add_seq (dep_bind, dep_olist);
12319 pop_gimplify_context (dep_bind);
12320 }
12321 }
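/* For illustration (a sketch; exact values depend on the target and on
   alignment): for

     int a;
     #pragma omp target map(tofrom: a)

   the three descriptors built above come out roughly as

     .omp_data_arr   field for a = &a (or the address of an addressable copy)
     .omp_data_sizes = { 4 }
     .omp_data_kinds = { GOMP_MAP_TOFROM | (2 << 8) }   // align 4, log2 = 2

   which the libgomp launch routine consumes as parallel arrays.  */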
12323 /* Expand code for an OpenMP teams directive. */
12325 static void
12326 lower_omp_teams (gimple_stmt_iterator *gsi_p, omp_context *ctx)
12327 {
12328 gomp_teams *teams_stmt = as_a <gomp_teams *> (gsi_stmt (*gsi_p));
12329 push_gimplify_context ();
12331 tree block = make_node (BLOCK);
12332 gbind *bind = gimple_build_bind (NULL, NULL, block);
12333 gsi_replace (gsi_p, bind, true);
12334 gimple_seq bind_body = NULL;
12335 gimple_seq dlist = NULL;
12336 gimple_seq olist = NULL;
12338 tree num_teams = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
12339 OMP_CLAUSE_NUM_TEAMS);
12340 if (num_teams == NULL_TREE)
12341 num_teams = build_int_cst (unsigned_type_node, 0);
12342 else
12343 {
12344 num_teams = OMP_CLAUSE_NUM_TEAMS_EXPR (num_teams);
12345 num_teams = fold_convert (unsigned_type_node, num_teams);
12346 gimplify_expr (&num_teams, &bind_body, NULL, is_gimple_val, fb_rvalue);
12347 }
12348 tree thread_limit = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
12349 OMP_CLAUSE_THREAD_LIMIT);
12350 if (thread_limit == NULL_TREE)
12351 thread_limit = build_int_cst (unsigned_type_node, 0);
12352 else
12353 {
12354 thread_limit = OMP_CLAUSE_THREAD_LIMIT_EXPR (thread_limit);
12355 thread_limit = fold_convert (unsigned_type_node, thread_limit);
12356 gimplify_expr (&thread_limit, &bind_body, NULL, is_gimple_val,
12357 fb_rvalue);
12358 }
12360 lower_rec_input_clauses (gimple_omp_teams_clauses (teams_stmt),
12361 &bind_body, &dlist, ctx, NULL);
12362 lower_omp (gimple_omp_body_ptr (teams_stmt), ctx);
12363 lower_reduction_clauses (gimple_omp_teams_clauses (teams_stmt), &olist,
12364 NULL, ctx);
12365 if (!gimple_omp_teams_grid_phony (teams_stmt))
12366 {
12367 gimple_seq_add_stmt (&bind_body, teams_stmt);
12368 location_t loc = gimple_location (teams_stmt);
12369 tree decl = builtin_decl_explicit (BUILT_IN_GOMP_TEAMS);
12370 gimple *call = gimple_build_call (decl, 2, num_teams, thread_limit);
12371 gimple_set_location (call, loc);
12372 gimple_seq_add_stmt (&bind_body, call);
12373 }
12375 gimple_seq_add_seq (&bind_body, gimple_omp_body (teams_stmt));
12376 gimple_omp_set_body (teams_stmt, NULL);
12377 gimple_seq_add_seq (&bind_body, olist);
12378 gimple_seq_add_seq (&bind_body, dlist);
12379 if (!gimple_omp_teams_grid_phony (teams_stmt))
12380 gimple_seq_add_stmt (&bind_body, gimple_build_omp_return (true));
12381 gimple_bind_set_body (bind, bind_body);
12383 pop_gimplify_context (bind);
12385 gimple_bind_append_vars (bind, ctx->block_vars);
12386 BLOCK_VARS (block) = ctx->block_vars;
12387 if (BLOCK_VARS (block))
12388 TREE_USED (block) = 1;
12389 }
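/* For illustration (a sketch): for

     #pragma omp teams num_teams(4) thread_limit(8)

   the lowering above emits, ahead of the teams body,

     __builtin_GOMP_teams (4, 8);

   with 0 passed for either argument whose clause is absent.  */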
12391 /* Expand code within an artificial GIMPLE_OMP_GRID_BODY OMP construct. */
12393 static void
12394 lower_omp_grid_body (gimple_stmt_iterator *gsi_p, omp_context *ctx)
12395 {
12396 gimple *stmt = gsi_stmt (*gsi_p);
12397 lower_omp (gimple_omp_body_ptr (stmt), ctx);
12398 gimple_seq_add_stmt (gimple_omp_body_ptr (stmt),
12399 gimple_build_omp_return (false));
12400 }
12403 /* Callback for lower_omp_1. Return non-NULL if *tp needs to be
12404 regimplified. If DATA is non-NULL, lower_omp_1 is outside
12405 of OMP context, but with task_shared_vars set. */
12407 static tree
12408 lower_omp_regimplify_p (tree *tp, int *walk_subtrees,
12409 void *data)
12410 {
12411 tree t = *tp;
12413 /* Any variable with DECL_VALUE_EXPR needs to be regimplified.  */
12414 if (VAR_P (t) && data == NULL && DECL_HAS_VALUE_EXPR_P (t))
12415 return t;
12417 if (task_shared_vars
12418 && DECL_P (t)
12419 && bitmap_bit_p (task_shared_vars, DECL_UID (t)))
12420 return t;
12422 /* If a global variable has been privatized, TREE_CONSTANT on
12423 ADDR_EXPR might be wrong.  */
12424 if (data == NULL && TREE_CODE (t) == ADDR_EXPR)
12425 recompute_tree_invariant_for_addr_expr (t);
12427 *walk_subtrees = !IS_TYPE_OR_DECL_P (t);
12428 return NULL_TREE;
12429 }
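/* For illustration (a sketch): after data-sharing lowering, a shared
   variable x may carry DECL_VALUE_EXPR (x) == .omp_data_i->x, so a
   statement like "x = x + 1" no longer has valid gimple operands once
   the value expression is substituted; the callback above flags it so
   the statement is regimplified.  */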
12431 /* Data to be communicated between lower_omp_regimplify_operands and
12432 lower_omp_regimplify_operands_p. */
12434 struct lower_omp_regimplify_operands_data
12435 {
12436 omp_context *ctx;
12437 vec<tree> *decls;
12438 };
12440 /* Helper function for lower_omp_regimplify_operands. Find
12441 omp_member_access_dummy_var vars and adjust temporarily their
12442 DECL_VALUE_EXPRs if needed. */
12444 static tree
12445 lower_omp_regimplify_operands_p (tree *tp, int *walk_subtrees,
12446 void *data)
12447 {
12448 tree t = omp_member_access_dummy_var (*tp);
12449 if (t)
12450 {
12451 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
12452 lower_omp_regimplify_operands_data *ldata
12453 = (lower_omp_regimplify_operands_data *) wi->info;
12454 tree o = maybe_lookup_decl (t, ldata->ctx);
12455 if (o != t)
12456 {
12457 ldata->decls->safe_push (DECL_VALUE_EXPR (*tp));
12458 ldata->decls->safe_push (*tp);
12459 tree v = unshare_and_remap (DECL_VALUE_EXPR (*tp), t, o);
12460 SET_DECL_VALUE_EXPR (*tp, v);
12461 }
12462 }
12463 *walk_subtrees = !IS_TYPE_OR_DECL_P (*tp);
12464 return NULL_TREE;
12465 }
12467 /* Wrapper around gimple_regimplify_operands that adjusts DECL_VALUE_EXPRs
12468 of omp_member_access_dummy_var vars during regimplification. */
12470 static void
12471 lower_omp_regimplify_operands (omp_context *ctx, gimple *stmt,
12472 gimple_stmt_iterator *gsi_p)
12473 {
12474 auto_vec<tree, 10> decls;
12475 if (ctx)
12476 {
12477 struct walk_stmt_info wi;
12478 memset (&wi, '\0', sizeof (wi));
12479 struct lower_omp_regimplify_operands_data data;
12480 data.ctx = ctx;
12481 data.decls = &decls;
12482 wi.info = &data;
12483 walk_gimple_op (stmt, lower_omp_regimplify_operands_p, &wi);
12484 }
12485 gimple_regimplify_operands (stmt, gsi_p);
12486 while (!decls.is_empty ())
12487 {
12488 tree t = decls.pop ();
12489 tree v = decls.pop ();
12490 SET_DECL_VALUE_EXPR (t, v);
12491 }
12492 }
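/* For illustration: decls acts as a save/restore stack.  The walk
   pushes each dummy var's original DECL_VALUE_EXPR together with the
   var itself, installs a remapped copy for the duration of
   gimple_regimplify_operands, and the loop above pops the pairs to put
   the original value expressions back.  */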
12494 static void
12495 lower_omp_1 (gimple_stmt_iterator *gsi_p, omp_context *ctx)
12496 {
12497 gimple *stmt = gsi_stmt (*gsi_p);
12498 struct walk_stmt_info wi;
12499 gcall *call_stmt;
12501 if (gimple_has_location (stmt))
12502 input_location = gimple_location (stmt);
12504 if (task_shared_vars)
12505 memset (&wi, '\0', sizeof (wi));
12507 /* If we have issued syntax errors, avoid doing any heavy lifting.
12508 Just replace the OMP directives with a NOP to avoid
12509 confusing RTL expansion. */
12510 if (seen_error () && is_gimple_omp (stmt))
12511 {
12512 gsi_replace (gsi_p, gimple_build_nop (), true);
12513 return;
12514 }
12516 switch (gimple_code (stmt))
12517 {
12518 case GIMPLE_COND:
12519 {
12520 gcond *cond_stmt = as_a <gcond *> (stmt);
12521 if ((ctx || task_shared_vars)
12522 && (walk_tree (gimple_cond_lhs_ptr (cond_stmt),
12523 lower_omp_regimplify_p,
12524 ctx ? NULL : &wi, NULL)
12525 || walk_tree (gimple_cond_rhs_ptr (cond_stmt),
12526 lower_omp_regimplify_p,
12527 ctx ? NULL : &wi, NULL)))
12528 lower_omp_regimplify_operands (ctx, cond_stmt, gsi_p);
12529 }
12530 break;
12531 case GIMPLE_CATCH:
12532 lower_omp (gimple_catch_handler_ptr (as_a <gcatch *> (stmt)), ctx);
12533 break;
12534 case GIMPLE_EH_FILTER:
12535 lower_omp (gimple_eh_filter_failure_ptr (stmt), ctx);
12536 break;
12537 case GIMPLE_TRY:
12538 lower_omp (gimple_try_eval_ptr (stmt), ctx);
12539 lower_omp (gimple_try_cleanup_ptr (stmt), ctx);
12540 break;
12541 case GIMPLE_TRANSACTION:
12542 lower_omp (gimple_transaction_body_ptr (as_a <gtransaction *> (stmt)),
12543 ctx);
12544 break;
12545 case GIMPLE_BIND:
12546 lower_omp (gimple_bind_body_ptr (as_a <gbind *> (stmt)), ctx);
12547 maybe_remove_omp_member_access_dummy_vars (as_a <gbind *> (stmt));
12548 break;
12549 case GIMPLE_OMP_PARALLEL:
12550 case GIMPLE_OMP_TASK:
12551 ctx = maybe_lookup_ctx (stmt);
12552 gcc_assert (ctx);
12553 if (ctx->cancellable)
12554 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
12555 lower_omp_taskreg (gsi_p, ctx);
12556 break;
12557 case GIMPLE_OMP_FOR:
12558 ctx = maybe_lookup_ctx (stmt);
12559 gcc_assert (ctx);
12560 if (ctx->cancellable)
12561 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
12562 lower_omp_for (gsi_p, ctx);
12563 break;
12564 case GIMPLE_OMP_SECTIONS:
12565 ctx = maybe_lookup_ctx (stmt);
12566 gcc_assert (ctx);
12567 if (ctx->cancellable)
12568 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
12569 lower_omp_sections (gsi_p, ctx);
12570 break;
12571 case GIMPLE_OMP_SINGLE:
12572 ctx = maybe_lookup_ctx (stmt);
12573 gcc_assert (ctx);
12574 lower_omp_single (gsi_p, ctx);
12575 break;
12576 case GIMPLE_OMP_MASTER:
12577 ctx = maybe_lookup_ctx (stmt);
12578 gcc_assert (ctx);
12579 lower_omp_master (gsi_p, ctx);
12580 break;
12581 case GIMPLE_OMP_TASKGROUP:
12582 ctx = maybe_lookup_ctx (stmt);
12583 gcc_assert (ctx);
12584 lower_omp_taskgroup (gsi_p, ctx);
12585 break;
12586 case GIMPLE_OMP_ORDERED:
12587 ctx = maybe_lookup_ctx (stmt);
12588 gcc_assert (ctx);
12589 lower_omp_ordered (gsi_p, ctx);
12590 break;
12591 case GIMPLE_OMP_SCAN:
12592 ctx = maybe_lookup_ctx (stmt);
12593 gcc_assert (ctx);
12594 lower_omp_scan (gsi_p, ctx);
12595 break;
12596 case GIMPLE_OMP_CRITICAL:
12597 ctx = maybe_lookup_ctx (stmt);
12598 gcc_assert (ctx);
12599 lower_omp_critical (gsi_p, ctx);
12600 break;
12601 case GIMPLE_OMP_ATOMIC_LOAD:
12602 if ((ctx || task_shared_vars)
12603 && walk_tree (gimple_omp_atomic_load_rhs_ptr (
12604 as_a <gomp_atomic_load *> (stmt)),
12605 lower_omp_regimplify_p, ctx ? NULL : &wi, NULL))
12606 lower_omp_regimplify_operands (ctx, stmt, gsi_p);
12607 break;
12608 case GIMPLE_OMP_TARGET:
12609 ctx = maybe_lookup_ctx (stmt);
12610 gcc_assert (ctx);
12611 lower_omp_target (gsi_p, ctx);
12612 break;
12613 case GIMPLE_OMP_TEAMS:
12614 ctx = maybe_lookup_ctx (stmt);
12615 gcc_assert (ctx);
12616 if (gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
12617 lower_omp_taskreg (gsi_p, ctx);
12618 else
12619 lower_omp_teams (gsi_p, ctx);
12620 break;
12621 case GIMPLE_OMP_GRID_BODY:
12622 ctx = maybe_lookup_ctx (stmt);
12623 gcc_assert (ctx);
12624 lower_omp_grid_body (gsi_p, ctx);
12625 break;
12626 case GIMPLE_CALL:
12627 tree fndecl;
12628 call_stmt = as_a <gcall *> (stmt);
12629 fndecl = gimple_call_fndecl (call_stmt);
12630 if (fndecl
12631 && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
12632 switch (DECL_FUNCTION_CODE (fndecl))
12633 {
12634 case BUILT_IN_GOMP_BARRIER:
12635 if (ctx == NULL)
12636 break;
12637 /* FALLTHRU */
12638 case BUILT_IN_GOMP_CANCEL:
12639 case BUILT_IN_GOMP_CANCELLATION_POINT:
12640 omp_context *cctx;
12641 cctx = ctx;
12642 if (gimple_code (cctx->stmt) == GIMPLE_OMP_SECTION)
12643 cctx = cctx->outer;
12644 gcc_assert (gimple_call_lhs (call_stmt) == NULL_TREE);
12645 if (!cctx->cancellable)
12646 {
12647 if (DECL_FUNCTION_CODE (fndecl)
12648 == BUILT_IN_GOMP_CANCELLATION_POINT)
12649 {
12650 stmt = gimple_build_nop ();
12651 gsi_replace (gsi_p, stmt, false);
12652 }
12653 break;
12654 }
12655 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_GOMP_BARRIER)
12656 {
12657 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_BARRIER_CANCEL);
12658 gimple_call_set_fndecl (call_stmt, fndecl);
12659 gimple_call_set_fntype (call_stmt, TREE_TYPE (fndecl));
12660 }
12661 tree lhs;
12662 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (fndecl)));
12663 gimple_call_set_lhs (call_stmt, lhs);
12664 tree fallthru_label;
12665 fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
12666 gimple *g;
12667 g = gimple_build_label (fallthru_label);
12668 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
12669 g = gimple_build_cond (NE_EXPR, lhs,
12670 fold_convert (TREE_TYPE (lhs),
12671 boolean_false_node),
12672 cctx->cancel_label, fallthru_label);
12673 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
12674 break;
12675 default:
12676 break;
12678 goto regimplify;
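    /* For lastprivate (conditional:) clauses, each store to the list item
       needs to be followed by an update of the artificial _condtemp_
       variable, so the lastprivate handling can tell which thread performed
       the (logically) last conditional store.  Illustrative sketch
       (pseudo-GIMPLE; the names are made up):

	   x = <something>;
	   x.cond_var = <current iteration number>;  */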
    case GIMPLE_ASSIGN:
      for (omp_context *up = ctx; up; up = up->outer)
	{
	  if (gimple_code (up->stmt) == GIMPLE_OMP_ORDERED
	      || gimple_code (up->stmt) == GIMPLE_OMP_CRITICAL
	      || gimple_code (up->stmt) == GIMPLE_OMP_TASKGROUP
	      || gimple_code (up->stmt) == GIMPLE_OMP_SECTION
	      || gimple_code (up->stmt) == GIMPLE_OMP_SCAN
	      || (gimple_code (up->stmt) == GIMPLE_OMP_TARGET
		  && (gimple_omp_target_kind (up->stmt)
		      == GF_OMP_TARGET_KIND_DATA)))
	    continue;
	  else if (!up->lastprivate_conditional_map)
	    break;
	  tree lhs = get_base_address (gimple_assign_lhs (stmt));
	  if (TREE_CODE (lhs) == MEM_REF
	      && DECL_P (TREE_OPERAND (lhs, 0))
	      && TREE_CODE (TREE_TYPE (TREE_OPERAND (lhs,
						     0))) == REFERENCE_TYPE)
	    lhs = TREE_OPERAND (lhs, 0);
	  if (DECL_P (lhs))
	    if (tree *v = up->lastprivate_conditional_map->get (lhs))
	      {
		tree clauses;
		if (up->combined_into_simd_safelen1)
		  {
		    up = up->outer;
		    if (gimple_code (up->stmt) == GIMPLE_OMP_SCAN)
		      up = up->outer;
		  }
		if (gimple_code (up->stmt) == GIMPLE_OMP_FOR)
		  clauses = gimple_omp_for_clauses (up->stmt);
		else
		  clauses = gimple_omp_sections_clauses (up->stmt);
		tree c = omp_find_clause (clauses, OMP_CLAUSE__CONDTEMP_);
		if (!OMP_CLAUSE__CONDTEMP__ITER (c))
		  c = omp_find_clause (OMP_CLAUSE_CHAIN (c),
				       OMP_CLAUSE__CONDTEMP_);
		gcc_assert (OMP_CLAUSE__CONDTEMP__ITER (c));
		gimple *g = gimple_build_assign (*v, OMP_CLAUSE_DECL (c));
		gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
	      }
	}
      /* FALLTHRU */

    default:
    regimplify:
      if ((ctx || task_shared_vars)
	  && walk_gimple_op (stmt, lower_omp_regimplify_p,
			     ctx ? NULL : &wi))
	{
	  /* Just remove clobbers; this should happen only if we have
	     "privatized" local addressable variables in SIMD regions.
	     The clobber isn't needed in that case, and gimplifying the
	     address of the ARRAY_REF into a pointer and creating a
	     MEM_REF-based clobber would create worse code than we get
	     with the clobber dropped.  */
	  if (gimple_clobber_p (stmt))
	    {
	      gsi_replace (gsi_p, gimple_build_nop (), true);
	      break;
	    }
	  lower_omp_regimplify_operands (ctx, stmt, gsi_p);
	}
      break;
    }
}
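/* Lower all OMP directives found in the statement sequence *BODY, within
   context CTX (NULL for the outermost sequence), and fold statements inside
   offloading or taskreg regions that gimplification left unfolded.  */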
static void
lower_omp (gimple_seq *body, omp_context *ctx)
{
  location_t saved_location = input_location;
  gimple_stmt_iterator gsi;
  for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
    lower_omp_1 (&gsi, ctx);
  /* During gimplification, we haven't folded statements inside offloading
     or taskreg regions (gimplify.c:maybe_fold_stmt); do that now.  */
  if (target_nesting_level || taskreg_nesting_level)
    for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
      fold_stmt (&gsi);
  input_location = saved_location;
}
/* Main entry point.  */

static unsigned int
execute_lower_omp (void)
{
  gimple_seq body;
  int i;
  omp_context *ctx;

  /* This pass always runs, to provide PROP_gimple_lomp.
     But often, there is nothing to do.  */
  if (flag_openacc == 0 && flag_openmp == 0
      && flag_openmp_simd == 0)
    return 0;

  all_contexts = splay_tree_new (splay_tree_compare_pointers, 0,
				 delete_omp_context);

  body = gimple_body (current_function_decl);

  if (hsa_gen_requested_p ())
    omp_grid_gridify_all_targets (&body);

  scan_omp (&body, NULL);
  gcc_assert (taskreg_nesting_level == 0);
  FOR_EACH_VEC_ELT (taskreg_contexts, i, ctx)
    finish_taskreg_scan (ctx);
  taskreg_contexts.release ();

  if (all_contexts->root)
    {
      if (task_shared_vars)
	push_gimplify_context ();
      lower_omp (&body, NULL);
      if (task_shared_vars)
	pop_gimplify_context (NULL);
    }

  if (all_contexts)
    {
      splay_tree_delete (all_contexts);
      all_contexts = NULL;
    }
  BITMAP_FREE (task_shared_vars);
  BITMAP_FREE (global_nonaddressable_vars);

  /* If the current function is a method, remove the artificial dummy
     VAR_DECLs created for non-static data member privatization: they aren't
     needed for debuginfo or anything else, have already been replaced
     everywhere in the IL, and cause problems with LTO.  */
  if (DECL_ARGUMENTS (current_function_decl)
      && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
      && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
	  == POINTER_TYPE))
    remove_member_access_dummy_vars (DECL_INITIAL (current_function_decl));
  return 0;
}
namespace {

const pass_data pass_data_lower_omp =
{
  GIMPLE_PASS, /* type */
  "omplower", /* name */
  OPTGROUP_OMP, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  PROP_gimple_lomp | PROP_gimple_lomp_dev, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_lower_omp : public gimple_opt_pass
{
public:
  pass_lower_omp (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_lower_omp, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return execute_lower_omp (); }

}; // class pass_lower_omp

} // anon namespace

gimple_opt_pass *
make_pass_lower_omp (gcc::context *ctxt)
{
  return new pass_lower_omp (ctxt);
}
/* The following is a utility to diagnose structured block violations.
   It is not part of the "omplower" pass, as that's invoked too late.  It
   should be invoked by the respective front ends after gimplification.  */
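/* For instance (illustrative user code, not from the GCC testsuite), the
   two walks below reject

       #pragma omp parallel
       {
	 if (cond)
	   goto out;		<-- branch leaves the structured block
       }
     out:;

   with "invalid branch to/from OpenMP structured block".  */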
static splay_tree all_labels;

/* Check for mismatched contexts and generate an error if needed.  Return
   true if an error is detected.  */

static bool
diagnose_sb_0 (gimple_stmt_iterator *gsi_p,
	       gimple *branch_ctx, gimple *label_ctx)
{
  gcc_checking_assert (!branch_ctx || is_gimple_omp (branch_ctx));
  gcc_checking_assert (!label_ctx || is_gimple_omp (label_ctx));

  if (label_ctx == branch_ctx)
    return false;

  const char *kind = NULL;

  if (flag_openacc)
    {
      if ((branch_ctx && is_gimple_omp_oacc (branch_ctx))
	  || (label_ctx && is_gimple_omp_oacc (label_ctx)))
	{
	  gcc_checking_assert (kind == NULL);
	  kind = "OpenACC";
	}
    }
  if (kind == NULL)
    {
      gcc_checking_assert (flag_openmp || flag_openmp_simd);
      kind = "OpenMP";
    }

  /* Previously we kept track of the label's entire context in diagnose_sb_[12]
     so we could traverse it and issue a correct "exit" or "enter" error
     message upon a structured block violation.

     We built the context by building a list with tree_cons'ing, but there is
     no easy counterpart in gimple tuples.  It seems like far too much work
     for issuing exit/enter error messages.  If someone really misses the
     distinct error message... patches welcome.  */

#if 0
  /* Try to avoid confusing the user by producing an error message
     with correct "exit" or "enter" verbiage.  We prefer "exit"
     unless we can show that LABEL_CTX is nested within BRANCH_CTX.  */
  if (branch_ctx == NULL)
    exit_p = false;
  else
    {
      while (label_ctx)
	{
	  if (TREE_VALUE (label_ctx) == branch_ctx)
	    {
	      exit_p = false;
	      break;
	    }
	  label_ctx = TREE_CHAIN (label_ctx);
	}
    }

  if (exit_p)
    error ("invalid exit from %s structured block", kind);
  else
    error ("invalid entry to %s structured block", kind);
#endif

  /* If it's obvious we have an invalid entry, be specific about the error.  */
  if (branch_ctx == NULL)
    error ("invalid entry to %s structured block", kind);
  else
    {
      /* Otherwise, be vague and lazy, but efficient.  */
      error ("invalid branch to/from %s structured block", kind);
    }

  gsi_replace (gsi_p, gimple_build_nop (), false);
  return true;
}
/* Pass 1: Create a minimal tree of structured blocks, and record
   where each label is found.  */

static tree
diagnose_sb_1 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
	       struct walk_stmt_info *wi)
{
  gimple *context = (gimple *) wi->info;
  gimple *inner_context;
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;

  switch (gimple_code (stmt))
    {
    WALK_SUBSTMTS;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_SCAN:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_TARGET:
    case GIMPLE_OMP_TEAMS:
    case GIMPLE_OMP_TASKGROUP:
      /* The minimal context here is just the current OMP construct.  */
      inner_context = stmt;
      wi->info = inner_context;
      walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_OMP_FOR:
      inner_context = stmt;
      wi->info = inner_context;
      /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
	 walk them.  */
      walk_gimple_seq (gimple_omp_for_pre_body (stmt),
		       diagnose_sb_1, NULL, wi);
      walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_LABEL:
      splay_tree_insert (all_labels,
			 (splay_tree_key) gimple_label_label (
					    as_a <glabel *> (stmt)),
			 (splay_tree_value) context);
      break;

    default:
      break;
    }

  return NULL_TREE;
}
/* Pass 2: Check each branch and see if its context differs from that of
   the destination label's context.  */

static tree
diagnose_sb_2 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
	       struct walk_stmt_info *wi)
{
  gimple *context = (gimple *) wi->info;
  splay_tree_node n;
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;

  switch (gimple_code (stmt))
    {
    WALK_SUBSTMTS;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_SCAN:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_TARGET:
    case GIMPLE_OMP_TEAMS:
    case GIMPLE_OMP_TASKGROUP:
      wi->info = stmt;
      walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_OMP_FOR:
      wi->info = stmt;
      /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
	 walk them.  */
      walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt),
			   diagnose_sb_2, NULL, wi);
      walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_COND:
      {
	gcond *cond_stmt = as_a <gcond *> (stmt);
	tree lab = gimple_cond_true_label (cond_stmt);
	if (lab)
	  {
	    n = splay_tree_lookup (all_labels,
				   (splay_tree_key) lab);
	    diagnose_sb_0 (gsi_p, context,
			   n ? (gimple *) n->value : NULL);
	  }
	lab = gimple_cond_false_label (cond_stmt);
	if (lab)
	  {
	    n = splay_tree_lookup (all_labels,
				   (splay_tree_key) lab);
	    diagnose_sb_0 (gsi_p, context,
			   n ? (gimple *) n->value : NULL);
	  }
      }
      break;

    case GIMPLE_GOTO:
      {
	tree lab = gimple_goto_dest (stmt);
	if (TREE_CODE (lab) != LABEL_DECL)
	  break;

	n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
	diagnose_sb_0 (gsi_p, context, n ? (gimple *) n->value : NULL);
      }
      break;

    case GIMPLE_SWITCH:
      {
	gswitch *switch_stmt = as_a <gswitch *> (stmt);
	unsigned int i;
	for (i = 0; i < gimple_switch_num_labels (switch_stmt); ++i)
	  {
	    tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
	    n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
	    if (n && diagnose_sb_0 (gsi_p, context, (gimple *) n->value))
	      break;
	  }
      }
      break;

    case GIMPLE_RETURN:
      diagnose_sb_0 (gsi_p, context, NULL);
      break;

    default:
      break;
    }

  return NULL_TREE;
}
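/* Entry point for the structured block diagnostics: record in ALL_LABELS
   the context enclosing each label, then check every branch against the
   context of its destination label.  */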
static unsigned int
diagnose_omp_structured_block_errors (void)
{
  struct walk_stmt_info wi;
  gimple_seq body = gimple_body (current_function_decl);

  all_labels = splay_tree_new (splay_tree_compare_pointers, 0, 0);

  memset (&wi, 0, sizeof (wi));
  walk_gimple_seq (body, diagnose_sb_1, NULL, &wi);

  memset (&wi, 0, sizeof (wi));
  wi.want_locations = true;
  walk_gimple_seq_mod (&body, diagnose_sb_2, NULL, &wi);

  gimple_set_body (current_function_decl, body);

  splay_tree_delete (all_labels);
  all_labels = NULL;

  return 0;
}
namespace {

const pass_data pass_data_diagnose_omp_blocks =
{
  GIMPLE_PASS, /* type */
  "*diagnose_omp_blocks", /* name */
  OPTGROUP_OMP, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_diagnose_omp_blocks : public gimple_opt_pass
{
public:
  pass_diagnose_omp_blocks (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_diagnose_omp_blocks, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
  {
    return flag_openacc || flag_openmp || flag_openmp_simd;
  }
  virtual unsigned int execute (function *)
  {
    return diagnose_omp_structured_block_errors ();
  }

}; // class pass_diagnose_omp_blocks

} // anon namespace

gimple_opt_pass *
make_pass_diagnose_omp_blocks (gcc::context *ctxt)
{
  return new pass_diagnose_omp_blocks (ctxt);
}

#include "gt-omp-low.h"