/* gcc/omp-low.c */
/* Lowering pass for OMP directives.  Converts OMP directives into explicit
   calls to the runtime library (libgomp), data marshalling to implement data
   sharing and copying clauses, offloading to accelerators, and more.

   Contributed by Diego Novillo <dnovillo@redhat.com>

   Copyright (C) 2005-2020 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "ssa.h"
#include "cgraph.h"
#include "pretty-print.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "tree-iterator.h"
#include "tree-inline.h"
#include "langhooks.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "splay-tree.h"
#include "omp-general.h"
#include "omp-low.h"
#include "gimple-low.h"
#include "alloc-pool.h"
#include "symbol-summary.h"
#include "tree-nested.h"
#include "context.h"
#include "gomp-constants.h"
#include "gimple-pretty-print.h"
#include "stringpool.h"
#include "attribs.h"
/* Lowering of OMP parallel and workshare constructs proceeds in two
   phases.  The first phase scans the function looking for OMP statements
   and then for variables that must be replaced to satisfy data sharing
   clauses.  The second phase expands code for the constructs, as well as
   re-gimplifying things when variables have been replaced with complex
   expressions.

   Final code generation is done by pass_expand_omp.  The flowgraph is
   scanned for regions which are then moved to a new
   function, to be invoked by the thread library, or offloaded.  */
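/* As an illustrative sketch (not literal output of this pass), a region
   such as

       #pragma omp parallel shared (n)
	 use (n);

   in a function foo ends up split into a child function

       void foo._omp_fn.0 (struct .omp_data_s *.omp_data_i)
       { use (.omp_data_i->n); }

   invoked through the libgomp entry point GOMP_parallel, with the
   .omp_data_s record carrying whatever the data-sharing clauses require
   to be communicated between the encountering thread and the team.  */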
/* Context structure.  Used to store information about each parallel
   directive in the code.  */

struct omp_context
{
  /* This field must be at the beginning, as we do "inheritance": Some
     callback functions for tree-inline.c (e.g., omp_copy_decl)
     receive a copy_body_data pointer that is up-casted to an
     omp_context pointer.  */
  copy_body_data cb;

  /* The tree of contexts corresponding to the encountered constructs.  */
  struct omp_context *outer;
  gimple *stmt;

  /* Map variables to fields in a structure that allows communication
     between sending and receiving threads.  */
  splay_tree field_map;
  tree record_type;
  tree sender_decl;
  tree receiver_decl;

  /* These are used just by task contexts, if task firstprivate fn is
     needed.  srecord_type is used to communicate from the thread
     that encountered the task construct to task firstprivate fn,
     record_type is allocated by GOMP_task, initialized by task firstprivate
     fn and passed to the task body fn.  */
  splay_tree sfield_map;
  tree srecord_type;

  /* A chain of variables to add to the top-level block surrounding the
     construct.  In the case of a parallel, this is in the child function.  */
  tree block_vars;

  /* Label to which GOMP_cancel{,lation_point} and explicit and implicit
     barriers should jump during the omplower pass.  */
  tree cancel_label;

  /* The sibling GIMPLE_OMP_FOR simd with _simt_ clause or NULL
     otherwise.  */
  gimple *simt_stmt;

  /* For task reductions registered in this context, a vector containing
     the length of the private copies block (if constant, otherwise NULL)
     and then offsets (if constant, otherwise NULL) for each entry.  */
  vec<tree> task_reductions;

  /* A hash map from the reduction clauses to the registered array
     elts.  */
  hash_map<tree, unsigned> *task_reduction_map;

  /* And a hash map from the lastprivate(conditional:) variables to their
     corresponding tracking loop iteration variables.  */
  hash_map<tree, tree> *lastprivate_conditional_map;

  /* A tree_list of the reduction clauses in this context.  This is
     only used for checking the consistency of OpenACC reduction
     clauses in scan_omp_for and is not guaranteed to contain a valid
     value outside of this function.  */
  tree local_reduction_clauses;

  /* A tree_list of the reduction clauses in outer contexts.  This is
     only used for checking the consistency of OpenACC reduction
     clauses in scan_omp_for and is not guaranteed to contain a valid
     value outside of this function.  */
  tree outer_reduction_clauses;

  /* Nesting depth of this context.  Used to beautify error messages re
     invalid gotos.  The outermost ctx is depth 1, with depth 0 being
     reserved for the main body of the function.  */
  int depth;

  /* True if this parallel directive is nested within another.  */
  bool is_nested;

  /* True if this construct can be cancelled.  */
  bool cancellable;

  /* True if lower_omp_1 should look up lastprivate conditional in parent
     context.  */
  bool combined_into_simd_safelen1;

  /* True if there is nested scan context with inclusive clause.  */
  bool scan_inclusive;

  /* True if there is nested scan context with exclusive clause.  */
  bool scan_exclusive;

  /* True in the second simd loop of for simd with inscan reductions.  */
  bool for_simd_scan_phase;

  /* True if there is order(concurrent) clause on the construct.  */
  bool order_concurrent;

  /* True if there is bind clause on the construct (i.e. a loop construct).  */
  bool loop_p;
};
static splay_tree all_contexts;
static int taskreg_nesting_level;
static int target_nesting_level;
static bitmap task_shared_vars;
static bitmap global_nonaddressable_vars;
static vec<omp_context *> taskreg_contexts;

static void scan_omp (gimple_seq *, omp_context *);
static tree scan_omp_1_op (tree *, int *, void *);

#define WALK_SUBSTMTS  \
    case GIMPLE_BIND: \
    case GIMPLE_TRY: \
    case GIMPLE_CATCH: \
    case GIMPLE_EH_FILTER: \
    case GIMPLE_TRANSACTION: \
      /* The sub-statements for these should be walked.  */ \
      *handled_ops_p = false; \
      break;
/* Return true if CTX corresponds to an OpenACC 'parallel' or 'serial'
   region.  */

static bool
is_oacc_parallel_or_serial (omp_context *ctx)
{
  enum gimple_code outer_type = gimple_code (ctx->stmt);
  return ((outer_type == GIMPLE_OMP_TARGET)
	  && ((gimple_omp_target_kind (ctx->stmt)
	       == GF_OMP_TARGET_KIND_OACC_PARALLEL)
	      || (gimple_omp_target_kind (ctx->stmt)
		  == GF_OMP_TARGET_KIND_OACC_SERIAL)));
}

/* Return true if CTX corresponds to an oacc kernels region.  */

static bool
is_oacc_kernels (omp_context *ctx)
{
  enum gimple_code outer_type = gimple_code (ctx->stmt);
  return ((outer_type == GIMPLE_OMP_TARGET)
	  && (gimple_omp_target_kind (ctx->stmt)
	      == GF_OMP_TARGET_KIND_OACC_KERNELS));
}
/* If DECL is the artificial dummy VAR_DECL created for non-static
   data member privatization, return the underlying "this" parameter,
   otherwise return NULL.  */

tree
omp_member_access_dummy_var (tree decl)
{
  if (!VAR_P (decl)
      || !DECL_ARTIFICIAL (decl)
      || !DECL_IGNORED_P (decl)
      || !DECL_HAS_VALUE_EXPR_P (decl)
      || !lang_hooks.decls.omp_disregard_value_expr (decl, false))
    return NULL_TREE;

  tree v = DECL_VALUE_EXPR (decl);
  if (TREE_CODE (v) != COMPONENT_REF)
    return NULL_TREE;

  while (1)
    switch (TREE_CODE (v))
      {
      case COMPONENT_REF:
      case MEM_REF:
      case INDIRECT_REF:
      CASE_CONVERT:
      case POINTER_PLUS_EXPR:
	v = TREE_OPERAND (v, 0);
	continue;
      case PARM_DECL:
	if (DECL_CONTEXT (v) == current_function_decl
	    && DECL_ARTIFICIAL (v)
	    && TREE_CODE (TREE_TYPE (v)) == POINTER_TYPE)
	  return v;
	return NULL_TREE;
      default:
	return NULL_TREE;
      }
}
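/* For example (a sketch): in a C++ member function,

       #pragma omp parallel private (x)

   where X is a non-static data member makes the front end introduce an
   artificial VAR_DECL whose DECL_VALUE_EXPR is the COMPONENT_REF
   this->x; given that dummy decl, the function above walks down the
   value expression and returns the "this" PARM_DECL.  */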
/* Helper for unshare_and_remap, called through walk_tree.  */

static tree
unshare_and_remap_1 (tree *tp, int *walk_subtrees, void *data)
{
  tree *pair = (tree *) data;
  if (*tp == pair[0])
    {
      *tp = unshare_expr (pair[1]);
      *walk_subtrees = 0;
    }
  else if (IS_TYPE_OR_DECL_P (*tp))
    *walk_subtrees = 0;
  return NULL_TREE;
}

/* Return unshare_expr (X) with all occurrences of FROM
   replaced with TO.  */

static tree
unshare_and_remap (tree x, tree from, tree to)
{
  tree pair[2] = { from, to };
  x = unshare_expr (x);
  walk_tree (&x, unshare_and_remap_1, pair, NULL);
  return x;
}
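/* Usage sketch: build_outer_var_ref below calls
   unshare_and_remap (x, t, o) with X a DECL_VALUE_EXPR such as
   this->field, T the "this" dummy of the current context and O its
   replacement in the outer context, yielding an unshared o->field.  */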
/* Convenience function for calling scan_omp_1_op on tree operands.  */

static inline tree
scan_omp_op (tree *tp, omp_context *ctx)
{
  struct walk_stmt_info wi;

  memset (&wi, 0, sizeof (wi));
  wi.info = ctx;
  wi.want_locations = true;

  return walk_tree (tp, scan_omp_1_op, &wi, NULL);
}

static void lower_omp (gimple_seq *, omp_context *);
static tree lookup_decl_in_outer_ctx (tree, omp_context *);
static tree maybe_lookup_decl_in_outer_ctx (tree, omp_context *);
/* Return true if CTX is for an omp parallel.  */

static inline bool
is_parallel_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL;
}

/* Return true if CTX is for an omp task.  */

static inline bool
is_task_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_TASK;
}

/* Return true if CTX is for an omp taskloop.  */

static inline bool
is_taskloop_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
	 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP;
}

/* Return true if CTX is for a host omp teams.  */

static inline bool
is_host_teams_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
	 && gimple_omp_teams_host (as_a <gomp_teams *> (ctx->stmt));
}

/* Return true if CTX is for an omp parallel or omp task or host omp teams
   (the last one is strictly not a task region in OpenMP speak, but we
   need to treat it similarly).  */

static inline bool
is_taskreg_ctx (omp_context *ctx)
{
  return is_parallel_ctx (ctx) || is_task_ctx (ctx) || is_host_teams_ctx (ctx);
}

/* Return true if EXPR is variable sized.  */

static inline bool
is_variable_sized (const_tree expr)
{
  return !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
}

/* Lookup variables.  The "maybe" form
   allows for the variable form to not have been entered, otherwise we
   assert that the variable must have been entered.  */

static inline tree
lookup_decl (tree var, omp_context *ctx)
{
  tree *n = ctx->cb.decl_map->get (var);
  return *n;
}

static inline tree
maybe_lookup_decl (const_tree var, omp_context *ctx)
{
  tree *n = ctx->cb.decl_map->get (const_cast<tree> (var));
  return n ? *n : NULL_TREE;
}

static inline tree
lookup_field (tree var, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->field_map, (splay_tree_key) var);
  return (tree) n->value;
}

static inline tree
lookup_sfield (splay_tree_key key, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->sfield_map
			 ? ctx->sfield_map : ctx->field_map, key);
  return (tree) n->value;
}

static inline tree
lookup_sfield (tree var, omp_context *ctx)
{
  return lookup_sfield ((splay_tree_key) var, ctx);
}

static inline tree
maybe_lookup_field (splay_tree_key key, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->field_map, key);
  return n ? (tree) n->value : NULL_TREE;
}

static inline tree
maybe_lookup_field (tree var, omp_context *ctx)
{
  return maybe_lookup_field ((splay_tree_key) var, ctx);
}
/* Return true if DECL should be copied by pointer.  SHARED_CTX is
   the parallel context if DECL is to be shared.  */

static bool
use_pointer_for_field (tree decl, omp_context *shared_ctx)
{
  if (AGGREGATE_TYPE_P (TREE_TYPE (decl))
      || TYPE_ATOMIC (TREE_TYPE (decl)))
    return true;

  /* We can only use copy-in/copy-out semantics for shared variables
     when we know the value is not accessible from an outer scope.  */
  if (shared_ctx)
    {
      gcc_assert (!is_gimple_omp_oacc (shared_ctx->stmt));

      /* ??? Trivially accessible from anywhere.  But why would we even
	 be passing an address in this case?  Should we simply assert
	 this to be false, or should we have a cleanup pass that removes
	 these from the list of mappings?  */
      if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, shared_ctx)))
	return true;

      /* For variables with DECL_HAS_VALUE_EXPR_P set, we cannot tell
	 without analyzing the expression whether or not its location
	 is accessible to anyone else.  In the case of nested parallel
	 regions it certainly may be.  */
      if (TREE_CODE (decl) != RESULT_DECL && DECL_HAS_VALUE_EXPR_P (decl))
	return true;

      /* Do not use copy-in/copy-out for variables that have their
	 address taken.  */
      if (is_global_var (decl))
	{
	  /* For file scope vars, track whether we've seen them as
	     non-addressable initially and in that case, keep the same
	     answer for the duration of the pass, even when they are made
	     addressable later on e.g. through reduction expansion.  Global
	     variables which weren't addressable before the pass will not
	     have their privatized copies address taken.  See PR91216.  */
	  if (!TREE_ADDRESSABLE (decl))
	    {
	      if (!global_nonaddressable_vars)
		global_nonaddressable_vars = BITMAP_ALLOC (NULL);
	      bitmap_set_bit (global_nonaddressable_vars, DECL_UID (decl));
	    }
	  else if (!global_nonaddressable_vars
		   || !bitmap_bit_p (global_nonaddressable_vars,
				     DECL_UID (decl)))
	    return true;
	}
      else if (TREE_ADDRESSABLE (decl))
	return true;

      /* lower_send_shared_vars only uses copy-in, but not copy-out
	 for these.  */
      if (TREE_READONLY (decl)
	  || ((TREE_CODE (decl) == RESULT_DECL
	       || TREE_CODE (decl) == PARM_DECL)
	      && DECL_BY_REFERENCE (decl)))
	return false;

      /* Disallow copy-in/out in nested parallel if
	 decl is shared in outer parallel, otherwise
	 each thread could store the shared variable
	 in its own copy-in location, making the
	 variable no longer really shared.  */
      if (shared_ctx->is_nested)
	{
	  omp_context *up;

	  for (up = shared_ctx->outer; up; up = up->outer)
	    if ((is_taskreg_ctx (up)
		 || (gimple_code (up->stmt) == GIMPLE_OMP_TARGET
		     && is_gimple_omp_offloaded (up->stmt)))
		&& maybe_lookup_decl (decl, up))
	      break;

	  if (up)
	    {
	      tree c;

	      if (gimple_code (up->stmt) == GIMPLE_OMP_TARGET)
		{
		  for (c = gimple_omp_target_clauses (up->stmt);
		       c; c = OMP_CLAUSE_CHAIN (c))
		    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
			&& OMP_CLAUSE_DECL (c) == decl)
		      break;
		}
	      else
		for (c = gimple_omp_taskreg_clauses (up->stmt);
		     c; c = OMP_CLAUSE_CHAIN (c))
		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
		      && OMP_CLAUSE_DECL (c) == decl)
		    break;

	      if (c)
		goto maybe_mark_addressable_and_ret;
	    }
	}

      /* For tasks avoid using copy-in/out.  As tasks can be
	 deferred or executed in a different thread, when GOMP_task
	 returns, the task hasn't necessarily terminated.  */
      if (is_task_ctx (shared_ctx))
	{
	  tree outer;
	maybe_mark_addressable_and_ret:
	  outer = maybe_lookup_decl_in_outer_ctx (decl, shared_ctx);
	  if (is_gimple_reg (outer) && !omp_member_access_dummy_var (outer))
	    {
	      /* Taking address of OUTER in lower_send_shared_vars
		 might need regimplification of everything that uses the
		 variable.  */
	      if (!task_shared_vars)
		task_shared_vars = BITMAP_ALLOC (NULL);
	      bitmap_set_bit (task_shared_vars, DECL_UID (outer));
	      TREE_ADDRESSABLE (outer) = 1;
	    }
	  return true;
	}
    }

  return false;
}
/* Construct a new automatic decl similar to VAR.  */

static tree
omp_copy_decl_2 (tree var, tree name, tree type, omp_context *ctx)
{
  tree copy = copy_var_decl (var, name, type);

  DECL_CONTEXT (copy) = current_function_decl;
  DECL_CHAIN (copy) = ctx->block_vars;
  /* If VAR is listed in task_shared_vars, it means it wasn't
     originally addressable and is addressable just because task needs to
     take its address.  But we don't need to take address of privatizations
     from that var.  */
  if (TREE_ADDRESSABLE (var)
      && ((task_shared_vars
	   && bitmap_bit_p (task_shared_vars, DECL_UID (var)))
	  || (global_nonaddressable_vars
	      && bitmap_bit_p (global_nonaddressable_vars, DECL_UID (var)))))
    TREE_ADDRESSABLE (copy) = 0;
  ctx->block_vars = copy;

  return copy;
}

static tree
omp_copy_decl_1 (tree var, omp_context *ctx)
{
  return omp_copy_decl_2 (var, DECL_NAME (var), TREE_TYPE (var), ctx);
}
/* Build COMPONENT_REF and set TREE_THIS_VOLATILE and TREE_READONLY on it
   as appropriate.  */

static tree
omp_build_component_ref (tree obj, tree field)
{
  tree ret = build3 (COMPONENT_REF, TREE_TYPE (field), obj, field, NULL);
  if (TREE_THIS_VOLATILE (field))
    TREE_THIS_VOLATILE (ret) |= 1;
  if (TREE_READONLY (field))
    TREE_READONLY (ret) |= 1;
  return ret;
}

/* Build tree nodes to access the field for VAR on the receiver side.  */

static tree
build_receiver_ref (tree var, bool by_ref, omp_context *ctx)
{
  tree x, field = lookup_field (var, ctx);

  /* If the receiver record type was remapped in the child function,
     remap the field into the new record type.  */
  x = maybe_lookup_field (field, ctx);
  if (x != NULL)
    field = x;

  x = build_simple_mem_ref (ctx->receiver_decl);
  TREE_THIS_NOTRAP (x) = 1;
  x = omp_build_component_ref (x, field);
  if (by_ref)
    {
      x = build_simple_mem_ref (x);
      TREE_THIS_NOTRAP (x) = 1;
    }

  return x;
}
/* Build tree nodes to access VAR in the scope outer to CTX.  In the case
   of a parallel, this is a component reference; for workshare constructs
   this is some variable.  */

static tree
build_outer_var_ref (tree var, omp_context *ctx,
		     enum omp_clause_code code = OMP_CLAUSE_ERROR)
{
  tree x;
  omp_context *outer = ctx->outer;
  while (outer && gimple_code (outer->stmt) == GIMPLE_OMP_TASKGROUP)
    outer = outer->outer;

  if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
    x = var;
  else if (is_variable_sized (var))
    {
      x = TREE_OPERAND (DECL_VALUE_EXPR (var), 0);
      x = build_outer_var_ref (x, ctx, code);
      x = build_simple_mem_ref (x);
    }
  else if (is_taskreg_ctx (ctx))
    {
      bool by_ref = use_pointer_for_field (var, NULL);
      x = build_receiver_ref (var, by_ref, ctx);
    }
  else if ((gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
	    && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
	   || ctx->loop_p
	   || (code == OMP_CLAUSE_PRIVATE
	       && (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
		   || gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS
		   || gimple_code (ctx->stmt) == GIMPLE_OMP_SINGLE)))
    {
      /* #pragma omp simd isn't a worksharing construct, and can reference
	 even private vars in its linear etc. clauses.
	 Similarly for OMP_CLAUSE_PRIVATE with outer ref, that can refer
	 to private vars in all worksharing constructs.  */
      x = NULL_TREE;
      if (outer && is_taskreg_ctx (outer))
	x = lookup_decl (var, outer);
      else if (outer)
	x = maybe_lookup_decl_in_outer_ctx (var, ctx);
      if (x == NULL_TREE)
	x = var;
    }
  else if (code == OMP_CLAUSE_LASTPRIVATE && is_taskloop_ctx (ctx))
    {
      gcc_assert (outer);
      splay_tree_node n
	= splay_tree_lookup (outer->field_map,
			     (splay_tree_key) &DECL_UID (var));
      if (n == NULL)
	{
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, outer)))
	    x = var;
	  else
	    x = lookup_decl (var, outer);
	}
      else
	{
	  tree field = (tree) n->value;
	  /* If the receiver record type was remapped in the child function,
	     remap the field into the new record type.  */
	  x = maybe_lookup_field (field, outer);
	  if (x != NULL)
	    field = x;

	  x = build_simple_mem_ref (outer->receiver_decl);
	  x = omp_build_component_ref (x, field);
	  if (use_pointer_for_field (var, outer))
	    x = build_simple_mem_ref (x);
	}
    }
  else if (outer)
    x = lookup_decl (var, outer);
  else if (omp_is_reference (var))
    /* This can happen with orphaned constructs.  If var is reference, it is
       possible it is shared and as such valid.  */
    x = var;
  else if (omp_member_access_dummy_var (var))
    x = var;
  else
    gcc_unreachable ();

  if (x == var)
    {
      tree t = omp_member_access_dummy_var (var);
      if (t)
	{
	  x = DECL_VALUE_EXPR (var);
	  tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
	  if (o != t)
	    x = unshare_and_remap (x, t, o);
	  else
	    x = unshare_expr (x);
	}
    }

  if (omp_is_reference (var))
    x = build_simple_mem_ref (x);

  return x;
}
/* Build tree nodes to access the field for VAR on the sender side.  */

static tree
build_sender_ref (splay_tree_key key, omp_context *ctx)
{
  tree field = lookup_sfield (key, ctx);
  return omp_build_component_ref (ctx->sender_decl, field);
}

static tree
build_sender_ref (tree var, omp_context *ctx)
{
  return build_sender_ref ((splay_tree_key) var, ctx);
}
/* Add a new field for VAR inside the structure CTX->SENDER_DECL.  */
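/* MASK is a bitmask, as consumed below and by the callers: bit 1 enters
   the field into CTX->FIELD_MAP/RECORD_TYPE, bit 2 into
   CTX->SFIELD_MAP/SRECORD_TYPE, bit 4 turns an array type into a pointer
   to a pointer to it, bit 8 keys the splay tree by &DECL_UID (VAR), and
   bit 16 keys it by &DECL_NAME (VAR) and takes the type from
   lang_hooks.decls.omp_array_data (Fortran array descriptors).  */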
static void
install_var_field (tree var, bool by_ref, int mask, omp_context *ctx)
{
  tree field, type, sfield = NULL_TREE;
  splay_tree_key key = (splay_tree_key) var;

  if ((mask & 16) != 0)
    {
      key = (splay_tree_key) &DECL_NAME (var);
      gcc_checking_assert (key != (splay_tree_key) var);
    }
  if ((mask & 8) != 0)
    {
      key = (splay_tree_key) &DECL_UID (var);
      gcc_checking_assert (key != (splay_tree_key) var);
    }
  gcc_assert ((mask & 1) == 0
	      || !splay_tree_lookup (ctx->field_map, key));
  gcc_assert ((mask & 2) == 0 || !ctx->sfield_map
	      || !splay_tree_lookup (ctx->sfield_map, key));
  gcc_assert ((mask & 3) == 3
	      || !is_gimple_omp_oacc (ctx->stmt));

  type = TREE_TYPE (var);
  if ((mask & 16) != 0)
    type = lang_hooks.decls.omp_array_data (var, true);

  /* Prevent redeclaring the var in the split-off function with a restrict
     pointer type.  Note that we only clear type itself, restrict qualifiers in
     the pointed-to type will be ignored by points-to analysis.  */
  if (POINTER_TYPE_P (type)
      && TYPE_RESTRICT (type))
    type = build_qualified_type (type, TYPE_QUALS (type) & ~TYPE_QUAL_RESTRICT);

  if (mask & 4)
    {
      gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
      type = build_pointer_type (build_pointer_type (type));
    }
  else if (by_ref)
    type = build_pointer_type (type);
  else if ((mask & 3) == 1 && omp_is_reference (var))
    type = TREE_TYPE (type);

  field = build_decl (DECL_SOURCE_LOCATION (var),
		      FIELD_DECL, DECL_NAME (var), type);

  /* Remember what variable this field was created for.  This does have a
     side effect of making dwarf2out ignore this member, so for helpful
     debugging we clear it later in delete_omp_context.  */
  DECL_ABSTRACT_ORIGIN (field) = var;
  if ((mask & 16) == 0 && type == TREE_TYPE (var))
    {
      SET_DECL_ALIGN (field, DECL_ALIGN (var));
      DECL_USER_ALIGN (field) = DECL_USER_ALIGN (var);
      TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (var);
    }
  else
    SET_DECL_ALIGN (field, TYPE_ALIGN (type));

  if ((mask & 3) == 3)
    {
      insert_field_into_struct (ctx->record_type, field);
      if (ctx->srecord_type)
	{
	  sfield = build_decl (DECL_SOURCE_LOCATION (var),
			       FIELD_DECL, DECL_NAME (var), type);
	  DECL_ABSTRACT_ORIGIN (sfield) = var;
	  SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
	  DECL_USER_ALIGN (sfield) = DECL_USER_ALIGN (field);
	  TREE_THIS_VOLATILE (sfield) = TREE_THIS_VOLATILE (field);
	  insert_field_into_struct (ctx->srecord_type, sfield);
	}
    }
  else
    {
      if (ctx->srecord_type == NULL_TREE)
	{
	  tree t;

	  ctx->srecord_type = lang_hooks.types.make_type (RECORD_TYPE);
	  ctx->sfield_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
	  for (t = TYPE_FIELDS (ctx->record_type); t ; t = TREE_CHAIN (t))
	    {
	      sfield = build_decl (DECL_SOURCE_LOCATION (t),
				   FIELD_DECL, DECL_NAME (t), TREE_TYPE (t));
	      DECL_ABSTRACT_ORIGIN (sfield) = DECL_ABSTRACT_ORIGIN (t);
	      insert_field_into_struct (ctx->srecord_type, sfield);
	      splay_tree_insert (ctx->sfield_map,
				 (splay_tree_key) DECL_ABSTRACT_ORIGIN (t),
				 (splay_tree_value) sfield);
	    }
	}
      sfield = field;
      insert_field_into_struct ((mask & 1) ? ctx->record_type
				: ctx->srecord_type, field);
    }

  if (mask & 1)
    splay_tree_insert (ctx->field_map, key, (splay_tree_value) field);
  if ((mask & 2) && ctx->sfield_map)
    splay_tree_insert (ctx->sfield_map, key, (splay_tree_value) sfield);
}
static tree
install_var_local (tree var, omp_context *ctx)
{
  tree new_var = omp_copy_decl_1 (var, ctx);
  insert_decl_map (&ctx->cb, var, new_var);
  return new_var;
}

/* Adjust the replacement for DECL in CTX for the new context.  This means
   copying the DECL_VALUE_EXPR, and fixing up the type.  */

static void
fixup_remapped_decl (tree decl, omp_context *ctx, bool private_debug)
{
  tree new_decl, size;

  new_decl = lookup_decl (decl, ctx);

  TREE_TYPE (new_decl) = remap_type (TREE_TYPE (decl), &ctx->cb);

  if ((!TREE_CONSTANT (DECL_SIZE (new_decl)) || private_debug)
      && DECL_HAS_VALUE_EXPR_P (decl))
    {
      tree ve = DECL_VALUE_EXPR (decl);
      walk_tree (&ve, copy_tree_body_r, &ctx->cb, NULL);
      SET_DECL_VALUE_EXPR (new_decl, ve);
      DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
    }

  if (!TREE_CONSTANT (DECL_SIZE (new_decl)))
    {
      size = remap_decl (DECL_SIZE (decl), &ctx->cb);
      if (size == error_mark_node)
	size = TYPE_SIZE (TREE_TYPE (new_decl));
      DECL_SIZE (new_decl) = size;

      size = remap_decl (DECL_SIZE_UNIT (decl), &ctx->cb);
      if (size == error_mark_node)
	size = TYPE_SIZE_UNIT (TREE_TYPE (new_decl));
      DECL_SIZE_UNIT (new_decl) = size;
    }
}
/* The callback for remap_decl.  Search all containing contexts for a
   mapping of the variable; this avoids having to duplicate the splay
   tree ahead of time.  We know a mapping doesn't already exist in the
   given context.  Create new mappings to implement default semantics.  */

static tree
omp_copy_decl (tree var, copy_body_data *cb)
{
  omp_context *ctx = (omp_context *) cb;
  tree new_var;

  if (TREE_CODE (var) == LABEL_DECL)
    {
      if (FORCED_LABEL (var) || DECL_NONLOCAL (var))
	return var;
      new_var = create_artificial_label (DECL_SOURCE_LOCATION (var));
      DECL_CONTEXT (new_var) = current_function_decl;
      insert_decl_map (&ctx->cb, var, new_var);
      return new_var;
    }

  while (!is_taskreg_ctx (ctx))
    {
      ctx = ctx->outer;
      if (ctx == NULL)
	return var;
      new_var = maybe_lookup_decl (var, ctx);
      if (new_var)
	return new_var;
    }

  if (is_global_var (var) || decl_function_context (var) != ctx->cb.src_fn)
    return var;

  return error_mark_node;
}
/* Create a new context, with OUTER_CTX being the surrounding context.  */

static omp_context *
new_omp_context (gimple *stmt, omp_context *outer_ctx)
{
  omp_context *ctx = XCNEW (omp_context);

  splay_tree_insert (all_contexts, (splay_tree_key) stmt,
		     (splay_tree_value) ctx);
  ctx->stmt = stmt;

  if (outer_ctx)
    {
      ctx->outer = outer_ctx;
      ctx->cb = outer_ctx->cb;
      ctx->cb.block = NULL;
      ctx->depth = outer_ctx->depth + 1;
    }
  else
    {
      ctx->cb.src_fn = current_function_decl;
      ctx->cb.dst_fn = current_function_decl;
      ctx->cb.src_node = cgraph_node::get (current_function_decl);
      gcc_checking_assert (ctx->cb.src_node);
      ctx->cb.dst_node = ctx->cb.src_node;
      ctx->cb.src_cfun = cfun;
      ctx->cb.copy_decl = omp_copy_decl;
      ctx->cb.eh_lp_nr = 0;
      ctx->cb.transform_call_graph_edges = CB_CGE_MOVE;
      ctx->cb.adjust_array_error_bounds = true;
      ctx->cb.dont_remap_vla_if_no_change = true;
      ctx->depth = 1;
    }

  ctx->cb.decl_map = new hash_map<tree, tree>;

  return ctx;
}
static gimple_seq maybe_catch_exception (gimple_seq);

/* Finalize task copyfn.  */

static void
finalize_task_copyfn (gomp_task *task_stmt)
{
  struct function *child_cfun;
  tree child_fn;
  gimple_seq seq = NULL, new_seq;
  gbind *bind;

  child_fn = gimple_omp_task_copy_fn (task_stmt);
  if (child_fn == NULL_TREE)
    return;

  child_cfun = DECL_STRUCT_FUNCTION (child_fn);
  DECL_STRUCT_FUNCTION (child_fn)->curr_properties = cfun->curr_properties;

  push_cfun (child_cfun);
  bind = gimplify_body (child_fn, false);
  gimple_seq_add_stmt (&seq, bind);
  new_seq = maybe_catch_exception (seq);
  if (new_seq != seq)
    {
      bind = gimple_build_bind (NULL, new_seq, NULL);
      seq = NULL;
      gimple_seq_add_stmt (&seq, bind);
    }
  gimple_set_body (child_fn, seq);
  pop_cfun ();

  /* Inform the callgraph about the new function.  */
  cgraph_node *node = cgraph_node::get_create (child_fn);
  node->parallelized_function = 1;
  cgraph_node::add_new_function (child_fn, false);
}
/* Destroy an omp_context data structure.  Called through the splay tree
   value delete callback.  */

static void
delete_omp_context (splay_tree_value value)
{
  omp_context *ctx = (omp_context *) value;

  delete ctx->cb.decl_map;

  if (ctx->field_map)
    splay_tree_delete (ctx->field_map);
  if (ctx->sfield_map)
    splay_tree_delete (ctx->sfield_map);

  /* We hijacked DECL_ABSTRACT_ORIGIN earlier.  We need to clear it before
     it produces corrupt debug information.  */
  if (ctx->record_type)
    {
      tree t;
      for (t = TYPE_FIELDS (ctx->record_type); t ; t = DECL_CHAIN (t))
	DECL_ABSTRACT_ORIGIN (t) = NULL;
    }
  if (ctx->srecord_type)
    {
      tree t;
      for (t = TYPE_FIELDS (ctx->srecord_type); t ; t = DECL_CHAIN (t))
	DECL_ABSTRACT_ORIGIN (t) = NULL;
    }

  if (is_task_ctx (ctx))
    finalize_task_copyfn (as_a <gomp_task *> (ctx->stmt));

  if (ctx->task_reduction_map)
    {
      ctx->task_reductions.release ();
      delete ctx->task_reduction_map;
    }

  delete ctx->lastprivate_conditional_map;

  XDELETE (ctx);
}
/* Fix up RECEIVER_DECL with a type that has been remapped to the child
   context.  */

static void
fixup_child_record_type (omp_context *ctx)
{
  tree f, type = ctx->record_type;

  if (!ctx->receiver_decl)
    return;
  /* ??? It isn't sufficient to just call remap_type here, because
     variably_modified_type_p doesn't work the way we expect for
     record types.  Testing each field for whether it needs remapping
     and creating a new record by hand works, however.  */
  for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
    if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
      break;
  if (f)
    {
      tree name, new_fields = NULL;

      type = lang_hooks.types.make_type (RECORD_TYPE);
      name = DECL_NAME (TYPE_NAME (ctx->record_type));
      name = build_decl (DECL_SOURCE_LOCATION (ctx->receiver_decl),
			 TYPE_DECL, name, type);
      TYPE_NAME (type) = name;

      for (f = TYPE_FIELDS (ctx->record_type); f ; f = DECL_CHAIN (f))
	{
	  tree new_f = copy_node (f);
	  DECL_CONTEXT (new_f) = type;
	  TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &ctx->cb);
	  DECL_CHAIN (new_f) = new_fields;
	  walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &ctx->cb, NULL);
	  walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r,
		     &ctx->cb, NULL);
	  walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
		     &ctx->cb, NULL);
	  new_fields = new_f;

	  /* Arrange to be able to look up the receiver field
	     given the sender field.  */
	  splay_tree_insert (ctx->field_map, (splay_tree_key) f,
			     (splay_tree_value) new_f);
	}
      TYPE_FIELDS (type) = nreverse (new_fields);
      layout_type (type);
    }

  /* In a target region we never modify any of the pointers in *.omp_data_i,
     so attempt to help the optimizers.  */
  if (is_gimple_omp_offloaded (ctx->stmt))
    type = build_qualified_type (type, TYPE_QUAL_CONST);

  TREE_TYPE (ctx->receiver_decl)
    = build_qualified_type (build_reference_type (type), TYPE_QUAL_RESTRICT);
}
/* Instantiate decls as necessary in CTX to satisfy the data sharing
   specified by CLAUSES.  */
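/* Note the two-pass structure below: the first loop over CLAUSES installs
   fields and local replacements, the second fixes up remapped decls once
   all replacements are known, and a final walk scans any reduction
   initializer/merge sequences noted along the way.  */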
static void
scan_sharing_clauses (tree clauses, omp_context *ctx)
{
  tree c, decl;
  bool scan_array_reductions = false;

  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      bool by_ref;

      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_PRIVATE:
	  decl = OMP_CLAUSE_DECL (c);
	  if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
	    goto do_private;
	  else if (!is_variable_sized (decl))
	    install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_SHARED:
	  decl = OMP_CLAUSE_DECL (c);
	  /* Ignore shared directives in teams construct inside of
	     target construct.  */
	  if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
	      && !is_host_teams_ctx (ctx))
	    {
	      /* Global variables don't need to be copied,
		 the receiver side will use them directly.  */
	      tree odecl = maybe_lookup_decl_in_outer_ctx (decl, ctx);
	      if (is_global_var (odecl))
		break;
	      insert_decl_map (&ctx->cb, decl, odecl);
	      break;
	    }
	  gcc_assert (is_taskreg_ctx (ctx));
	  gcc_assert (!COMPLETE_TYPE_P (TREE_TYPE (decl))
		      || !is_variable_sized (decl));
	  /* Global variables don't need to be copied,
	     the receiver side will use them directly.  */
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
	    break;
	  if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	    {
	      use_pointer_for_field (decl, ctx);
	      break;
	    }
	  by_ref = use_pointer_for_field (decl, NULL);
	  if ((! TREE_READONLY (decl) && !OMP_CLAUSE_SHARED_READONLY (c))
	      || TREE_ADDRESSABLE (decl)
	      || by_ref
	      || omp_is_reference (decl))
	    {
	      by_ref = use_pointer_for_field (decl, ctx);
	      install_var_field (decl, by_ref, 3, ctx);
	      install_var_local (decl, ctx);
	      break;
	    }
	  /* We don't need to copy const scalar vars back.  */
	  OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_FIRSTPRIVATE);
	  goto do_private;

	case OMP_CLAUSE_REDUCTION:
	  if (is_oacc_parallel_or_serial (ctx) || is_oacc_kernels (ctx))
	    ctx->local_reduction_clauses
	      = tree_cons (NULL, c, ctx->local_reduction_clauses);
	  /* FALLTHRU */

	case OMP_CLAUSE_IN_REDUCTION:
	  decl = OMP_CLAUSE_DECL (c);
	  if (TREE_CODE (decl) == MEM_REF)
	    {
	      tree t = TREE_OPERAND (decl, 0);
	      if (TREE_CODE (t) == POINTER_PLUS_EXPR)
		t = TREE_OPERAND (t, 0);
	      if (TREE_CODE (t) == INDIRECT_REF
		  || TREE_CODE (t) == ADDR_EXPR)
		t = TREE_OPERAND (t, 0);
	      install_var_local (t, ctx);
	      if (is_taskreg_ctx (ctx)
		  && (!is_global_var (maybe_lookup_decl_in_outer_ctx (t, ctx))
		      || (is_task_ctx (ctx)
			  && (TREE_CODE (TREE_TYPE (t)) == POINTER_TYPE
			      || (TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE
				  && (TREE_CODE (TREE_TYPE (TREE_TYPE (t)))
				      == POINTER_TYPE)))))
		  && !is_variable_sized (t)
		  && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
		      || (!OMP_CLAUSE_REDUCTION_TASK (c)
			  && !is_task_ctx (ctx))))
		{
		  by_ref = use_pointer_for_field (t, NULL);
		  if (is_task_ctx (ctx)
		      && TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE
		      && TREE_CODE (TREE_TYPE (TREE_TYPE (t))) == POINTER_TYPE)
		    {
		      install_var_field (t, false, 1, ctx);
		      install_var_field (t, by_ref, 2, ctx);
		    }
		  else
		    install_var_field (t, by_ref, 3, ctx);
		}
	      break;
	    }
	  if (is_task_ctx (ctx)
	      || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
		  && OMP_CLAUSE_REDUCTION_TASK (c)
		  && is_parallel_ctx (ctx)))
	    {
	      /* Global variables don't need to be copied,
		 the receiver side will use them directly.  */
	      if (!is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
		{
		  by_ref = use_pointer_for_field (decl, ctx);
		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION)
		    install_var_field (decl, by_ref, 3, ctx);
		}
	      install_var_local (decl, ctx);
	      break;
	    }
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	      && OMP_CLAUSE_REDUCTION_TASK (c))
	    {
	      install_var_local (decl, ctx);
	      break;
	    }
	  goto do_private;

	case OMP_CLAUSE_LASTPRIVATE:
	  /* Let the corresponding firstprivate clause create
	     the variable.  */
	  if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
	    break;
	  /* FALLTHRU */

	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_LINEAR:
	  decl = OMP_CLAUSE_DECL (c);
	do_private:
	  if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
	       || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
	      && is_gimple_omp_offloaded (ctx->stmt))
	    {
	      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
		install_var_field (decl, !omp_is_reference (decl), 3, ctx);
	      else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
		install_var_field (decl, true, 3, ctx);
	      else
		install_var_field (decl, false, 3, ctx);
	    }
	  if (is_variable_sized (decl))
	    {
	      if (is_task_ctx (ctx))
		install_var_field (decl, false, 1, ctx);
	      break;
	    }
	  else if (is_taskreg_ctx (ctx))
	    {
	      bool global
		= is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx));
	      by_ref = use_pointer_for_field (decl, NULL);

	      if (is_task_ctx (ctx)
		  && (global || by_ref || omp_is_reference (decl)))
		{
		  install_var_field (decl, false, 1, ctx);
		  if (!global)
		    install_var_field (decl, by_ref, 2, ctx);
		}
	      else if (!global)
		install_var_field (decl, by_ref, 3, ctx);
	    }
	  install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_USE_DEVICE_PTR:
	case OMP_CLAUSE_USE_DEVICE_ADDR:
	  decl = OMP_CLAUSE_DECL (c);

	  /* Fortran array descriptors.  */
	  if (lang_hooks.decls.omp_array_data (decl, true))
	    install_var_field (decl, false, 19, ctx);
	  else if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
		    && !omp_is_reference (decl)
		    && !omp_is_allocatable_or_ptr (decl))
		   || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	    install_var_field (decl, true, 11, ctx);
	  else
	    install_var_field (decl, false, 11, ctx);
	  if (DECL_SIZE (decl)
	      && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
	    {
	      tree decl2 = DECL_VALUE_EXPR (decl);
	      gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
	      decl2 = TREE_OPERAND (decl2, 0);
	      gcc_assert (DECL_P (decl2));
	      install_var_local (decl2, ctx);
	    }
	  install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_IS_DEVICE_PTR:
	  decl = OMP_CLAUSE_DECL (c);
	  goto do_private;

	case OMP_CLAUSE__LOOPTEMP_:
	case OMP_CLAUSE__REDUCTEMP_:
	  gcc_assert (is_taskreg_ctx (ctx));
	  decl = OMP_CLAUSE_DECL (c);
	  install_var_field (decl, false, 3, ctx);
	  install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_COPYIN:
	  decl = OMP_CLAUSE_DECL (c);
	  by_ref = use_pointer_for_field (decl, NULL);
	  install_var_field (decl, by_ref, 3, ctx);
	  break;

	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_DIST_SCHEDULE:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE_PRIORITY:
	case OMP_CLAUSE_GRAINSIZE:
	case OMP_CLAUSE_NUM_TASKS:
	case OMP_CLAUSE_NUM_GANGS:
	case OMP_CLAUSE_NUM_WORKERS:
	case OMP_CLAUSE_VECTOR_LENGTH:
	  if (ctx->outer)
	    scan_omp_op (&OMP_CLAUSE_OPERAND (c, 0), ctx->outer);
	  break;

	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	case OMP_CLAUSE_MAP:
	  if (ctx->outer)
	    scan_omp_op (&OMP_CLAUSE_SIZE (c), ctx->outer);
	  decl = OMP_CLAUSE_DECL (c);
	  /* Global variables with "omp declare target" attribute
	     don't need to be copied, the receiver side will use them
	     directly.  However, global variables with "omp declare target link"
	     attribute need to be copied.  Or when ALWAYS modifier is used.  */
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && DECL_P (decl)
	      && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
		   && (OMP_CLAUSE_MAP_KIND (c)
		       != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
		  || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	      && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TO
	      && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_FROM
	      && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TOFROM
	      && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_TO_PSET
	      && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
	      && varpool_node::get_create (decl)->offloadable
	      && !lookup_attribute ("omp declare target link",
				    DECL_ATTRIBUTES (decl)))
	    break;
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER)
	    {
	      /* Ignore GOMP_MAP_POINTER kind for arrays in regions that are
		 not offloaded; there is nothing to map for those.  */
	      if (!is_gimple_omp_offloaded (ctx->stmt)
		  && !POINTER_TYPE_P (TREE_TYPE (decl))
		  && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
		break;
	    }
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
		  || (OMP_CLAUSE_MAP_KIND (c)
		      == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
	    {
	      if (TREE_CODE (decl) == COMPONENT_REF
		  || (TREE_CODE (decl) == INDIRECT_REF
		      && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
		      && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
			  == REFERENCE_TYPE)))
		break;
	      if (DECL_SIZE (decl)
		  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  install_var_local (decl2, ctx);
		}
	      install_var_local (decl, ctx);
	      break;
	    }
	  if (DECL_P (decl))
	    {
	      if (DECL_SIZE (decl)
		  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  install_var_field (decl2, true, 3, ctx);
		  install_var_local (decl2, ctx);
		  install_var_local (decl, ctx);
		}
	      else
		{
		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
		      && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
		      && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
		      && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
		    install_var_field (decl, true, 7, ctx);
		  else
		    install_var_field (decl, true, 3, ctx);
		  if (is_gimple_omp_offloaded (ctx->stmt)
		      && !OMP_CLAUSE_MAP_IN_REDUCTION (c))
		    install_var_local (decl, ctx);
		}
	    }
	  else
	    {
	      tree base = get_base_address (decl);
	      tree nc = OMP_CLAUSE_CHAIN (c);
	      if (DECL_P (base)
		  && nc != NULL_TREE
		  && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
		  && OMP_CLAUSE_DECL (nc) == base
		  && OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_POINTER
		  && integer_zerop (OMP_CLAUSE_SIZE (nc)))
		{
		  OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c) = 1;
		  OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (nc) = 1;
		}
	      else
		{
		  if (ctx->outer)
		    {
		      scan_omp_op (&OMP_CLAUSE_DECL (c), ctx->outer);
		      decl = OMP_CLAUSE_DECL (c);
		    }
		  gcc_assert (!splay_tree_lookup (ctx->field_map,
						  (splay_tree_key) decl));
		  tree field
		    = build_decl (OMP_CLAUSE_LOCATION (c),
				  FIELD_DECL, NULL_TREE, ptr_type_node);
		  SET_DECL_ALIGN (field, TYPE_ALIGN (ptr_type_node));
		  insert_field_into_struct (ctx->record_type, field);
		  splay_tree_insert (ctx->field_map, (splay_tree_key) decl,
				     (splay_tree_value) field);
		}
	    }
	  break;

	case OMP_CLAUSE_ORDER:
	  ctx->order_concurrent = true;
	  break;

	case OMP_CLAUSE_BIND:
	  ctx->loop_p = true;
	  break;

	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE_SIMDLEN:
	case OMP_CLAUSE_THREADS:
	case OMP_CLAUSE_SIMD:
	case OMP_CLAUSE_NOGROUP:
	case OMP_CLAUSE_DEFAULTMAP:
	case OMP_CLAUSE_ASYNC:
	case OMP_CLAUSE_WAIT:
	case OMP_CLAUSE_GANG:
	case OMP_CLAUSE_WORKER:
	case OMP_CLAUSE_VECTOR:
	case OMP_CLAUSE_INDEPENDENT:
	case OMP_CLAUSE_AUTO:
	case OMP_CLAUSE_SEQ:
	case OMP_CLAUSE_TILE:
	case OMP_CLAUSE__SIMT_:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_NONTEMPORAL:
	case OMP_CLAUSE_IF_PRESENT:
	case OMP_CLAUSE_FINALIZE:
	case OMP_CLAUSE_TASK_REDUCTION:
	  break;

	case OMP_CLAUSE_ALIGNED:
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_global_var (decl)
	      && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	    install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE__CONDTEMP_:
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_parallel_ctx (ctx))
	    {
	      install_var_field (decl, false, 3, ctx);
	      install_var_local (decl, ctx);
	    }
	  else if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
		   && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD
		   && !OMP_CLAUSE__CONDTEMP__ITER (c))
	    install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE__CACHE_:
	default:
	  gcc_unreachable ();
	}
    }

  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_LASTPRIVATE:
	  /* Let the corresponding firstprivate clause create
	     the variable.  */
	  if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
	    scan_array_reductions = true;
	  if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
	    break;
	  /* FALLTHRU */

	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_LINEAR:
	case OMP_CLAUSE_IS_DEVICE_PTR:
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_variable_sized (decl))
	    {
	      if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
		   || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
		  && is_gimple_omp_offloaded (ctx->stmt))
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  install_var_local (decl2, ctx);
		  fixup_remapped_decl (decl2, ctx, false);
		}
	      install_var_local (decl, ctx);
	    }
	  fixup_remapped_decl (decl, ctx,
			       OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
			       && OMP_CLAUSE_PRIVATE_DEBUG (c));
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
	      && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
	    scan_array_reductions = true;
	  break;

	case OMP_CLAUSE_REDUCTION:
	case OMP_CLAUSE_IN_REDUCTION:
	  decl = OMP_CLAUSE_DECL (c);
	  if (TREE_CODE (decl) != MEM_REF)
	    {
	      if (is_variable_sized (decl))
		install_var_local (decl, ctx);
	      fixup_remapped_decl (decl, ctx, false);
	    }
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	    scan_array_reductions = true;
	  break;

	case OMP_CLAUSE_TASK_REDUCTION:
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	    scan_array_reductions = true;
	  break;

	case OMP_CLAUSE_SHARED:
	  /* Ignore shared directives in teams construct inside of
	     target construct.  */
	  if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
	      && !is_host_teams_ctx (ctx))
	    break;
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
	    break;
	  if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	    {
	      if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl,
								 ctx->outer)))
		break;
	      bool by_ref = use_pointer_for_field (decl, ctx);
	      install_var_field (decl, by_ref, 11, ctx);
	      break;
	    }
	  fixup_remapped_decl (decl, ctx, false);
	  break;

	case OMP_CLAUSE_MAP:
	  if (!is_gimple_omp_offloaded (ctx->stmt))
	    break;
	  decl = OMP_CLAUSE_DECL (c);
	  if (DECL_P (decl)
	      && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
		   && (OMP_CLAUSE_MAP_KIND (c)
		       != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
		  || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	      && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
	      && varpool_node::get_create (decl)->offloadable)
	    break;
	  if (DECL_P (decl))
	    {
	      if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
		   || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
		  && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
		  && !COMPLETE_TYPE_P (TREE_TYPE (decl)))
		{
		  tree new_decl = lookup_decl (decl, ctx);
		  TREE_TYPE (new_decl)
		    = remap_type (TREE_TYPE (decl), &ctx->cb);
		}
	      else if (DECL_SIZE (decl)
		       && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  fixup_remapped_decl (decl2, ctx, false);
		  fixup_remapped_decl (decl, ctx, true);
		}
	      else
		fixup_remapped_decl (decl, ctx, false);
	    }
	  break;

	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_DIST_SCHEDULE:
	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE_SIMDLEN:
	case OMP_CLAUSE_ALIGNED:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE__LOOPTEMP_:
	case OMP_CLAUSE__REDUCTEMP_:
	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	case OMP_CLAUSE_PRIORITY:
	case OMP_CLAUSE_GRAINSIZE:
	case OMP_CLAUSE_NUM_TASKS:
	case OMP_CLAUSE_THREADS:
	case OMP_CLAUSE_SIMD:
	case OMP_CLAUSE_NOGROUP:
	case OMP_CLAUSE_DEFAULTMAP:
	case OMP_CLAUSE_ORDER:
	case OMP_CLAUSE_BIND:
	case OMP_CLAUSE_USE_DEVICE_PTR:
	case OMP_CLAUSE_USE_DEVICE_ADDR:
	case OMP_CLAUSE_NONTEMPORAL:
	case OMP_CLAUSE_ASYNC:
	case OMP_CLAUSE_WAIT:
	case OMP_CLAUSE_NUM_GANGS:
	case OMP_CLAUSE_NUM_WORKERS:
	case OMP_CLAUSE_VECTOR_LENGTH:
	case OMP_CLAUSE_GANG:
	case OMP_CLAUSE_WORKER:
	case OMP_CLAUSE_VECTOR:
	case OMP_CLAUSE_INDEPENDENT:
	case OMP_CLAUSE_AUTO:
	case OMP_CLAUSE_SEQ:
	case OMP_CLAUSE_TILE:
	case OMP_CLAUSE__SIMT_:
	case OMP_CLAUSE_IF_PRESENT:
	case OMP_CLAUSE_FINALIZE:
	case OMP_CLAUSE__CONDTEMP_:
	  break;

	case OMP_CLAUSE__CACHE_:
	default:
	  gcc_unreachable ();
	}
    }

  gcc_checking_assert (!scan_array_reductions
		       || !is_gimple_omp_oacc (ctx->stmt));
  if (scan_array_reductions)
    {
      for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
	if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	     || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
	     || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
	    && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	  {
	    scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c), ctx);
	    scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
	  }
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
		 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
	  scan_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
		 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
	  scan_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
    }
}
/* Create a new name for the omp child function.  Returns an identifier.  */

static tree
create_omp_child_function_name (bool task_copy)
{
  return clone_function_name_numbered (current_function_decl,
				       task_copy ? "_omp_cpyfn" : "_omp_fn");
}
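/* E.g. for a function "foo" this yields identifiers such as foo._omp_fn.0,
   foo._omp_fn.1, ... (or foo._omp_cpyfn.N for task copy functions); the
   exact numbering comes from clone_function_name_numbered.  */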
/* Return true if CTX may belong to offloaded code: either if current function
   is offloaded, or any enclosing context corresponds to a target region.  */

static bool
omp_maybe_offloaded_ctx (omp_context *ctx)
{
  if (cgraph_node::get (current_function_decl)->offloadable)
    return true;
  for (; ctx; ctx = ctx->outer)
    if (is_gimple_omp_offloaded (ctx->stmt))
      return true;
  return false;
}
/* Build a decl for the omp child function.  It'll not contain a body
   yet, just the bare decl.  */

static void
create_omp_child_function (omp_context *ctx, bool task_copy)
{
  tree decl, type, name, t;

  name = create_omp_child_function_name (task_copy);
  if (task_copy)
    type = build_function_type_list (void_type_node, ptr_type_node,
				     ptr_type_node, NULL_TREE);
  else
    type = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);

  decl = build_decl (gimple_location (ctx->stmt), FUNCTION_DECL, name, type);

  gcc_checking_assert (!is_gimple_omp_oacc (ctx->stmt)
		       || !task_copy);
  if (!task_copy)
    ctx->cb.dst_fn = decl;
  else
    gimple_omp_task_set_copy_fn (ctx->stmt, decl);

  TREE_STATIC (decl) = 1;
  TREE_USED (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  DECL_IGNORED_P (decl) = 0;
  TREE_PUBLIC (decl) = 0;
  DECL_UNINLINABLE (decl) = 1;
  DECL_EXTERNAL (decl) = 0;
  DECL_CONTEXT (decl) = NULL_TREE;
  DECL_INITIAL (decl) = make_node (BLOCK);
  BLOCK_SUPERCONTEXT (DECL_INITIAL (decl)) = decl;
  DECL_ATTRIBUTES (decl) = DECL_ATTRIBUTES (current_function_decl);
  /* Remove omp declare simd attribute from the new attributes.  */
  if (tree a = lookup_attribute ("omp declare simd", DECL_ATTRIBUTES (decl)))
    {
      while (tree a2 = lookup_attribute ("omp declare simd", TREE_CHAIN (a)))
	a = a2;
      a = TREE_CHAIN (a);
      for (tree *p = &DECL_ATTRIBUTES (decl); *p != a;)
	if (is_attribute_p ("omp declare simd", get_attribute_name (*p)))
	  *p = TREE_CHAIN (*p);
	else
	  {
	    tree chain = TREE_CHAIN (*p);
	    *p = copy_node (*p);
	    p = &TREE_CHAIN (*p);
	    *p = chain;
	  }
    }
  DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
    = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (current_function_decl);
  DECL_FUNCTION_SPECIFIC_TARGET (decl)
    = DECL_FUNCTION_SPECIFIC_TARGET (current_function_decl);
  DECL_FUNCTION_VERSIONED (decl)
    = DECL_FUNCTION_VERSIONED (current_function_decl);

  if (omp_maybe_offloaded_ctx (ctx))
    {
      cgraph_node::get_create (decl)->offloadable = 1;
      if (ENABLE_OFFLOADING)
	g->have_offload = true;
    }

  if (cgraph_node::get_create (decl)->offloadable
      && !lookup_attribute ("omp declare target",
			    DECL_ATTRIBUTES (current_function_decl)))
    {
      const char *target_attr = (is_gimple_omp_offloaded (ctx->stmt)
				 ? "omp target entrypoint"
				 : "omp declare target");
      DECL_ATTRIBUTES (decl)
	= tree_cons (get_identifier (target_attr),
		     NULL_TREE, DECL_ATTRIBUTES (decl));
    }

  t = build_decl (DECL_SOURCE_LOCATION (decl),
		  RESULT_DECL, NULL_TREE, void_type_node);
  DECL_ARTIFICIAL (t) = 1;
  DECL_IGNORED_P (t) = 1;
  DECL_CONTEXT (t) = decl;
  DECL_RESULT (decl) = t;

  tree data_name = get_identifier (".omp_data_i");
  t = build_decl (DECL_SOURCE_LOCATION (decl), PARM_DECL, data_name,
		  ptr_type_node);
  DECL_ARTIFICIAL (t) = 1;
  DECL_NAMELESS (t) = 1;
  DECL_ARG_TYPE (t) = ptr_type_node;
  DECL_CONTEXT (t) = current_function_decl;
  TREE_USED (t) = 1;
  TREE_READONLY (t) = 1;
  DECL_ARGUMENTS (decl) = t;
  if (!task_copy)
    ctx->receiver_decl = t;
  else
    {
      t = build_decl (DECL_SOURCE_LOCATION (decl),
		      PARM_DECL, get_identifier (".omp_data_o"),
		      ptr_type_node);
      DECL_ARTIFICIAL (t) = 1;
      DECL_NAMELESS (t) = 1;
      DECL_ARG_TYPE (t) = ptr_type_node;
      DECL_CONTEXT (t) = current_function_decl;
      TREE_USED (t) = 1;
      TREE_ADDRESSABLE (t) = 1;
      DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
      DECL_ARGUMENTS (decl) = t;
    }

  /* Allocate memory for the function structure.  The call to
     allocate_struct_function clobbers CFUN, so we need to restore
     it afterward.  */
  push_struct_function (decl);
  cfun->function_end_locus = gimple_location (ctx->stmt);
  init_tree_ssa (cfun);
  pop_cfun ();
}
1862 /* Callback for walk_gimple_seq. Check if combined parallel
1863 contains gimple_omp_for_combined_into_p OMP_FOR. */
1865 tree
1866 omp_find_combined_for (gimple_stmt_iterator *gsi_p,
1867 bool *handled_ops_p,
1868 struct walk_stmt_info *wi)
1870 gimple *stmt = gsi_stmt (*gsi_p);
1872 *handled_ops_p = true;
1873 switch (gimple_code (stmt))
1875 WALK_SUBSTMTS;
1877 case GIMPLE_OMP_FOR:
1878 if (gimple_omp_for_combined_into_p (stmt)
1879 && gimple_omp_for_kind (stmt)
1880 == *(const enum gf_mask *) (wi->info))
1882 wi->info = stmt;
1883 return integer_zero_node;
1885 break;
1886 default:
1887 break;
1889 return NULL;
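/* For instance, for the combined construct

     #pragma omp parallel for
     for (i = 0; i < n; i++)
       ...

   the inner GIMPLE_OMP_FOR is marked with
   gimple_omp_for_combined_into_p, and this callback is what locates
   it inside the parallel's body (a source-level sketch; the walk
   itself operates on the GIMPLE sequence).  */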
1892 /* Add _LOOPTEMP_/_REDUCTEMP_ clauses on OpenMP parallel or task. */
1894 static void
1895 add_taskreg_looptemp_clauses (enum gf_mask msk, gimple *stmt,
1896 omp_context *outer_ctx)
1898 struct walk_stmt_info wi;
1900 memset (&wi, 0, sizeof (wi));
1901 wi.val_only = true;
1902 wi.info = (void *) &msk;
1903 walk_gimple_seq (gimple_omp_body (stmt), omp_find_combined_for, NULL, &wi);
1904 if (wi.info != (void *) &msk)
1906 gomp_for *for_stmt = as_a <gomp_for *> ((gimple *) wi.info);
1907 struct omp_for_data fd;
1908 omp_extract_for_data (for_stmt, &fd, NULL);
1909 /* We need two temporaries of fd.iter_type (istart/iend)
1910 and then (fd.collapse - 1) temporaries of the same
1911 type for the count2 ... countN-1 vars if not constant. */
1912 size_t count = 2, i;
1913 tree type = fd.iter_type;
1914 if (fd.collapse > 1
1915 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
1917 count += fd.collapse - 1;
1918 /* If there are lastprivate clauses on the inner
1919 GIMPLE_OMP_FOR, add one more temporary for the total number
1920 of iterations (the product of count1 ... countN-1). */
1921 if (omp_find_clause (gimple_omp_for_clauses (for_stmt),
1922 OMP_CLAUSE_LASTPRIVATE)
1923 || (msk == GF_OMP_FOR_KIND_FOR
1924 && omp_find_clause (gimple_omp_parallel_clauses (stmt),
1925 OMP_CLAUSE_LASTPRIVATE)))
1927 tree temp = create_tmp_var (type);
1928 tree c = build_omp_clause (UNKNOWN_LOCATION,
1929 OMP_CLAUSE__LOOPTEMP_);
1930 insert_decl_map (&outer_ctx->cb, temp, temp);
1931 OMP_CLAUSE_DECL (c) = temp;
1932 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
1933 gimple_omp_taskreg_set_clauses (stmt, c);
1935 if (fd.non_rect
1936 && fd.last_nonrect == fd.first_nonrect + 1)
1937 if (tree v = gimple_omp_for_index (for_stmt, fd.last_nonrect))
1938 if (!TYPE_UNSIGNED (TREE_TYPE (v)))
1940 v = gimple_omp_for_index (for_stmt, fd.first_nonrect);
1941 tree type2 = TREE_TYPE (v);
1942 count++;
1943 for (i = 0; i < 3; i++)
1945 tree temp = create_tmp_var (type2);
1946 tree c = build_omp_clause (UNKNOWN_LOCATION,
1947 OMP_CLAUSE__LOOPTEMP_);
1948 insert_decl_map (&outer_ctx->cb, temp, temp);
1949 OMP_CLAUSE_DECL (c) = temp;
1950 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
1951 gimple_omp_taskreg_set_clauses (stmt, c);
1955 for (i = 0; i < count; i++)
1957 tree temp = create_tmp_var (type);
1958 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
1959 insert_decl_map (&outer_ctx->cb, temp, temp);
1960 OMP_CLAUSE_DECL (c) = temp;
1961 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
1962 gimple_omp_taskreg_set_clauses (stmt, c);
1965 if (msk == GF_OMP_FOR_KIND_TASKLOOP
1966 && omp_find_clause (gimple_omp_task_clauses (stmt),
1967 OMP_CLAUSE_REDUCTION))
1969 tree type = build_pointer_type (pointer_sized_int_node);
1970 tree temp = create_tmp_var (type);
1971 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
1972 insert_decl_map (&outer_ctx->cb, temp, temp);
1973 OMP_CLAUSE_DECL (c) = temp;
1974 OMP_CLAUSE_CHAIN (c) = gimple_omp_task_clauses (stmt);
1975 gimple_omp_task_set_clauses (stmt, c);
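/* As a rough example of what the above produces, a combined

     #pragma omp parallel for collapse(2) lastprivate(k)
     for (i = 0; i < n; i++)
       for (j = 0; j < m; j++)
         ...

   always gets two _looptemp_ temporaries (istart/iend), plus one per
   collapsed dimension beyond the first if the iteration count is not
   constant, plus one more for the total iteration count because of
   the lastprivate.  The exact counts depend on the loop shape
   computed by omp_extract_for_data.  */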
1979 /* Scan an OpenMP parallel directive. */
1981 static void
1982 scan_omp_parallel (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
1984 omp_context *ctx;
1985 tree name;
1986 gomp_parallel *stmt = as_a <gomp_parallel *> (gsi_stmt (*gsi));
1988 /* Ignore parallel directives with empty bodies, unless there
1989 are copyin clauses. */
1990 if (optimize > 0
1991 && empty_body_p (gimple_omp_body (stmt))
1992 && omp_find_clause (gimple_omp_parallel_clauses (stmt),
1993 OMP_CLAUSE_COPYIN) == NULL)
1995 gsi_replace (gsi, gimple_build_nop (), false);
1996 return;
1999 if (gimple_omp_parallel_combined_p (stmt))
2000 add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_FOR, stmt, outer_ctx);
2001 for (tree c = omp_find_clause (gimple_omp_parallel_clauses (stmt),
2002 OMP_CLAUSE_REDUCTION);
2003 c; c = omp_find_clause (OMP_CLAUSE_CHAIN (c), OMP_CLAUSE_REDUCTION))
2004 if (OMP_CLAUSE_REDUCTION_TASK (c))
2006 tree type = build_pointer_type (pointer_sized_int_node);
2007 tree temp = create_tmp_var (type);
2008 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
2009 if (outer_ctx)
2010 insert_decl_map (&outer_ctx->cb, temp, temp);
2011 OMP_CLAUSE_DECL (c) = temp;
2012 OMP_CLAUSE_CHAIN (c) = gimple_omp_parallel_clauses (stmt);
2013 gimple_omp_parallel_set_clauses (stmt, c);
2014 break;
2016 else if (OMP_CLAUSE_CHAIN (c) == NULL_TREE)
2017 break;
2019 ctx = new_omp_context (stmt, outer_ctx);
2020 taskreg_contexts.safe_push (ctx);
2021 if (taskreg_nesting_level > 1)
2022 ctx->is_nested = true;
2023 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2024 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2025 name = create_tmp_var_name (".omp_data_s");
2026 name = build_decl (gimple_location (stmt),
2027 TYPE_DECL, name, ctx->record_type);
2028 DECL_ARTIFICIAL (name) = 1;
2029 DECL_NAMELESS (name) = 1;
2030 TYPE_NAME (ctx->record_type) = name;
2031 TYPE_ARTIFICIAL (ctx->record_type) = 1;
2032 create_omp_child_function (ctx, false);
2033 gimple_omp_parallel_set_child_fn (stmt, ctx->cb.dst_fn);
2035 scan_sharing_clauses (gimple_omp_parallel_clauses (stmt), ctx);
2036 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2038 if (TYPE_FIELDS (ctx->record_type) == NULL)
2039 ctx->record_type = ctx->receiver_decl = NULL;
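/* By way of example (a sketch, not lifted from an actual dump),

     int x, y;
     ...
     #pragma omp parallel shared(x) firstprivate(y)
     ...

   typically yields a record along the lines of

     struct .omp_data_s { int *x; int y; };

   where X travels by reference and Y by value; whether a field
   becomes a pointer is decided by use_pointer_for_field while the
   sharing clauses are scanned.  */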
2042 /* Scan an OpenMP task directive. */
2044 static void
2045 scan_omp_task (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
2047 omp_context *ctx;
2048 tree name, t;
2049 gomp_task *stmt = as_a <gomp_task *> (gsi_stmt (*gsi));
2051 /* Ignore task directives with empty bodies, unless they have a
2052 depend clause. */
2053 if (optimize > 0
2054 && gimple_omp_body (stmt)
2055 && empty_body_p (gimple_omp_body (stmt))
2056 && !omp_find_clause (gimple_omp_task_clauses (stmt), OMP_CLAUSE_DEPEND))
2058 gsi_replace (gsi, gimple_build_nop (), false);
2059 return;
2062 if (gimple_omp_task_taskloop_p (stmt))
2063 add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_TASKLOOP, stmt, outer_ctx);
2065 ctx = new_omp_context (stmt, outer_ctx);
2067 if (gimple_omp_task_taskwait_p (stmt))
2069 scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);
2070 return;
2073 taskreg_contexts.safe_push (ctx);
2074 if (taskreg_nesting_level > 1)
2075 ctx->is_nested = true;
2076 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2077 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2078 name = create_tmp_var_name (".omp_data_s");
2079 name = build_decl (gimple_location (stmt),
2080 TYPE_DECL, name, ctx->record_type);
2081 DECL_ARTIFICIAL (name) = 1;
2082 DECL_NAMELESS (name) = 1;
2083 TYPE_NAME (ctx->record_type) = name;
2084 TYPE_ARTIFICIAL (ctx->record_type) = 1;
2085 create_omp_child_function (ctx, false);
2086 gimple_omp_task_set_child_fn (stmt, ctx->cb.dst_fn);
2088 scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);
2090 if (ctx->srecord_type)
2092 name = create_tmp_var_name (".omp_data_a");
2093 name = build_decl (gimple_location (stmt),
2094 TYPE_DECL, name, ctx->srecord_type);
2095 DECL_ARTIFICIAL (name) = 1;
2096 DECL_NAMELESS (name) = 1;
2097 TYPE_NAME (ctx->srecord_type) = name;
2098 TYPE_ARTIFICIAL (ctx->srecord_type) = 1;
2099 create_omp_child_function (ctx, true);
2102 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2104 if (TYPE_FIELDS (ctx->record_type) == NULL)
2106 ctx->record_type = ctx->receiver_decl = NULL;
2107 t = build_int_cst (long_integer_type_node, 0);
2108 gimple_omp_task_set_arg_size (stmt, t);
2109 t = build_int_cst (long_integer_type_node, 1);
2110 gimple_omp_task_set_arg_align (stmt, t);
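/* E.g. a task that captures nothing,

     #pragma omp task
     foo ();

   ends up with no fields in its .omp_data_s, so the GOMP_task call
   eventually emitted by pass_expand_omp is given arg_size 0 and
   arg_align 1 as set just above (an outline, assuming the usual
   libgomp entry point).  */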
2114 /* Helper function for finish_taskreg_scan, called through walk_tree.
2115 If maybe_lookup_decl_in_outer_ctx returns a different tree for
2116 some decl, replace the decl in the expression. */
2118 static tree
2119 finish_taskreg_remap (tree *tp, int *walk_subtrees, void *data)
2121 if (VAR_P (*tp))
2123 omp_context *ctx = (omp_context *) data;
2124 tree t = maybe_lookup_decl_in_outer_ctx (*tp, ctx);
2125 if (t != *tp)
2127 if (DECL_HAS_VALUE_EXPR_P (t))
2128 t = unshare_expr (DECL_VALUE_EXPR (t));
2129 *tp = t;
2131 *walk_subtrees = 0;
2133 else if (IS_TYPE_OR_DECL_P (*tp))
2134 *walk_subtrees = 0;
2135 return NULL_TREE;
2138 /* If any decls have been made addressable during scan_omp,
2139 adjust their fields if needed, and lay out the record types
2140 of parallel/task constructs. */
2142 static void
2143 finish_taskreg_scan (omp_context *ctx)
2145 if (ctx->record_type == NULL_TREE)
2146 return;
2148 /* If any task_shared_vars were needed, check for all
2149 OMP_CLAUSE_SHARED clauses on GIMPLE_OMP_{PARALLEL,TASK,TEAMS}
2150 statements whether use_pointer_for_field has changed
2151 because of that. If it did, update the field types now. */
2152 if (task_shared_vars)
2154 tree c;
2156 for (c = gimple_omp_taskreg_clauses (ctx->stmt);
2157 c; c = OMP_CLAUSE_CHAIN (c))
2158 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
2159 && !OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
2161 tree decl = OMP_CLAUSE_DECL (c);
2163 /* Global variables don't need to be copied,
2164 the receiver side will use them directly. */
2165 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
2166 continue;
2167 if (!bitmap_bit_p (task_shared_vars, DECL_UID (decl))
2168 || !use_pointer_for_field (decl, ctx))
2169 continue;
2170 tree field = lookup_field (decl, ctx);
2171 if (TREE_CODE (TREE_TYPE (field)) == POINTER_TYPE
2172 && TREE_TYPE (TREE_TYPE (field)) == TREE_TYPE (decl))
2173 continue;
2174 TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
2175 TREE_THIS_VOLATILE (field) = 0;
2176 DECL_USER_ALIGN (field) = 0;
2177 SET_DECL_ALIGN (field, TYPE_ALIGN (TREE_TYPE (field)));
2178 if (TYPE_ALIGN (ctx->record_type) < DECL_ALIGN (field))
2179 SET_TYPE_ALIGN (ctx->record_type, DECL_ALIGN (field));
2180 if (ctx->srecord_type)
2182 tree sfield = lookup_sfield (decl, ctx);
2183 TREE_TYPE (sfield) = TREE_TYPE (field);
2184 TREE_THIS_VOLATILE (sfield) = 0;
2185 DECL_USER_ALIGN (sfield) = 0;
2186 SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
2187 if (TYPE_ALIGN (ctx->srecord_type) < DECL_ALIGN (sfield))
2188 SET_TYPE_ALIGN (ctx->srecord_type, DECL_ALIGN (sfield));
2193 if (gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL)
2195 tree clauses = gimple_omp_parallel_clauses (ctx->stmt);
2196 tree c = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
2197 if (c)
2199 /* Move the _reductemp_ clause first. GOMP_parallel_reductions
2200 expects to find it at the start of the data block. */
2201 tree f = lookup_field (OMP_CLAUSE_DECL (c), ctx);
2202 tree *p = &TYPE_FIELDS (ctx->record_type);
2203 while (*p)
2204 if (*p == f)
2206 *p = DECL_CHAIN (*p);
2207 break;
2209 else
2210 p = &DECL_CHAIN (*p);
2211 DECL_CHAIN (f) = TYPE_FIELDS (ctx->record_type);
2212 TYPE_FIELDS (ctx->record_type) = f;
2214 layout_type (ctx->record_type);
2215 fixup_child_record_type (ctx);
2217 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
2219 layout_type (ctx->record_type);
2220 fixup_child_record_type (ctx);
2222 else
2224 location_t loc = gimple_location (ctx->stmt);
2225 tree *p, vla_fields = NULL_TREE, *q = &vla_fields;
2226 /* Move VLA fields to the end. */
2227 p = &TYPE_FIELDS (ctx->record_type);
2228 while (*p)
2229 if (!TYPE_SIZE_UNIT (TREE_TYPE (*p))
2230 || ! TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (*p))))
2232 *q = *p;
2233 *p = TREE_CHAIN (*p);
2234 TREE_CHAIN (*q) = NULL_TREE;
2235 q = &TREE_CHAIN (*q);
2237 else
2238 p = &DECL_CHAIN (*p);
2239 *p = vla_fields;
2240 if (gimple_omp_task_taskloop_p (ctx->stmt))
2242 /* Move the fields corresponding to the first and second _looptemp_
2243 clauses first. These are filled in by GOMP_taskloop
2244 and thus need to be at specific positions. */
2245 tree clauses = gimple_omp_task_clauses (ctx->stmt);
2246 tree c1 = omp_find_clause (clauses, OMP_CLAUSE__LOOPTEMP_);
2247 tree c2 = omp_find_clause (OMP_CLAUSE_CHAIN (c1),
2248 OMP_CLAUSE__LOOPTEMP_);
2249 tree c3 = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
2250 tree f1 = lookup_field (OMP_CLAUSE_DECL (c1), ctx);
2251 tree f2 = lookup_field (OMP_CLAUSE_DECL (c2), ctx);
2252 tree f3 = c3 ? lookup_field (OMP_CLAUSE_DECL (c3), ctx) : NULL_TREE;
2253 p = &TYPE_FIELDS (ctx->record_type);
2254 while (*p)
2255 if (*p == f1 || *p == f2 || *p == f3)
2256 *p = DECL_CHAIN (*p);
2257 else
2258 p = &DECL_CHAIN (*p);
2259 DECL_CHAIN (f1) = f2;
2260 if (c3)
2262 DECL_CHAIN (f2) = f3;
2263 DECL_CHAIN (f3) = TYPE_FIELDS (ctx->record_type);
2265 else
2266 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->record_type);
2267 TYPE_FIELDS (ctx->record_type) = f1;
2268 if (ctx->srecord_type)
2270 f1 = lookup_sfield (OMP_CLAUSE_DECL (c1), ctx);
2271 f2 = lookup_sfield (OMP_CLAUSE_DECL (c2), ctx);
2272 if (c3)
2273 f3 = lookup_sfield (OMP_CLAUSE_DECL (c3), ctx);
2274 p = &TYPE_FIELDS (ctx->srecord_type);
2275 while (*p)
2276 if (*p == f1 || *p == f2 || *p == f3)
2277 *p = DECL_CHAIN (*p);
2278 else
2279 p = &DECL_CHAIN (*p);
2280 DECL_CHAIN (f1) = f2;
2281 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
2282 if (c3)
2284 DECL_CHAIN (f2) = f3;
2285 DECL_CHAIN (f3) = TYPE_FIELDS (ctx->srecord_type);
2287 else
2288 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
2289 TYPE_FIELDS (ctx->srecord_type) = f1;
2292 layout_type (ctx->record_type);
2293 fixup_child_record_type (ctx);
2294 if (ctx->srecord_type)
2295 layout_type (ctx->srecord_type);
2296 tree t = fold_convert_loc (loc, long_integer_type_node,
2297 TYPE_SIZE_UNIT (ctx->record_type));
2298 if (TREE_CODE (t) != INTEGER_CST)
2300 t = unshare_expr (t);
2301 walk_tree (&t, finish_taskreg_remap, ctx, NULL);
2303 gimple_omp_task_set_arg_size (ctx->stmt, t);
2304 t = build_int_cst (long_integer_type_node,
2305 TYPE_ALIGN_UNIT (ctx->record_type));
2306 gimple_omp_task_set_arg_align (ctx->stmt, t);
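/* To illustrate the reordering above: for

     #pragma omp taskloop
     for (i = a; i < b; i++)
       ...

   the first two fields of .omp_data_s are forced to be the two
   _looptemp_ temporaries, because GOMP_taskloop stores the start/end
   of each task's sub-range at those fixed positions (with the
   _reductemp_ field, if any, directly after them).  */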
2310 /* Find the enclosing offload context. */
2312 static omp_context *
2313 enclosing_target_ctx (omp_context *ctx)
2315 for (; ctx; ctx = ctx->outer)
2316 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET)
2317 break;
2319 return ctx;
2322 /* Return true if ctx is part of an oacc kernels region. */
2324 static bool
2325 ctx_in_oacc_kernels_region (omp_context *ctx)
2327 for (;ctx != NULL; ctx = ctx->outer)
2329 gimple *stmt = ctx->stmt;
2330 if (gimple_code (stmt) == GIMPLE_OMP_TARGET
2331 && gimple_omp_target_kind (stmt) == GF_OMP_TARGET_KIND_OACC_KERNELS)
2332 return true;
2335 return false;
2338 /* Check the parallelism clauses inside a kernels region.
2339 Until kernels handling moves to use the same loop indirection
2340 scheme as parallel, we need to do this checking early. */
2342 static unsigned
2343 check_oacc_kernel_gwv (gomp_for *stmt, omp_context *ctx)
2345 bool checking = true;
2346 unsigned outer_mask = 0;
2347 unsigned this_mask = 0;
2348 bool has_seq = false, has_auto = false;
2350 if (ctx->outer)
2351 outer_mask = check_oacc_kernel_gwv (NULL, ctx->outer);
2352 if (!stmt)
2354 checking = false;
2355 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR)
2356 return outer_mask;
2357 stmt = as_a <gomp_for *> (ctx->stmt);
2360 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2362 switch (OMP_CLAUSE_CODE (c))
2364 case OMP_CLAUSE_GANG:
2365 this_mask |= GOMP_DIM_MASK (GOMP_DIM_GANG);
2366 break;
2367 case OMP_CLAUSE_WORKER:
2368 this_mask |= GOMP_DIM_MASK (GOMP_DIM_WORKER);
2369 break;
2370 case OMP_CLAUSE_VECTOR:
2371 this_mask |= GOMP_DIM_MASK (GOMP_DIM_VECTOR);
2372 break;
2373 case OMP_CLAUSE_SEQ:
2374 has_seq = true;
2375 break;
2376 case OMP_CLAUSE_AUTO:
2377 has_auto = true;
2378 break;
2379 default:
2380 break;
2384 if (checking)
2386 if (has_seq && (this_mask || has_auto))
2387 error_at (gimple_location (stmt), "%<seq%> overrides other"
2388 " OpenACC loop specifiers");
2389 else if (has_auto && this_mask)
2390 error_at (gimple_location (stmt), "%<auto%> conflicts with other"
2391 " OpenACC loop specifiers");
2393 if (this_mask & outer_mask)
2394 error_at (gimple_location (stmt), "inner loop uses same"
2395 " OpenACC parallelism as containing loop");
2398 return outer_mask | this_mask;
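/* For example, the mask propagation above diagnoses

     #pragma acc kernels
     {
       #pragma acc loop gang
       for (...)
         {
           #pragma acc loop gang
           for (...)
             ...
         }
     }

   because the inner loop's GANG bit is already set in OUTER_MASK
   when the recursion reaches it (a source-level sketch).  */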
2401 /* Scan a GIMPLE_OMP_FOR. */
2403 static omp_context *
2404 scan_omp_for (gomp_for *stmt, omp_context *outer_ctx)
2406 omp_context *ctx;
2407 size_t i;
2408 tree clauses = gimple_omp_for_clauses (stmt);
2410 ctx = new_omp_context (stmt, outer_ctx);
2412 if (is_gimple_omp_oacc (stmt))
2414 omp_context *tgt = enclosing_target_ctx (outer_ctx);
2416 if (!tgt || is_oacc_parallel_or_serial (tgt))
2417 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
2419 char const *check = NULL;
2421 switch (OMP_CLAUSE_CODE (c))
2423 case OMP_CLAUSE_GANG:
2424 check = "gang";
2425 break;
2427 case OMP_CLAUSE_WORKER:
2428 check = "worker";
2429 break;
2431 case OMP_CLAUSE_VECTOR:
2432 check = "vector";
2433 break;
2435 default:
2436 break;
2439 if (check && OMP_CLAUSE_OPERAND (c, 0))
2440 error_at (gimple_location (stmt),
2441 "argument not permitted on %qs clause in"
2442 " OpenACC %<parallel%> or %<serial%>", check);
2445 if (tgt && is_oacc_kernels (tgt))
2447 /* Strip out reductions, as they are not handled yet. */
2448 tree *prev_ptr = &clauses;
2450 while (tree probe = *prev_ptr)
2452 tree *next_ptr = &OMP_CLAUSE_CHAIN (probe);
2454 if (OMP_CLAUSE_CODE (probe) == OMP_CLAUSE_REDUCTION)
2455 *prev_ptr = *next_ptr;
2456 else
2457 prev_ptr = next_ptr;
2460 gimple_omp_for_set_clauses (stmt, clauses);
2461 check_oacc_kernel_gwv (stmt, ctx);
2464 /* Collect all variables named in reductions on this loop. Ensure
2465 that, if this loop has a reduction on some variable v, and there is
2466 a reduction on v somewhere in an outer context, then there is a
2467 reduction on v on all intervening loops as well. */
2468 tree local_reduction_clauses = NULL;
2469 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2471 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
2472 local_reduction_clauses
2473 = tree_cons (NULL, c, local_reduction_clauses);
2475 if (ctx->outer_reduction_clauses == NULL && ctx->outer != NULL)
2476 ctx->outer_reduction_clauses
2477 = chainon (unshare_expr (ctx->outer->local_reduction_clauses),
2478 ctx->outer->outer_reduction_clauses);
2479 tree outer_reduction_clauses = ctx->outer_reduction_clauses;
2480 tree local_iter = local_reduction_clauses;
2481 for (; local_iter; local_iter = TREE_CHAIN (local_iter))
2483 tree local_clause = TREE_VALUE (local_iter);
2484 tree local_var = OMP_CLAUSE_DECL (local_clause);
2485 tree_code local_op = OMP_CLAUSE_REDUCTION_CODE (local_clause);
2486 bool have_outer_reduction = false;
2487 tree ctx_iter = outer_reduction_clauses;
2488 for (; ctx_iter; ctx_iter = TREE_CHAIN (ctx_iter))
2490 tree outer_clause = TREE_VALUE (ctx_iter);
2491 tree outer_var = OMP_CLAUSE_DECL (outer_clause);
2492 tree_code outer_op = OMP_CLAUSE_REDUCTION_CODE (outer_clause);
2493 if (outer_var == local_var && outer_op != local_op)
2495 warning_at (OMP_CLAUSE_LOCATION (local_clause), 0,
2496 "conflicting reduction operations for %qE",
2497 local_var);
2498 inform (OMP_CLAUSE_LOCATION (outer_clause),
2499 "location of the previous reduction for %qE",
2500 outer_var);
2502 if (outer_var == local_var)
2504 have_outer_reduction = true;
2505 break;
2508 if (have_outer_reduction)
2510 /* There is a reduction on outer_var both on this loop and on
2511 some enclosing loop. Walk up the context tree until such a
2512 loop with a reduction on outer_var is found, and complain
2513 about all intervening loops that do not have such a
2514 reduction. */
2515 struct omp_context *curr_loop = ctx->outer;
2516 bool found = false;
2517 while (curr_loop != NULL)
2519 tree curr_iter = curr_loop->local_reduction_clauses;
2520 for (; curr_iter; curr_iter = TREE_CHAIN (curr_iter))
2522 tree curr_clause = TREE_VALUE (curr_iter);
2523 tree curr_var = OMP_CLAUSE_DECL (curr_clause);
2524 if (curr_var == local_var)
2526 found = true;
2527 break;
2530 if (!found)
2531 warning_at (gimple_location (curr_loop->stmt), 0,
2532 "nested loop in reduction needs "
2533 "reduction clause for %qE",
2534 local_var);
2535 else
2536 break;
2537 curr_loop = curr_loop->outer;
2541 ctx->local_reduction_clauses = local_reduction_clauses;
2542 ctx->outer_reduction_clauses
2543 = chainon (unshare_expr (ctx->local_reduction_clauses),
2544 ctx->outer_reduction_clauses);
2547 scan_sharing_clauses (clauses, ctx);
2549 scan_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
2550 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
2552 scan_omp_op (gimple_omp_for_index_ptr (stmt, i), ctx);
2553 scan_omp_op (gimple_omp_for_initial_ptr (stmt, i), ctx);
2554 scan_omp_op (gimple_omp_for_final_ptr (stmt, i), ctx);
2555 scan_omp_op (gimple_omp_for_incr_ptr (stmt, i), ctx);
2557 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2558 return ctx;
2561 /* Duplicate #pragma omp simd: one copy for SIMT, another for SIMD. */
2563 static void
2564 scan_omp_simd (gimple_stmt_iterator *gsi, gomp_for *stmt,
2565 omp_context *outer_ctx)
2567 gbind *bind = gimple_build_bind (NULL, NULL, NULL);
2568 gsi_replace (gsi, bind, false);
2569 gimple_seq seq = NULL;
2570 gimple *g = gimple_build_call_internal (IFN_GOMP_USE_SIMT, 0);
2571 tree cond = create_tmp_var_raw (integer_type_node);
2572 DECL_CONTEXT (cond) = current_function_decl;
2573 DECL_SEEN_IN_BIND_EXPR_P (cond) = 1;
2574 gimple_bind_set_vars (bind, cond);
2575 gimple_call_set_lhs (g, cond);
2576 gimple_seq_add_stmt (&seq, g);
2577 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
2578 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
2579 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
2580 g = gimple_build_cond (NE_EXPR, cond, integer_zero_node, lab1, lab2);
2581 gimple_seq_add_stmt (&seq, g);
2582 g = gimple_build_label (lab1);
2583 gimple_seq_add_stmt (&seq, g);
2584 gimple_seq new_seq = copy_gimple_seq_and_replace_locals (stmt);
2585 gomp_for *new_stmt = as_a <gomp_for *> (new_seq);
2586 tree clause = build_omp_clause (gimple_location (stmt), OMP_CLAUSE__SIMT_);
2587 OMP_CLAUSE_CHAIN (clause) = gimple_omp_for_clauses (new_stmt);
2588 gimple_omp_for_set_clauses (new_stmt, clause);
2589 gimple_seq_add_stmt (&seq, new_stmt);
2590 g = gimple_build_goto (lab3);
2591 gimple_seq_add_stmt (&seq, g);
2592 g = gimple_build_label (lab2);
2593 gimple_seq_add_stmt (&seq, g);
2594 gimple_seq_add_stmt (&seq, stmt);
2595 g = gimple_build_label (lab3);
2596 gimple_seq_add_stmt (&seq, g);
2597 gimple_bind_set_body (bind, seq);
2598 update_stmt (bind);
2599 scan_omp_for (new_stmt, outer_ctx);
2600 scan_omp_for (stmt, outer_ctx)->simt_stmt = new_stmt;
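/* The sequence built above corresponds roughly to the pseudo code

     cond = GOMP_USE_SIMT ();
     if (cond != 0) goto lab1; else goto lab2;
     lab1: <copy of the simd loop, with an extra _simt_ clause>
           goto lab3;
     lab2: <the original simd loop>
     lab3: ;

   so that a later pass can keep whichever version matches the target
   (GOMP_USE_SIMT is the internal function call built with
   gimple_build_call_internal above).  */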
2603 static tree omp_find_scan (gimple_stmt_iterator *, bool *,
2604 struct walk_stmt_info *);
2605 static omp_context *maybe_lookup_ctx (gimple *);
2607 /* Duplicate #pragma omp simd: one copy for the scan input phase loop
2608 and one for the scan phase loop. */
2610 static void
2611 scan_omp_simd_scan (gimple_stmt_iterator *gsi, gomp_for *stmt,
2612 omp_context *outer_ctx)
2614 /* The only change between inclusive and exclusive scan will be
2615 within the first simd loop, so just use inclusive in the
2616 worksharing loop. */
2617 outer_ctx->scan_inclusive = true;
2618 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_INCLUSIVE);
2619 OMP_CLAUSE_DECL (c) = integer_zero_node;
2621 gomp_scan *input_stmt = gimple_build_omp_scan (NULL, NULL_TREE);
2622 gomp_scan *scan_stmt = gimple_build_omp_scan (NULL, c);
2623 gsi_replace (gsi, input_stmt, false);
2624 gimple_seq input_body = NULL;
2625 gimple_seq_add_stmt (&input_body, stmt);
2626 gsi_insert_after (gsi, scan_stmt, GSI_NEW_STMT);
2628 gimple_stmt_iterator input1_gsi = gsi_none ();
2629 struct walk_stmt_info wi;
2630 memset (&wi, 0, sizeof (wi));
2631 wi.val_only = true;
2632 wi.info = (void *) &input1_gsi;
2633 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), omp_find_scan, NULL, &wi);
2634 gcc_assert (!gsi_end_p (input1_gsi));
2636 gimple *input_stmt1 = gsi_stmt (input1_gsi);
2637 gsi_next (&input1_gsi);
2638 gimple *scan_stmt1 = gsi_stmt (input1_gsi);
2639 gcc_assert (scan_stmt1 && gimple_code (scan_stmt1) == GIMPLE_OMP_SCAN);
2640 c = gimple_omp_scan_clauses (as_a <gomp_scan *> (scan_stmt1));
2641 if (c && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_EXCLUSIVE)
2642 std::swap (input_stmt1, scan_stmt1);
2644 gimple_seq input_body1 = gimple_omp_body (input_stmt1);
2645 gimple_omp_set_body (input_stmt1, NULL);
2647 gimple_seq scan_body = copy_gimple_seq_and_replace_locals (stmt);
2648 gomp_for *new_stmt = as_a <gomp_for *> (scan_body);
2650 gimple_omp_set_body (input_stmt1, input_body1);
2651 gimple_omp_set_body (scan_stmt1, NULL);
2653 gimple_stmt_iterator input2_gsi = gsi_none ();
2654 memset (&wi, 0, sizeof (wi));
2655 wi.val_only = true;
2656 wi.info = (void *) &input2_gsi;
2657 walk_gimple_seq_mod (gimple_omp_body_ptr (new_stmt), omp_find_scan,
2658 NULL, &wi);
2659 gcc_assert (!gsi_end_p (input2_gsi));
2661 gimple *input_stmt2 = gsi_stmt (input2_gsi);
2662 gsi_next (&input2_gsi);
2663 gimple *scan_stmt2 = gsi_stmt (input2_gsi);
2664 gcc_assert (scan_stmt2 && gimple_code (scan_stmt2) == GIMPLE_OMP_SCAN);
2665 if (c && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_EXCLUSIVE)
2666 std::swap (input_stmt2, scan_stmt2);
2668 gimple_omp_set_body (input_stmt2, NULL);
2670 gimple_omp_set_body (input_stmt, input_body);
2671 gimple_omp_set_body (scan_stmt, scan_body);
2673 omp_context *ctx = new_omp_context (input_stmt, outer_ctx);
2674 scan_omp (gimple_omp_body_ptr (input_stmt), ctx);
2676 ctx = new_omp_context (scan_stmt, outer_ctx);
2677 scan_omp (gimple_omp_body_ptr (scan_stmt), ctx);
2679 maybe_lookup_ctx (new_stmt)->for_simd_scan_phase = true;
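/* E.g. an inscan reduction such as

     #pragma omp simd reduction (inscan, +:r)
     for (i = 0; i < n; i++)
       {
         r += a[i];
         #pragma omp scan inclusive (r)
         b[i] = r;
       }

   is split here into two simd loops, one for the input phase and one
   for the scan phase, each wrapped in a GIMPLE_OMP_SCAN (an outline
   of the transformation performed above).  */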
2682 /* Scan an OpenMP sections directive. */
2684 static void
2685 scan_omp_sections (gomp_sections *stmt, omp_context *outer_ctx)
2687 omp_context *ctx;
2689 ctx = new_omp_context (stmt, outer_ctx);
2690 scan_sharing_clauses (gimple_omp_sections_clauses (stmt), ctx);
2691 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2694 /* Scan an OpenMP single directive. */
2696 static void
2697 scan_omp_single (gomp_single *stmt, omp_context *outer_ctx)
2699 omp_context *ctx;
2700 tree name;
2702 ctx = new_omp_context (stmt, outer_ctx);
2703 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2704 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2705 name = create_tmp_var_name (".omp_copy_s");
2706 name = build_decl (gimple_location (stmt),
2707 TYPE_DECL, name, ctx->record_type);
2708 TYPE_NAME (ctx->record_type) = name;
2710 scan_sharing_clauses (gimple_omp_single_clauses (stmt), ctx);
2711 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2713 if (TYPE_FIELDS (ctx->record_type) == NULL)
2714 ctx->record_type = NULL;
2715 else
2716 layout_type (ctx->record_type);
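/* E.g.

     #pragma omp single copyprivate (x)
     x = compute ();

   needs the .omp_copy_s record with a field for X so that the value
   can be broadcast to the other threads, whereas a plain
   #pragma omp single leaves the record without fields and it is
   dropped just above.  */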
2719 /* Scan a GIMPLE_OMP_TARGET. */
2721 static void
2722 scan_omp_target (gomp_target *stmt, omp_context *outer_ctx)
2724 omp_context *ctx;
2725 tree name;
2726 bool offloaded = is_gimple_omp_offloaded (stmt);
2727 tree clauses = gimple_omp_target_clauses (stmt);
2729 ctx = new_omp_context (stmt, outer_ctx);
2730 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2731 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2732 name = create_tmp_var_name (".omp_data_t");
2733 name = build_decl (gimple_location (stmt),
2734 TYPE_DECL, name, ctx->record_type);
2735 DECL_ARTIFICIAL (name) = 1;
2736 DECL_NAMELESS (name) = 1;
2737 TYPE_NAME (ctx->record_type) = name;
2738 TYPE_ARTIFICIAL (ctx->record_type) = 1;
2740 if (offloaded)
2742 create_omp_child_function (ctx, false);
2743 gimple_omp_target_set_child_fn (stmt, ctx->cb.dst_fn);
2746 scan_sharing_clauses (clauses, ctx);
2747 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2749 if (TYPE_FIELDS (ctx->record_type) == NULL)
2750 ctx->record_type = ctx->receiver_decl = NULL;
2751 else
2753 TYPE_FIELDS (ctx->record_type)
2754 = nreverse (TYPE_FIELDS (ctx->record_type));
2755 if (flag_checking)
2757 unsigned int align = DECL_ALIGN (TYPE_FIELDS (ctx->record_type));
2758 for (tree field = TYPE_FIELDS (ctx->record_type);
2759 field;
2760 field = DECL_CHAIN (field))
2761 gcc_assert (DECL_ALIGN (field) == align);
2763 layout_type (ctx->record_type);
2764 if (offloaded)
2765 fixup_child_record_type (ctx);
2769 /* Scan an OpenMP teams directive. */
2771 static void
2772 scan_omp_teams (gomp_teams *stmt, omp_context *outer_ctx)
2774 omp_context *ctx = new_omp_context (stmt, outer_ctx);
2776 if (!gimple_omp_teams_host (stmt))
2778 scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
2779 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2780 return;
2782 taskreg_contexts.safe_push (ctx);
2783 gcc_assert (taskreg_nesting_level == 1);
2784 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2785 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2786 tree name = create_tmp_var_name (".omp_data_s");
2787 name = build_decl (gimple_location (stmt),
2788 TYPE_DECL, name, ctx->record_type);
2789 DECL_ARTIFICIAL (name) = 1;
2790 DECL_NAMELESS (name) = 1;
2791 TYPE_NAME (ctx->record_type) = name;
2792 TYPE_ARTIFICIAL (ctx->record_type) = 1;
2793 create_omp_child_function (ctx, false);
2794 gimple_omp_teams_set_child_fn (stmt, ctx->cb.dst_fn);
2796 scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
2797 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2799 if (TYPE_FIELDS (ctx->record_type) == NULL)
2800 ctx->record_type = ctx->receiver_decl = NULL;
2803 /* Check nesting restrictions. */
2804 static bool
2805 check_omp_nesting_restrictions (gimple *stmt, omp_context *ctx)
2807 tree c;
2809 /* No nesting of non-OpenACC STMT (that is, an OpenMP one, or a GOMP builtin)
2810 inside an OpenACC CTX. */
2811 if (!(is_gimple_omp (stmt)
2812 && is_gimple_omp_oacc (stmt))
2813 /* Except for atomic codes that we share with OpenMP. */
2814 && !(gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
2815 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE))
2817 if (oacc_get_fn_attrib (cfun->decl) != NULL)
2819 error_at (gimple_location (stmt),
2820 "non-OpenACC construct inside of OpenACC routine");
2821 return false;
2823 else
2824 for (omp_context *octx = ctx; octx != NULL; octx = octx->outer)
2825 if (is_gimple_omp (octx->stmt)
2826 && is_gimple_omp_oacc (octx->stmt))
2828 error_at (gimple_location (stmt),
2829 "non-OpenACC construct inside of OpenACC region");
2830 return false;
2834 if (ctx != NULL)
2836 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SCAN
2837 && ctx->outer
2838 && gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
2839 ctx = ctx->outer;
2840 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
2841 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD
2842 && !ctx->loop_p)
2844 c = NULL_TREE;
2845 if (ctx->order_concurrent
2846 && (gimple_code (stmt) == GIMPLE_OMP_ORDERED
2847 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
2848 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE))
2850 error_at (gimple_location (stmt),
2851 "OpenMP constructs other than %<parallel%>, %<loop%>"
2852 " or %<simd%> may not be nested inside a region with"
2853 " the %<order(concurrent)%> clause");
2854 return false;
2856 if (gimple_code (stmt) == GIMPLE_OMP_ORDERED)
2858 c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
2859 if (omp_find_clause (c, OMP_CLAUSE_SIMD))
2861 if (omp_find_clause (c, OMP_CLAUSE_THREADS)
2862 && (ctx->outer == NULL
2863 || !gimple_omp_for_combined_into_p (ctx->stmt)
2864 || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR
2865 || (gimple_omp_for_kind (ctx->outer->stmt)
2866 != GF_OMP_FOR_KIND_FOR)
2867 || !gimple_omp_for_combined_p (ctx->outer->stmt)))
2869 error_at (gimple_location (stmt),
2870 "%<ordered simd threads%> must be closely "
2871 "nested inside of %<for simd%> region");
2872 return false;
2874 return true;
2877 else if (gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
2878 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE
2879 || gimple_code (stmt) == GIMPLE_OMP_SCAN)
2880 return true;
2881 else if (gimple_code (stmt) == GIMPLE_OMP_FOR
2882 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
2883 return true;
2884 error_at (gimple_location (stmt),
2885 "OpenMP constructs other than "
2886 "%<ordered simd%>, %<simd%>, %<loop%> or %<atomic%> may "
2887 "not be nested inside %<simd%> region");
2888 return false;
2890 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
2892 if ((gimple_code (stmt) != GIMPLE_OMP_FOR
2893 || (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_DISTRIBUTE
2894 && omp_find_clause (gimple_omp_for_clauses (stmt),
2895 OMP_CLAUSE_BIND) == NULL_TREE))
2896 && gimple_code (stmt) != GIMPLE_OMP_PARALLEL)
2898 error_at (gimple_location (stmt),
2899 "only %<distribute%>, %<parallel%> or %<loop%> "
2900 "regions are allowed to be strictly nested inside "
2901 "%<teams%> region");
2902 return false;
2905 else if (ctx->order_concurrent
2906 && gimple_code (stmt) != GIMPLE_OMP_PARALLEL
2907 && (gimple_code (stmt) != GIMPLE_OMP_FOR
2908 || gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_SIMD)
2909 && gimple_code (stmt) != GIMPLE_OMP_SCAN)
2911 if (ctx->loop_p)
2912 error_at (gimple_location (stmt),
2913 "OpenMP constructs other than %<parallel%>, %<loop%> or "
2914 "%<simd%> may not be nested inside a %<loop%> region");
2915 else
2916 error_at (gimple_location (stmt),
2917 "OpenMP constructs other than %<parallel%>, %<loop%> or "
2918 "%<simd%> may not be nested inside a region with "
2919 "the %<order(concurrent)%> clause");
2920 return false;
2923 switch (gimple_code (stmt))
2925 case GIMPLE_OMP_FOR:
2926 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_SIMD)
2927 return true;
2928 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_DISTRIBUTE)
2930 if (ctx != NULL && gimple_code (ctx->stmt) != GIMPLE_OMP_TEAMS)
2932 error_at (gimple_location (stmt),
2933 "%<distribute%> region must be strictly nested "
2934 "inside %<teams%> construct");
2935 return false;
2937 return true;
2939 /* We split a taskloop into a task with a nested taskloop inside it. */
2940 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP)
2941 return true;
2942 /* For now, hope this will change and loop bind(parallel) will not
2943 be allowed in lots of contexts. */
2944 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR
2945 && omp_find_clause (gimple_omp_for_clauses (stmt), OMP_CLAUSE_BIND))
2946 return true;
2947 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_OACC_LOOP)
2949 bool ok = false;
2951 if (ctx)
2952 switch (gimple_code (ctx->stmt))
2954 case GIMPLE_OMP_FOR:
2955 ok = (gimple_omp_for_kind (ctx->stmt)
2956 == GF_OMP_FOR_KIND_OACC_LOOP);
2957 break;
2959 case GIMPLE_OMP_TARGET:
2960 switch (gimple_omp_target_kind (ctx->stmt))
2962 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
2963 case GF_OMP_TARGET_KIND_OACC_KERNELS:
2964 case GF_OMP_TARGET_KIND_OACC_SERIAL:
2965 ok = true;
2966 break;
2968 default:
2969 break;
2972 default:
2973 break;
2975 else if (oacc_get_fn_attrib (current_function_decl))
2976 ok = true;
2977 if (!ok)
2979 error_at (gimple_location (stmt),
2980 "OpenACC loop directive must be associated with"
2981 " an OpenACC compute region");
2982 return false;
2985 /* FALLTHRU */
2986 case GIMPLE_CALL:
2987 if (is_gimple_call (stmt)
2988 && (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2989 == BUILT_IN_GOMP_CANCEL
2990 || DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2991 == BUILT_IN_GOMP_CANCELLATION_POINT))
2993 const char *bad = NULL;
2994 const char *kind = NULL;
2995 const char *construct
2996 = (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2997 == BUILT_IN_GOMP_CANCEL)
2998 ? "cancel"
2999 : "cancellation point";
3000 if (ctx == NULL)
3002 error_at (gimple_location (stmt), "orphaned %qs construct",
3003 construct);
3004 return false;
3006 switch (tree_fits_shwi_p (gimple_call_arg (stmt, 0))
3007 ? tree_to_shwi (gimple_call_arg (stmt, 0))
3008 : 0)
3010 case 1:
3011 if (gimple_code (ctx->stmt) != GIMPLE_OMP_PARALLEL)
3012 bad = "parallel";
3013 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3014 == BUILT_IN_GOMP_CANCEL
3015 && !integer_zerop (gimple_call_arg (stmt, 1)))
3016 ctx->cancellable = true;
3017 kind = "parallel";
3018 break;
3019 case 2:
3020 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
3021 || gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR)
3022 bad = "for";
3023 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3024 == BUILT_IN_GOMP_CANCEL
3025 && !integer_zerop (gimple_call_arg (stmt, 1)))
3027 ctx->cancellable = true;
3028 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3029 OMP_CLAUSE_NOWAIT))
3030 warning_at (gimple_location (stmt), 0,
3031 "%<cancel for%> inside "
3032 "%<nowait%> for construct");
3033 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3034 OMP_CLAUSE_ORDERED))
3035 warning_at (gimple_location (stmt), 0,
3036 "%<cancel for%> inside "
3037 "%<ordered%> for construct");
3039 kind = "for";
3040 break;
3041 case 4:
3042 if (gimple_code (ctx->stmt) != GIMPLE_OMP_SECTIONS
3043 && gimple_code (ctx->stmt) != GIMPLE_OMP_SECTION)
3044 bad = "sections";
3045 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3046 == BUILT_IN_GOMP_CANCEL
3047 && !integer_zerop (gimple_call_arg (stmt, 1)))
3049 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
3051 ctx->cancellable = true;
3052 if (omp_find_clause (gimple_omp_sections_clauses
3053 (ctx->stmt),
3054 OMP_CLAUSE_NOWAIT))
3055 warning_at (gimple_location (stmt), 0,
3056 "%<cancel sections%> inside "
3057 "%<nowait%> sections construct");
3059 else
3061 gcc_assert (ctx->outer
3062 && gimple_code (ctx->outer->stmt)
3063 == GIMPLE_OMP_SECTIONS);
3064 ctx->outer->cancellable = true;
3065 if (omp_find_clause (gimple_omp_sections_clauses
3066 (ctx->outer->stmt),
3067 OMP_CLAUSE_NOWAIT))
3068 warning_at (gimple_location (stmt), 0,
3069 "%<cancel sections%> inside "
3070 "%<nowait%> sections construct");
3073 kind = "sections";
3074 break;
3075 case 8:
3076 if (!is_task_ctx (ctx)
3077 && (!is_taskloop_ctx (ctx)
3078 || ctx->outer == NULL
3079 || !is_task_ctx (ctx->outer)))
3080 bad = "task";
3081 else
3083 for (omp_context *octx = ctx->outer;
3084 octx; octx = octx->outer)
3086 switch (gimple_code (octx->stmt))
3088 case GIMPLE_OMP_TASKGROUP:
3089 break;
3090 case GIMPLE_OMP_TARGET:
3091 if (gimple_omp_target_kind (octx->stmt)
3092 != GF_OMP_TARGET_KIND_REGION)
3093 continue;
3094 /* FALLTHRU */
3095 case GIMPLE_OMP_PARALLEL:
3096 case GIMPLE_OMP_TEAMS:
3097 error_at (gimple_location (stmt),
3098 "%<%s taskgroup%> construct not closely "
3099 "nested inside of %<taskgroup%> region",
3100 construct);
3101 return false;
3102 case GIMPLE_OMP_TASK:
3103 if (gimple_omp_task_taskloop_p (octx->stmt)
3104 && octx->outer
3105 && is_taskloop_ctx (octx->outer))
3107 tree clauses
3108 = gimple_omp_for_clauses (octx->outer->stmt);
3109 if (!omp_find_clause (clauses, OMP_CLAUSE_NOGROUP))
3110 break;
3112 continue;
3113 default:
3114 continue;
3116 break;
3118 ctx->cancellable = true;
3120 kind = "taskgroup";
3121 break;
3122 default:
3123 error_at (gimple_location (stmt), "invalid arguments");
3124 return false;
3126 if (bad)
3128 error_at (gimple_location (stmt),
3129 "%<%s %s%> construct not closely nested inside of %qs",
3130 construct, kind, bad);
3131 return false;
3134 /* FALLTHRU */
3135 case GIMPLE_OMP_SECTIONS:
3136 case GIMPLE_OMP_SINGLE:
3137 for (; ctx != NULL; ctx = ctx->outer)
3138 switch (gimple_code (ctx->stmt))
3140 case GIMPLE_OMP_FOR:
3141 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
3142 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
3143 break;
3144 /* FALLTHRU */
3145 case GIMPLE_OMP_SECTIONS:
3146 case GIMPLE_OMP_SINGLE:
3147 case GIMPLE_OMP_ORDERED:
3148 case GIMPLE_OMP_MASTER:
3149 case GIMPLE_OMP_TASK:
3150 case GIMPLE_OMP_CRITICAL:
3151 if (is_gimple_call (stmt))
3153 if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3154 != BUILT_IN_GOMP_BARRIER)
3155 return true;
3156 error_at (gimple_location (stmt),
3157 "barrier region may not be closely nested inside "
3158 "of work-sharing, %<loop%>, %<critical%>, "
3159 "%<ordered%>, %<master%>, explicit %<task%> or "
3160 "%<taskloop%> region");
3161 return false;
3163 error_at (gimple_location (stmt),
3164 "work-sharing region may not be closely nested inside "
3165 "of work-sharing, %<loop%>, %<critical%>, %<ordered%>, "
3166 "%<master%>, explicit %<task%> or %<taskloop%> region");
3167 return false;
3168 case GIMPLE_OMP_PARALLEL:
3169 case GIMPLE_OMP_TEAMS:
3170 return true;
3171 case GIMPLE_OMP_TARGET:
3172 if (gimple_omp_target_kind (ctx->stmt)
3173 == GF_OMP_TARGET_KIND_REGION)
3174 return true;
3175 break;
3176 default:
3177 break;
3179 break;
3180 case GIMPLE_OMP_MASTER:
3181 for (; ctx != NULL; ctx = ctx->outer)
3182 switch (gimple_code (ctx->stmt))
3184 case GIMPLE_OMP_FOR:
3185 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
3186 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
3187 break;
3188 /* FALLTHRU */
3189 case GIMPLE_OMP_SECTIONS:
3190 case GIMPLE_OMP_SINGLE:
3191 case GIMPLE_OMP_TASK:
3192 error_at (gimple_location (stmt),
3193 "%<master%> region may not be closely nested inside "
3194 "of work-sharing, %<loop%>, explicit %<task%> or "
3195 "%<taskloop%> region");
3196 return false;
3197 case GIMPLE_OMP_PARALLEL:
3198 case GIMPLE_OMP_TEAMS:
3199 return true;
3200 case GIMPLE_OMP_TARGET:
3201 if (gimple_omp_target_kind (ctx->stmt)
3202 == GF_OMP_TARGET_KIND_REGION)
3203 return true;
3204 break;
3205 default:
3206 break;
3208 break;
3209 case GIMPLE_OMP_TASK:
3210 for (c = gimple_omp_task_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
3211 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
3212 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
3213 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
3215 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
3216 error_at (OMP_CLAUSE_LOCATION (c),
3217 "%<depend(%s)%> is only allowed in %<omp ordered%>",
3218 kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
3219 return false;
3221 break;
3222 case GIMPLE_OMP_ORDERED:
3223 for (c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
3224 c; c = OMP_CLAUSE_CHAIN (c))
3226 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
3228 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREADS
3229 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SIMD);
3230 continue;
3232 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
3233 if (kind == OMP_CLAUSE_DEPEND_SOURCE
3234 || kind == OMP_CLAUSE_DEPEND_SINK)
3236 tree oclause;
3237 /* Look for containing ordered(N) loop. */
3238 if (ctx == NULL
3239 || gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
3240 || (oclause
3241 = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3242 OMP_CLAUSE_ORDERED)) == NULL_TREE)
3244 error_at (OMP_CLAUSE_LOCATION (c),
3245 "%<ordered%> construct with %<depend%> clause "
3246 "must be closely nested inside an %<ordered%> "
3247 "loop");
3248 return false;
3250 else if (OMP_CLAUSE_ORDERED_EXPR (oclause) == NULL_TREE)
3252 error_at (OMP_CLAUSE_LOCATION (c),
3253 "%<ordered%> construct with %<depend%> clause "
3254 "must be closely nested inside a loop with "
3255 "%<ordered%> clause with a parameter");
3256 return false;
3259 else
3261 error_at (OMP_CLAUSE_LOCATION (c),
3262 "invalid depend kind in omp %<ordered%> %<depend%>");
3263 return false;
3266 c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
3267 if (omp_find_clause (c, OMP_CLAUSE_SIMD))
3269 /* ordered simd must be closely nested inside of a simd region,
3270 and a simd region must not encounter constructs other than
3271 ordered simd, therefore ordered simd may be either orphaned,
3272 or ctx->stmt must be simd. The latter case has already been
3273 handled earlier. */
3274 if (ctx != NULL)
3276 error_at (gimple_location (stmt),
3277 "%<ordered%> %<simd%> must be closely nested inside "
3278 "%<simd%> region");
3279 return false;
3282 for (; ctx != NULL; ctx = ctx->outer)
3283 switch (gimple_code (ctx->stmt))
3285 case GIMPLE_OMP_CRITICAL:
3286 case GIMPLE_OMP_TASK:
3287 case GIMPLE_OMP_ORDERED:
3288 ordered_in_taskloop:
3289 error_at (gimple_location (stmt),
3290 "%<ordered%> region may not be closely nested inside "
3291 "of %<critical%>, %<ordered%>, explicit %<task%> or "
3292 "%<taskloop%> region");
3293 return false;
3294 case GIMPLE_OMP_FOR:
3295 if (gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP)
3296 goto ordered_in_taskloop;
3297 tree o;
3298 o = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3299 OMP_CLAUSE_ORDERED);
3300 if (o == NULL)
3302 error_at (gimple_location (stmt),
3303 "%<ordered%> region must be closely nested inside "
3304 "a loop region with an %<ordered%> clause");
3305 return false;
3307 if (OMP_CLAUSE_ORDERED_EXPR (o) != NULL_TREE
3308 && omp_find_clause (c, OMP_CLAUSE_DEPEND) == NULL_TREE)
3310 error_at (gimple_location (stmt),
3311 "%<ordered%> region without %<depend%> clause may "
3312 "not be closely nested inside a loop region with "
3313 "an %<ordered%> clause with a parameter");
3314 return false;
3316 return true;
3317 case GIMPLE_OMP_TARGET:
3318 if (gimple_omp_target_kind (ctx->stmt)
3319 != GF_OMP_TARGET_KIND_REGION)
3320 break;
3321 /* FALLTHRU */
3322 case GIMPLE_OMP_PARALLEL:
3323 case GIMPLE_OMP_TEAMS:
3324 error_at (gimple_location (stmt),
3325 "%<ordered%> region must be closely nested inside "
3326 "a loop region with an %<ordered%> clause");
3327 return false;
3328 default:
3329 break;
3331 break;
3332 case GIMPLE_OMP_CRITICAL:
3334 tree this_stmt_name
3335 = gimple_omp_critical_name (as_a <gomp_critical *> (stmt));
3336 for (; ctx != NULL; ctx = ctx->outer)
3337 if (gomp_critical *other_crit
3338 = dyn_cast <gomp_critical *> (ctx->stmt))
3339 if (this_stmt_name == gimple_omp_critical_name (other_crit))
3341 error_at (gimple_location (stmt),
3342 "%<critical%> region may not be nested inside "
3343 "a %<critical%> region with the same name");
3344 return false;
3347 break;
3348 case GIMPLE_OMP_TEAMS:
3349 if (ctx == NULL)
3350 break;
3351 else if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET
3352 || (gimple_omp_target_kind (ctx->stmt)
3353 != GF_OMP_TARGET_KIND_REGION))
3355 /* A teams construct can appear either strictly nested inside of a
3356 target construct with no intervening stmts, or can be encountered
3357 only by the initial task (so it must not appear inside any OpenMP
3358 construct). */
3359 error_at (gimple_location (stmt),
3360 "%<teams%> construct must be closely nested inside of "
3361 "%<target%> construct or not nested in any OpenMP "
3362 "construct");
3363 return false;
3365 break;
3366 case GIMPLE_OMP_TARGET:
3367 for (c = gimple_omp_target_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
3368 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
3369 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
3370 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
3372 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
3373 error_at (OMP_CLAUSE_LOCATION (c),
3374 "%<depend(%s)%> is only allowed in %<omp ordered%>",
3375 kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
3376 return false;
3378 if (is_gimple_omp_offloaded (stmt)
3379 && oacc_get_fn_attrib (cfun->decl) != NULL)
3381 error_at (gimple_location (stmt),
3382 "OpenACC region inside of OpenACC routine, nested "
3383 "parallelism not supported yet");
3384 return false;
3386 for (; ctx != NULL; ctx = ctx->outer)
3388 if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET)
3390 if (is_gimple_omp (stmt)
3391 && is_gimple_omp_oacc (stmt)
3392 && is_gimple_omp (ctx->stmt))
3394 error_at (gimple_location (stmt),
3395 "OpenACC construct inside of non-OpenACC region");
3396 return false;
3398 continue;
3401 const char *stmt_name, *ctx_stmt_name;
3402 switch (gimple_omp_target_kind (stmt))
3404 case GF_OMP_TARGET_KIND_REGION: stmt_name = "target"; break;
3405 case GF_OMP_TARGET_KIND_DATA: stmt_name = "target data"; break;
3406 case GF_OMP_TARGET_KIND_UPDATE: stmt_name = "target update"; break;
3407 case GF_OMP_TARGET_KIND_ENTER_DATA:
3408 stmt_name = "target enter data"; break;
3409 case GF_OMP_TARGET_KIND_EXIT_DATA:
3410 stmt_name = "target exit data"; break;
3411 case GF_OMP_TARGET_KIND_OACC_PARALLEL: stmt_name = "parallel"; break;
3412 case GF_OMP_TARGET_KIND_OACC_KERNELS: stmt_name = "kernels"; break;
3413 case GF_OMP_TARGET_KIND_OACC_SERIAL: stmt_name = "serial"; break;
3414 case GF_OMP_TARGET_KIND_OACC_DATA: stmt_name = "data"; break;
3415 case GF_OMP_TARGET_KIND_OACC_UPDATE: stmt_name = "update"; break;
3416 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
3417 stmt_name = "enter/exit data"; break;
3418 case GF_OMP_TARGET_KIND_OACC_DECLARE: stmt_name = "declare"; break;
3419 case GF_OMP_TARGET_KIND_OACC_HOST_DATA: stmt_name = "host_data";
3420 break;
3421 default: gcc_unreachable ();
3423 switch (gimple_omp_target_kind (ctx->stmt))
3425 case GF_OMP_TARGET_KIND_REGION: ctx_stmt_name = "target"; break;
3426 case GF_OMP_TARGET_KIND_DATA: ctx_stmt_name = "target data"; break;
3427 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
3428 ctx_stmt_name = "parallel"; break;
3429 case GF_OMP_TARGET_KIND_OACC_KERNELS:
3430 ctx_stmt_name = "kernels"; break;
3431 case GF_OMP_TARGET_KIND_OACC_SERIAL:
3432 ctx_stmt_name = "serial"; break;
3433 case GF_OMP_TARGET_KIND_OACC_DATA: ctx_stmt_name = "data"; break;
3434 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
3435 ctx_stmt_name = "host_data"; break;
3436 default: gcc_unreachable ();
3439 /* OpenACC/OpenMP mismatch? */
3440 if (is_gimple_omp_oacc (stmt)
3441 != is_gimple_omp_oacc (ctx->stmt))
3443 error_at (gimple_location (stmt),
3444 "%s %qs construct inside of %s %qs region",
3445 (is_gimple_omp_oacc (stmt)
3446 ? "OpenACC" : "OpenMP"), stmt_name,
3447 (is_gimple_omp_oacc (ctx->stmt)
3448 ? "OpenACC" : "OpenMP"), ctx_stmt_name);
3449 return false;
3451 if (is_gimple_omp_offloaded (ctx->stmt))
3453 /* No GIMPLE_OMP_TARGET inside offloaded OpenACC CTX. */
3454 if (is_gimple_omp_oacc (ctx->stmt))
3456 error_at (gimple_location (stmt),
3457 "%qs construct inside of %qs region",
3458 stmt_name, ctx_stmt_name);
3459 return false;
3461 else
3463 warning_at (gimple_location (stmt), 0,
3464 "%qs construct inside of %qs region",
3465 stmt_name, ctx_stmt_name);
3469 break;
3470 default:
3471 break;
3473 return true;
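/* Two examples of nestings the checks above reject, as source-level
   sketches: a barrier inside a critical region,

     #pragma omp critical
     {
       #pragma omp barrier
     }

   and a construct other than ordered simd/simd/loop/atomic inside a
   simd region,

     #pragma omp simd
     for (i = 0; i < n; i++)
       {
         #pragma omp parallel
         ...
       }

   both of which are replaced by a GIMPLE_NOP by the caller.  */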
3477 /* Helper function for scan_omp.
3479 Callback for walk_tree or operators in walk_gimple_stmt used to
3480 scan for OMP directives in TP. */
3482 static tree
3483 scan_omp_1_op (tree *tp, int *walk_subtrees, void *data)
3485 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
3486 omp_context *ctx = (omp_context *) wi->info;
3487 tree t = *tp;
3489 switch (TREE_CODE (t))
3491 case VAR_DECL:
3492 case PARM_DECL:
3493 case LABEL_DECL:
3494 case RESULT_DECL:
3495 if (ctx)
3497 tree repl = remap_decl (t, &ctx->cb);
3498 gcc_checking_assert (TREE_CODE (repl) != ERROR_MARK);
3499 *tp = repl;
3501 break;
3503 default:
3504 if (ctx && TYPE_P (t))
3505 *tp = remap_type (t, &ctx->cb);
3506 else if (!DECL_P (t))
3508 *walk_subtrees = 1;
3509 if (ctx)
3511 tree tem = remap_type (TREE_TYPE (t), &ctx->cb);
3512 if (tem != TREE_TYPE (t))
3514 if (TREE_CODE (t) == INTEGER_CST)
3515 *tp = wide_int_to_tree (tem, wi::to_wide (t));
3516 else
3517 TREE_TYPE (t) = tem;
3521 break;
3524 return NULL_TREE;
3527 /* Return true if FNDECL is a setjmp or a longjmp. */
3529 static bool
3530 setjmp_or_longjmp_p (const_tree fndecl)
3532 if (fndecl_built_in_p (fndecl, BUILT_IN_SETJMP)
3533 || fndecl_built_in_p (fndecl, BUILT_IN_LONGJMP))
3534 return true;
3536 tree declname = DECL_NAME (fndecl);
3537 if (!declname
3538 || (DECL_CONTEXT (fndecl) != NULL_TREE
3539 && TREE_CODE (DECL_CONTEXT (fndecl)) != TRANSLATION_UNIT_DECL)
3540 || !TREE_PUBLIC (fndecl))
3541 return false;
3543 const char *name = IDENTIFIER_POINTER (declname);
3544 return !strcmp (name, "setjmp") || !strcmp (name, "longjmp");
3547 /* Return true if FNDECL is an omp_* runtime API call. */
3549 static bool
3550 omp_runtime_api_call (const_tree fndecl)
3552 tree declname = DECL_NAME (fndecl);
3553 if (!declname
3554 || (DECL_CONTEXT (fndecl) != NULL_TREE
3555 && TREE_CODE (DECL_CONTEXT (fndecl)) != TRANSLATION_UNIT_DECL)
3556 || !TREE_PUBLIC (fndecl))
3557 return false;
3559 const char *name = IDENTIFIER_POINTER (declname);
3560 if (strncmp (name, "omp_", 4) != 0)
3561 return false;
3563 static const char *omp_runtime_apis[] =
3565 /* This array has 3 sections. First, omp_* calls that don't
3566 have any suffixes. */
3567 "target_alloc",
3568 "target_associate_ptr",
3569 "target_disassociate_ptr",
3570 "target_free",
3571 "target_is_present",
3572 "target_memcpy",
3573 "target_memcpy_rect",
3574 NULL,
3575 /* Now omp_* calls that are available as omp_* and omp_*_. */
3576 "capture_affinity",
3577 "destroy_lock",
3578 "destroy_nest_lock",
3579 "display_affinity",
3580 "get_active_level",
3581 "get_affinity_format",
3582 "get_cancellation",
3583 "get_default_device",
3584 "get_dynamic",
3585 "get_initial_device",
3586 "get_level",
3587 "get_max_active_levels",
3588 "get_max_task_priority",
3589 "get_max_threads",
3590 "get_nested",
3591 "get_num_devices",
3592 "get_num_places",
3593 "get_num_procs",
3594 "get_num_teams",
3595 "get_num_threads",
3596 "get_partition_num_places",
3597 "get_place_num",
3598 "get_proc_bind",
3599 "get_team_num",
3600 "get_thread_limit",
3601 "get_thread_num",
3602 "get_wtick",
3603 "get_wtime",
3604 "in_final",
3605 "in_parallel",
3606 "init_lock",
3607 "init_nest_lock",
3608 "is_initial_device",
3609 "pause_resource",
3610 "pause_resource_all",
3611 "set_affinity_format",
3612 "set_lock",
3613 "set_nest_lock",
3614 "test_lock",
3615 "test_nest_lock",
3616 "unset_lock",
3617 "unset_nest_lock",
3618 NULL,
3619 /* And finally calls available as omp_*, omp_*_ and omp_*_8_. */
3620 "get_ancestor_thread_num",
3621 "get_partition_place_nums",
3622 "get_place_num_procs",
3623 "get_place_proc_ids",
3624 "get_schedule",
3625 "get_team_size",
3626 "set_default_device",
3627 "set_dynamic",
3628 "set_max_active_levels",
3629 "set_nested",
3630 "set_num_threads",
3631 "set_schedule"
3634 int mode = 0;
3635 for (unsigned i = 0; i < ARRAY_SIZE (omp_runtime_apis); i++)
3637 if (omp_runtime_apis[i] == NULL)
3639 mode++;
3640 continue;
3642 size_t len = strlen (omp_runtime_apis[i]);
3643 if (strncmp (name + 4, omp_runtime_apis[i], len) == 0
3644 && (name[4 + len] == '\0'
3645 || (mode > 0
3646 && name[4 + len] == '_'
3647 && (name[4 + len + 1] == '\0'
3648 || (mode > 1
3649 && strcmp (name + 4 + len + 1, "8_") == 0)))))
3650 return true;
3652 return false;
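/* So, for instance, "omp_get_num_threads" matches in the second
   section, "omp_get_num_threads_" (the Fortran spelling) matches via
   the '_' suffix, "omp_get_team_size_8_" matches via the "8_" suffix
   that only the third section allows, while "omp_target_alloc" has
   to match the bare first-section spelling exactly.  */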
3655 /* Helper function for scan_omp.
3657 Callback for walk_gimple_stmt used to scan for OMP directives in
3658 the current statement in GSI. */
3660 static tree
3661 scan_omp_1_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
3662 struct walk_stmt_info *wi)
3664 gimple *stmt = gsi_stmt (*gsi);
3665 omp_context *ctx = (omp_context *) wi->info;
3667 if (gimple_has_location (stmt))
3668 input_location = gimple_location (stmt);
3670 /* Check the nesting restrictions. */
3671 bool remove = false;
3672 if (is_gimple_omp (stmt))
3673 remove = !check_omp_nesting_restrictions (stmt, ctx);
3674 else if (is_gimple_call (stmt))
3676 tree fndecl = gimple_call_fndecl (stmt);
3677 if (fndecl)
3679 if (ctx
3680 && gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
3681 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD
3682 && setjmp_or_longjmp_p (fndecl)
3683 && !ctx->loop_p)
3685 remove = true;
3686 error_at (gimple_location (stmt),
3687 "setjmp/longjmp inside %<simd%> construct");
3689 else if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3690 switch (DECL_FUNCTION_CODE (fndecl))
3692 case BUILT_IN_GOMP_BARRIER:
3693 case BUILT_IN_GOMP_CANCEL:
3694 case BUILT_IN_GOMP_CANCELLATION_POINT:
3695 case BUILT_IN_GOMP_TASKYIELD:
3696 case BUILT_IN_GOMP_TASKWAIT:
3697 case BUILT_IN_GOMP_TASKGROUP_START:
3698 case BUILT_IN_GOMP_TASKGROUP_END:
3699 remove = !check_omp_nesting_restrictions (stmt, ctx);
3700 break;
3701 default:
3702 break;
3704 else if (ctx)
3706 omp_context *octx = ctx;
3707 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SCAN && ctx->outer)
3708 octx = ctx->outer;
3709 if (octx->order_concurrent && omp_runtime_api_call (fndecl))
3711 remove = true;
3712 error_at (gimple_location (stmt),
3713 "OpenMP runtime API call %qD in a region with "
3714 "%<order(concurrent)%> clause", fndecl);
3719 if (remove)
3721 stmt = gimple_build_nop ();
3722 gsi_replace (gsi, stmt, false);
3725 *handled_ops_p = true;
3727 switch (gimple_code (stmt))
3729 case GIMPLE_OMP_PARALLEL:
3730 taskreg_nesting_level++;
3731 scan_omp_parallel (gsi, ctx);
3732 taskreg_nesting_level--;
3733 break;
3735 case GIMPLE_OMP_TASK:
3736 taskreg_nesting_level++;
3737 scan_omp_task (gsi, ctx);
3738 taskreg_nesting_level--;
3739 break;
3741 case GIMPLE_OMP_FOR:
3742 if ((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
3743 == GF_OMP_FOR_KIND_SIMD)
3744 && gimple_omp_for_combined_into_p (stmt)
3745 && gimple_code (ctx->stmt) != GIMPLE_OMP_SCAN)
3747 tree clauses = gimple_omp_for_clauses (as_a <gomp_for *> (stmt));
3748 tree c = omp_find_clause (clauses, OMP_CLAUSE_REDUCTION);
3749 if (c && OMP_CLAUSE_REDUCTION_INSCAN (c) && !seen_error ())
3751 scan_omp_simd_scan (gsi, as_a <gomp_for *> (stmt), ctx);
3752 break;
3755 if ((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
3756 == GF_OMP_FOR_KIND_SIMD)
3757 && omp_maybe_offloaded_ctx (ctx)
3758 && omp_max_simt_vf ()
3759 && gimple_omp_for_collapse (stmt) == 1)
3760 scan_omp_simd (gsi, as_a <gomp_for *> (stmt), ctx);
3761 else
3762 scan_omp_for (as_a <gomp_for *> (stmt), ctx);
3763 break;
3765 case GIMPLE_OMP_SECTIONS:
3766 scan_omp_sections (as_a <gomp_sections *> (stmt), ctx);
3767 break;
3769 case GIMPLE_OMP_SINGLE:
3770 scan_omp_single (as_a <gomp_single *> (stmt), ctx);
3771 break;
3773 case GIMPLE_OMP_SCAN:
3774 if (tree clauses = gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt)))
3776 if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_INCLUSIVE)
3777 ctx->scan_inclusive = true;
3778 else if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_EXCLUSIVE)
3779 ctx->scan_exclusive = true;
3781 /* FALLTHRU */
3782 case GIMPLE_OMP_SECTION:
3783 case GIMPLE_OMP_MASTER:
3784 case GIMPLE_OMP_ORDERED:
3785 case GIMPLE_OMP_CRITICAL:
3786 ctx = new_omp_context (stmt, ctx);
3787 scan_omp (gimple_omp_body_ptr (stmt), ctx);
3788 break;
3790 case GIMPLE_OMP_TASKGROUP:
3791 ctx = new_omp_context (stmt, ctx);
3792 scan_sharing_clauses (gimple_omp_taskgroup_clauses (stmt), ctx);
3793 scan_omp (gimple_omp_body_ptr (stmt), ctx);
3794 break;
3796 case GIMPLE_OMP_TARGET:
3797 if (is_gimple_omp_offloaded (stmt))
3799 taskreg_nesting_level++;
3800 scan_omp_target (as_a <gomp_target *> (stmt), ctx);
3801 taskreg_nesting_level--;
3803 else
3804 scan_omp_target (as_a <gomp_target *> (stmt), ctx);
3805 break;
3807 case GIMPLE_OMP_TEAMS:
3808 if (gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
3810 taskreg_nesting_level++;
3811 scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
3812 taskreg_nesting_level--;
3814 else
3815 scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
3816 break;
3818 case GIMPLE_BIND:
3820 tree var;
3822 *handled_ops_p = false;
3823 if (ctx)
3824 for (var = gimple_bind_vars (as_a <gbind *> (stmt));
3825 var ;
3826 var = DECL_CHAIN (var))
3827 insert_decl_map (&ctx->cb, var, var);
3829 break;
3830 default:
3831 *handled_ops_p = false;
3832 break;
3835 return NULL_TREE;
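/* For illustration, the checks above reject user code such as the
   following (hypothetical examples; diagnostics quoted from above):

     #pragma omp simd
     for (int i = 0; i < n; i++)
       if (setjmp (env))   -> error: setjmp/longjmp inside 'simd' construct
         ...

     #pragma omp for order(concurrent)
     for (int i = 0; i < n; i++)
       tid = omp_get_thread_num ();
                           -> error: OpenMP runtime API call
                              'omp_get_thread_num' in a region with
                              'order(concurrent)' clause  */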
3839 /* Scan all the statements starting at the current statement. CTX
3840 contains context information about the OMP directives and
3841 clauses found during the scan. */
3843 static void
3844 scan_omp (gimple_seq *body_p, omp_context *ctx)
3846 location_t saved_location;
3847 struct walk_stmt_info wi;
3849 memset (&wi, 0, sizeof (wi));
3850 wi.info = ctx;
3851 wi.want_locations = true;
3853 saved_location = input_location;
3854 walk_gimple_seq_mod (body_p, scan_omp_1_stmt, scan_omp_1_op, &wi);
3855 input_location = saved_location;
3858 /* Re-gimplification and code generation routines. */
3860 /* Remove omp_member_access_dummy_var variables from gimple_bind_vars
3861 of BIND if in a method. */
3863 static void
3864 maybe_remove_omp_member_access_dummy_vars (gbind *bind)
3866 if (DECL_ARGUMENTS (current_function_decl)
3867 && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
3868 && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
3869 == POINTER_TYPE))
3871 tree vars = gimple_bind_vars (bind);
3872 for (tree *pvar = &vars; *pvar; )
3873 if (omp_member_access_dummy_var (*pvar))
3874 *pvar = DECL_CHAIN (*pvar);
3875 else
3876 pvar = &DECL_CHAIN (*pvar);
3877 gimple_bind_set_vars (bind, vars);
3881 /* Remove omp_member_access_dummy_var variables from BLOCK_VARS of
3882 block and its subblocks. */
3884 static void
3885 remove_member_access_dummy_vars (tree block)
3887 for (tree *pvar = &BLOCK_VARS (block); *pvar; )
3888 if (omp_member_access_dummy_var (*pvar))
3889 *pvar = DECL_CHAIN (*pvar);
3890 else
3891 pvar = &DECL_CHAIN (*pvar);
3893 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
3894 remove_member_access_dummy_vars (block);
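/* Both loops above use the pointer-to-pointer unlinking idiom: PVAR
   always addresses the DECL_CHAIN slot that must be rewritten to drop
   *PVAR, so removal needs no "previous node" bookkeeping.  A minimal
   generic sketch, assuming a NODE type with a NEXT field (illustration
   only):

     for (struct node **pp = &head; *pp; )
       if (should_remove (*pp))
         *pp = (*pp)->next;      -- unlink and stay put
       else
         pp = &(*pp)->next;      -- advance  */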
3897 /* If a context was created for STMT when it was scanned, return it. */
3899 static omp_context *
3900 maybe_lookup_ctx (gimple *stmt)
3902 splay_tree_node n;
3903 n = splay_tree_lookup (all_contexts, (splay_tree_key) stmt);
3904 return n ? (omp_context *) n->value : NULL;
3908 /* Find the mapping for DECL in CTX or the immediately enclosing
3909 context that has a mapping for DECL.
3911 If CTX is a nested parallel directive, we may have to use the decl
3912 mappings created in CTX's parent context. Suppose that we have the
3913 following parallel nesting (variable UIDs shown for clarity):
3915 iD.1562 = 0;
3916 #omp parallel shared(iD.1562) -> outer parallel
3917 iD.1562 = iD.1562 + 1;
3919 #omp parallel shared (iD.1562) -> inner parallel
3920 iD.1562 = iD.1562 - 1;
3922 Each parallel structure will create a distinct .omp_data_s structure
3923 for copying iD.1562 in/out of the directive:
3925 outer parallel .omp_data_s.1.i -> iD.1562
3926 inner parallel .omp_data_s.2.i -> iD.1562
3928 A shared variable mapping will produce a copy-out operation before
3929 the parallel directive and a copy-in operation after it. So, in
3930 this case we would have:
3932 iD.1562 = 0;
3933 .omp_data_o.1.i = iD.1562;
3934 #omp parallel shared(iD.1562) -> outer parallel
3935 .omp_data_i.1 = &.omp_data_o.1
3936 .omp_data_i.1->i = .omp_data_i.1->i + 1;
3938 .omp_data_o.2.i = iD.1562; -> **
3939 #omp parallel shared(iD.1562) -> inner parallel
3940 .omp_data_i.2 = &.omp_data_o.2
3941 .omp_data_i.2->i = .omp_data_i.2->i - 1;
3944 ** This is a problem. The symbol iD.1562 cannot be referenced
3945 inside the body of the outer parallel region. But since we are
3946 emitting this copy operation while expanding the inner parallel
3947 directive, we need to access the CTX structure of the outer
3948 parallel directive to get the correct mapping:
3950 .omp_data_o.2.i = .omp_data_i.1->i
3952 Since there may be other workshare or parallel directives enclosing
3953 the parallel directive, it may be necessary to walk up the context
3954 parent chain. This is not a problem in general because nested
3955 parallelism happens only rarely. */
3957 static tree
3958 lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
3960 tree t;
3961 omp_context *up;
3963 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
3964 t = maybe_lookup_decl (decl, up);
3966 gcc_assert (!ctx->is_nested || t || is_global_var (decl));
3968 return t ? t : decl;
3972 /* Similar to lookup_decl_in_outer_ctx, but return DECL if not found
3973 in outer contexts. */
3975 static tree
3976 maybe_lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
3978 tree t = NULL;
3979 omp_context *up;
3981 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
3982 t = maybe_lookup_decl (decl, up);
3984 return t ? t : decl;
3988 /* Construct the initialization value for reduction operation OP. */
3990 tree
3991 omp_reduction_init_op (location_t loc, enum tree_code op, tree type)
3993 switch (op)
3995 case PLUS_EXPR:
3996 case MINUS_EXPR:
3997 case BIT_IOR_EXPR:
3998 case BIT_XOR_EXPR:
3999 case TRUTH_OR_EXPR:
4000 case TRUTH_ORIF_EXPR:
4001 case TRUTH_XOR_EXPR:
4002 case NE_EXPR:
4003 return build_zero_cst (type);
4005 case MULT_EXPR:
4006 case TRUTH_AND_EXPR:
4007 case TRUTH_ANDIF_EXPR:
4008 case EQ_EXPR:
4009 return fold_convert_loc (loc, type, integer_one_node);
4011 case BIT_AND_EXPR:
4012 return fold_convert_loc (loc, type, integer_minus_one_node);
4014 case MAX_EXPR:
4015 if (SCALAR_FLOAT_TYPE_P (type))
4017 REAL_VALUE_TYPE max, min;
4018 if (HONOR_INFINITIES (type))
4020 real_inf (&max);
4021 real_arithmetic (&min, NEGATE_EXPR, &max, NULL);
4023 else
4024 real_maxval (&min, 1, TYPE_MODE (type));
4025 return build_real (type, min);
4027 else if (POINTER_TYPE_P (type))
4029 wide_int min
4030 = wi::min_value (TYPE_PRECISION (type), TYPE_SIGN (type));
4031 return wide_int_to_tree (type, min);
4033 else
4035 gcc_assert (INTEGRAL_TYPE_P (type));
4036 return TYPE_MIN_VALUE (type);
4039 case MIN_EXPR:
4040 if (SCALAR_FLOAT_TYPE_P (type))
4042 REAL_VALUE_TYPE max;
4043 if (HONOR_INFINITIES (type))
4044 real_inf (&max);
4045 else
4046 real_maxval (&max, 0, TYPE_MODE (type));
4047 return build_real (type, max);
4049 else if (POINTER_TYPE_P (type))
4051 wide_int max
4052 = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
4053 return wide_int_to_tree (type, max);
4055 else
4057 gcc_assert (INTEGRAL_TYPE_P (type));
4058 return TYPE_MAX_VALUE (type);
4061 default:
4062 gcc_unreachable ();
4066 /* Construct the initialization value for reduction CLAUSE. */
4068 tree
4069 omp_reduction_init (tree clause, tree type)
4071 return omp_reduction_init_op (OMP_CLAUSE_LOCATION (clause),
4072 OMP_CLAUSE_REDUCTION_CODE (clause), type);
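/* For illustration, the identities produced above satisfy
   "x OP identity == x" for every x of the reduced type; e.g. for a
   32-bit signed int:

     +, -, |, ^, !=, ||   ->  0
     *, &&, ==            ->  1
     &                    -> -1        (all bits set)
     max                  ->  INT_MIN
     min                  ->  INT_MAX

   and for floating point, max/min start from -inf/+inf when the mode
   honors infinities, otherwise from the most negative/positive finite
   value.  */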
4075 /* Return alignment to be assumed for var in CLAUSE, which should be
4076 OMP_CLAUSE_ALIGNED. */
4078 static tree
4079 omp_clause_aligned_alignment (tree clause)
4081 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
4082 return OMP_CLAUSE_ALIGNED_ALIGNMENT (clause);
4084 /* Otherwise return the implementation-defined alignment. */
4085 unsigned int al = 1;
4086 opt_scalar_mode mode_iter;
4087 auto_vector_modes modes;
4088 targetm.vectorize.autovectorize_vector_modes (&modes, true);
4089 static enum mode_class classes[]
4090 = { MODE_INT, MODE_VECTOR_INT, MODE_FLOAT, MODE_VECTOR_FLOAT };
4091 for (int i = 0; i < 4; i += 2)
4092 /* The for loop above dictates that we only walk through scalar classes. */
4093 FOR_EACH_MODE_IN_CLASS (mode_iter, classes[i])
4095 scalar_mode mode = mode_iter.require ();
4096 machine_mode vmode = targetm.vectorize.preferred_simd_mode (mode);
4097 if (GET_MODE_CLASS (vmode) != classes[i + 1])
4098 continue;
4099 machine_mode alt_vmode;
4100 for (unsigned int j = 0; j < modes.length (); ++j)
4101 if (related_vector_mode (modes[j], mode).exists (&alt_vmode)
4102 && known_ge (GET_MODE_SIZE (alt_vmode), GET_MODE_SIZE (vmode)))
4103 vmode = alt_vmode;
4105 tree type = lang_hooks.types.type_for_mode (mode, 1);
4106 if (type == NULL_TREE || TYPE_MODE (type) != mode)
4107 continue;
4108 type = build_vector_type_for_mode (type, vmode);
4109 if (TYPE_MODE (type) != vmode)
4110 continue;
4111 if (TYPE_ALIGN_UNIT (type) > al)
4112 al = TYPE_ALIGN_UNIT (type);
4114 return build_int_cst (integer_type_node, al);
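/* For illustration: given "#pragma omp simd aligned(p)" with no
   explicit alignment, the value computed above becomes the AL argument
   in the lowering further below, which effectively emits

     p = (T *) __builtin_assume_aligned (p, al);

   so the vectorizer may assume AL-byte alignment (P and T are
   hypothetical names).  */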
4118 /* This structure is part of the interface between lower_rec_simd_input_clauses
4119 and lower_rec_input_clauses. */
4121 class omplow_simd_context {
4122 public:
4123 omplow_simd_context () { memset (this, 0, sizeof (*this)); }
4124 tree idx;
4125 tree lane;
4126 tree lastlane;
4127 vec<tree, va_heap> simt_eargs;
4128 gimple_seq simt_dlist;
4129 poly_uint64_pod max_vf;
4130 bool is_simt;
4131 };
4133 /* Helper function of lower_rec_input_clauses, used for #pragma omp simd
4134 privatization. */
4136 static bool
4137 lower_rec_simd_input_clauses (tree new_var, omp_context *ctx,
4138 omplow_simd_context *sctx, tree &ivar,
4139 tree &lvar, tree *rvar = NULL,
4140 tree *rvar2 = NULL)
4142 if (known_eq (sctx->max_vf, 0U))
4144 sctx->max_vf = sctx->is_simt ? omp_max_simt_vf () : omp_max_vf ();
4145 if (maybe_gt (sctx->max_vf, 1U))
4147 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
4148 OMP_CLAUSE_SAFELEN);
4149 if (c)
4151 poly_uint64 safe_len;
4152 if (!poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
4153 || maybe_lt (safe_len, 1U))
4154 sctx->max_vf = 1;
4155 else
4156 sctx->max_vf = lower_bound (sctx->max_vf, safe_len);
4159 if (maybe_gt (sctx->max_vf, 1U))
4161 sctx->idx = create_tmp_var (unsigned_type_node);
4162 sctx->lane = create_tmp_var (unsigned_type_node);
4165 if (known_eq (sctx->max_vf, 1U))
4166 return false;
4168 if (sctx->is_simt)
4170 if (is_gimple_reg (new_var))
4172 ivar = lvar = new_var;
4173 return true;
4175 tree type = TREE_TYPE (new_var), ptype = build_pointer_type (type);
4176 ivar = lvar = create_tmp_var (type);
4177 TREE_ADDRESSABLE (ivar) = 1;
4178 DECL_ATTRIBUTES (ivar) = tree_cons (get_identifier ("omp simt private"),
4179 NULL, DECL_ATTRIBUTES (ivar));
4180 sctx->simt_eargs.safe_push (build1 (ADDR_EXPR, ptype, ivar));
4181 tree clobber = build_clobber (type);
4182 gimple *g = gimple_build_assign (ivar, clobber);
4183 gimple_seq_add_stmt (&sctx->simt_dlist, g);
4185 else
4187 tree atype = build_array_type_nelts (TREE_TYPE (new_var), sctx->max_vf);
4188 tree avar = create_tmp_var_raw (atype);
4189 if (TREE_ADDRESSABLE (new_var))
4190 TREE_ADDRESSABLE (avar) = 1;
4191 DECL_ATTRIBUTES (avar)
4192 = tree_cons (get_identifier ("omp simd array"), NULL,
4193 DECL_ATTRIBUTES (avar));
4194 gimple_add_tmp_var (avar);
4195 tree iavar = avar;
4196 if (rvar && !ctx->for_simd_scan_phase)
4198 /* For inscan reductions, create another array temporary,
4199 which will hold the reduced value. */
4200 iavar = create_tmp_var_raw (atype);
4201 if (TREE_ADDRESSABLE (new_var))
4202 TREE_ADDRESSABLE (iavar) = 1;
4203 DECL_ATTRIBUTES (iavar)
4204 = tree_cons (get_identifier ("omp simd array"), NULL,
4205 tree_cons (get_identifier ("omp simd inscan"), NULL,
4206 DECL_ATTRIBUTES (iavar)));
4207 gimple_add_tmp_var (iavar);
4208 ctx->cb.decl_map->put (avar, iavar);
4209 if (sctx->lastlane == NULL_TREE)
4210 sctx->lastlane = create_tmp_var (unsigned_type_node);
4211 *rvar = build4 (ARRAY_REF, TREE_TYPE (new_var), iavar,
4212 sctx->lastlane, NULL_TREE, NULL_TREE);
4213 TREE_THIS_NOTRAP (*rvar) = 1;
4215 if (ctx->scan_exclusive)
4217 /* And for exclusive scan yet another one, which will
4218 hold the value during the scan phase. */
4219 tree savar = create_tmp_var_raw (atype);
4220 if (TREE_ADDRESSABLE (new_var))
4221 TREE_ADDRESSABLE (savar) = 1;
4222 DECL_ATTRIBUTES (savar)
4223 = tree_cons (get_identifier ("omp simd array"), NULL,
4224 tree_cons (get_identifier ("omp simd inscan "
4225 "exclusive"), NULL,
4226 DECL_ATTRIBUTES (savar)));
4227 gimple_add_tmp_var (savar);
4228 ctx->cb.decl_map->put (iavar, savar);
4229 *rvar2 = build4 (ARRAY_REF, TREE_TYPE (new_var), savar,
4230 sctx->idx, NULL_TREE, NULL_TREE);
4231 TREE_THIS_NOTRAP (*rvar2) = 1;
4234 ivar = build4 (ARRAY_REF, TREE_TYPE (new_var), iavar, sctx->idx,
4235 NULL_TREE, NULL_TREE);
4236 lvar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, sctx->lane,
4237 NULL_TREE, NULL_TREE);
4238 TREE_THIS_NOTRAP (ivar) = 1;
4239 TREE_THIS_NOTRAP (lvar) = 1;
4241 if (DECL_P (new_var))
4243 SET_DECL_VALUE_EXPR (new_var, lvar);
4244 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4246 return true;
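/* Conceptually, the function above privatizes X for a simd loop by
   materializing an "omp simd array"

     T x_arr[MAX_VF];

   and handing back per-lane element references: IVAR (indexed by
   SCTX->idx) for uses inside the loop body and LVAR (indexed by
   SCTX->lane) for the last-value machinery; the vectorizer later maps
   the array elements onto vector lanes.  A rough sketch, with
   hypothetical names and the lane selection simplified:

     #pragma omp simd
     for (i = 0; i < n; i++)
       { x = f (i); use (x); }

   becomes approximately

     for (i = 0; i < n; i++)
       { x_arr[lane (i)] = f (i); use (x_arr[lane (i)]); }  */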
4249 /* Helper function of lower_rec_input_clauses. For a reference
4250 in simd reduction, add an underlying variable it will reference. */
4252 static void
4253 handle_simd_reference (location_t loc, tree new_vard, gimple_seq *ilist)
4255 tree z = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_vard)));
4256 if (TREE_CONSTANT (z))
4258 z = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_vard)),
4259 get_name (new_vard));
4260 gimple_add_tmp_var (z);
4261 TREE_ADDRESSABLE (z) = 1;
4262 z = build_fold_addr_expr_loc (loc, z);
4263 gimplify_assign (new_vard, z, ilist);
4267 /* Helper function for lower_rec_input_clauses.  Emit code into the
4268 ILIST sequence to compute (type) (tskred_temp[idx]). */
4270 static tree
4271 task_reduction_read (gimple_seq *ilist, tree tskred_temp, tree type,
4272 unsigned idx)
4274 unsigned HOST_WIDE_INT sz
4275 = tree_to_uhwi (TYPE_SIZE_UNIT (pointer_sized_int_node));
4276 tree r = build2 (MEM_REF, pointer_sized_int_node,
4277 tskred_temp, build_int_cst (TREE_TYPE (tskred_temp),
4278 idx * sz));
4279 tree v = create_tmp_var (pointer_sized_int_node);
4280 gimple *g = gimple_build_assign (v, r);
4281 gimple_seq_add_stmt (ilist, g);
4282 if (!useless_type_conversion_p (type, pointer_sized_int_node))
4284 v = create_tmp_var (type);
4285 g = gimple_build_assign (v, NOP_EXPR, gimple_assign_lhs (g));
4286 gimple_seq_add_stmt (ilist, g);
4288 return v;
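/* The sequence emitted above corresponds roughly to the following C,
   assuming TSKRED_TEMP points at an array of pointer-sized slots
   (illustration only):

     __UINTPTR_TYPE__ v = ((__UINTPTR_TYPE__ *) tskred_temp)[idx];
     ... (type) v ...        -- the conversion is omitted when TYPE is
                                already pointer-sized  */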
4291 /* Generate code to implement the input clauses, FIRSTPRIVATE and COPYIN,
4292 from the receiver (aka child) side and initializers for REFERENCE_TYPE
4293 private variables. Initialization statements go in ILIST, while calls
4294 to destructors go in DLIST. */
4296 static void
4297 lower_rec_input_clauses (tree clauses, gimple_seq *ilist, gimple_seq *dlist,
4298 omp_context *ctx, struct omp_for_data *fd)
4300 tree c, copyin_seq, x, ptr;
4301 bool copyin_by_ref = false;
4302 bool lastprivate_firstprivate = false;
4303 bool reduction_omp_orig_ref = false;
4304 int pass;
4305 bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
4306 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD);
4307 omplow_simd_context sctx = omplow_simd_context ();
4308 tree simt_lane = NULL_TREE, simtrec = NULL_TREE;
4309 tree ivar = NULL_TREE, lvar = NULL_TREE, uid = NULL_TREE;
4310 gimple_seq llist[4] = { };
4311 tree nonconst_simd_if = NULL_TREE;
4313 copyin_seq = NULL;
4314 sctx.is_simt = is_simd && omp_find_clause (clauses, OMP_CLAUSE__SIMT_);
4316 /* Set max_vf=1 (which will later enforce safelen=1) in simd loops
4317 with data sharing clauses referencing variable sized vars. That
4318 is unnecessarily hard to support and very unlikely to result in
4319 vectorized code anyway. */
4320 if (is_simd)
4321 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
4322 switch (OMP_CLAUSE_CODE (c))
4324 case OMP_CLAUSE_LINEAR:
4325 if (OMP_CLAUSE_LINEAR_ARRAY (c))
4326 sctx.max_vf = 1;
4327 /* FALLTHRU */
4328 case OMP_CLAUSE_PRIVATE:
4329 case OMP_CLAUSE_FIRSTPRIVATE:
4330 case OMP_CLAUSE_LASTPRIVATE:
4331 if (is_variable_sized (OMP_CLAUSE_DECL (c)))
4332 sctx.max_vf = 1;
4333 else if (omp_is_reference (OMP_CLAUSE_DECL (c)))
4335 tree rtype = TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c)));
4336 if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype)))
4337 sctx.max_vf = 1;
4339 break;
4340 case OMP_CLAUSE_REDUCTION:
4341 case OMP_CLAUSE_IN_REDUCTION:
4342 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
4343 || is_variable_sized (OMP_CLAUSE_DECL (c)))
4344 sctx.max_vf = 1;
4345 else if (omp_is_reference (OMP_CLAUSE_DECL (c)))
4347 tree rtype = TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c)));
4348 if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype)))
4349 sctx.max_vf = 1;
4351 break;
4352 case OMP_CLAUSE_IF:
4353 if (integer_zerop (OMP_CLAUSE_IF_EXPR (c)))
4354 sctx.max_vf = 1;
4355 else if (TREE_CODE (OMP_CLAUSE_IF_EXPR (c)) != INTEGER_CST)
4356 nonconst_simd_if = OMP_CLAUSE_IF_EXPR (c);
4357 break;
4358 case OMP_CLAUSE_SIMDLEN:
4359 if (integer_onep (OMP_CLAUSE_SIMDLEN_EXPR (c)))
4360 sctx.max_vf = 1;
4361 break;
4362 case OMP_CLAUSE__CONDTEMP_:
4363 /* FIXME: lastprivate(conditional:) not handled for SIMT yet. */
4364 if (sctx.is_simt)
4365 sctx.max_vf = 1;
4366 break;
4367 default:
4368 continue;
4371 /* Add a placeholder for simduid. */
4372 if (sctx.is_simt && maybe_ne (sctx.max_vf, 1U))
4373 sctx.simt_eargs.safe_push (NULL_TREE);
4375 unsigned task_reduction_cnt = 0;
4376 unsigned task_reduction_cntorig = 0;
4377 unsigned task_reduction_cnt_full = 0;
4378 unsigned task_reduction_cntorig_full = 0;
4379 unsigned task_reduction_other_cnt = 0;
4380 tree tskred_atype = NULL_TREE, tskred_avar = NULL_TREE;
4381 tree tskred_base = NULL_TREE, tskred_temp = NULL_TREE;
4382 /* Do all the fixed sized types in the first pass, and the variable sized
4383 types in the second pass.  This makes sure that the scalar arguments to
4384 the variable sized types are processed before we use them in the
4385 variable sized operations.  For task reductions we use 4 passes: in the
4386 first two we ignore them, in the third we gather arguments for the
4387 GOMP_task_reduction_remap call, and in the last pass we actually handle
4388 the task reductions.  */
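/* Schematically (illustration only):

     pass 0: fixed sized privates; task reductions ignored
     pass 1: variable sized privates; task reductions ignored
     pass 2: gather addresses for the GOMP_task_reduction_remap call
     pass 3: set up the task reduction privates themselves  */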
4389 for (pass = 0; pass < ((task_reduction_cnt || task_reduction_other_cnt)
4390 ? 4 : 2); ++pass)
4392 if (pass == 2 && task_reduction_cnt)
4394 tskred_atype
4395 = build_array_type_nelts (ptr_type_node, task_reduction_cnt
4396 + task_reduction_cntorig);
4397 tskred_avar = create_tmp_var_raw (tskred_atype);
4398 gimple_add_tmp_var (tskred_avar);
4399 TREE_ADDRESSABLE (tskred_avar) = 1;
4400 task_reduction_cnt_full = task_reduction_cnt;
4401 task_reduction_cntorig_full = task_reduction_cntorig;
4403 else if (pass == 3 && task_reduction_cnt)
4405 x = builtin_decl_explicit (BUILT_IN_GOMP_TASK_REDUCTION_REMAP);
4406 gimple *g
4407 = gimple_build_call (x, 3, size_int (task_reduction_cnt),
4408 size_int (task_reduction_cntorig),
4409 build_fold_addr_expr (tskred_avar));
4410 gimple_seq_add_stmt (ilist, g);
4412 if (pass == 3 && task_reduction_other_cnt)
4414 /* For reduction clauses, build
4415 tskred_base = (void *) tskred_temp[2]
4416 + omp_get_thread_num () * tskred_temp[1],
4417 or, if tskred_temp[1] is known to be constant, use that constant
4418 directly.  This is the start of the private reduction copy block
4419 for the current thread. */
4420 tree v = create_tmp_var (integer_type_node);
4421 x = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
4422 gimple *g = gimple_build_call (x, 0);
4423 gimple_call_set_lhs (g, v);
4424 gimple_seq_add_stmt (ilist, g);
4425 c = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
4426 tskred_temp = OMP_CLAUSE_DECL (c);
4427 if (is_taskreg_ctx (ctx))
4428 tskred_temp = lookup_decl (tskred_temp, ctx);
4429 tree v2 = create_tmp_var (sizetype);
4430 g = gimple_build_assign (v2, NOP_EXPR, v);
4431 gimple_seq_add_stmt (ilist, g);
4432 if (ctx->task_reductions[0])
4433 v = fold_convert (sizetype, ctx->task_reductions[0]);
4434 else
4435 v = task_reduction_read (ilist, tskred_temp, sizetype, 1);
4436 tree v3 = create_tmp_var (sizetype);
4437 g = gimple_build_assign (v3, MULT_EXPR, v2, v);
4438 gimple_seq_add_stmt (ilist, g);
4439 v = task_reduction_read (ilist, tskred_temp, ptr_type_node, 2);
4440 tskred_base = create_tmp_var (ptr_type_node);
4441 g = gimple_build_assign (tskred_base, POINTER_PLUS_EXPR, v, v3);
4442 gimple_seq_add_stmt (ilist, g);
4444 task_reduction_cnt = 0;
4445 task_reduction_cntorig = 0;
4446 task_reduction_other_cnt = 0;
4447 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
4449 enum omp_clause_code c_kind = OMP_CLAUSE_CODE (c);
4450 tree var, new_var;
4451 bool by_ref;
4452 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
4453 bool task_reduction_p = false;
4454 bool task_reduction_needs_orig_p = false;
4455 tree cond = NULL_TREE;
4457 switch (c_kind)
4459 case OMP_CLAUSE_PRIVATE:
4460 if (OMP_CLAUSE_PRIVATE_DEBUG (c))
4461 continue;
4462 break;
4463 case OMP_CLAUSE_SHARED:
4464 /* Ignore shared directives in a teams construct inside
4465 a target construct. */
4466 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
4467 && !is_host_teams_ctx (ctx))
4468 continue;
4469 if (maybe_lookup_decl (OMP_CLAUSE_DECL (c), ctx) == NULL)
4471 gcc_assert (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c)
4472 || is_global_var (OMP_CLAUSE_DECL (c)));
4473 continue;
4475 case OMP_CLAUSE_FIRSTPRIVATE:
4476 case OMP_CLAUSE_COPYIN:
4477 break;
4478 case OMP_CLAUSE_LINEAR:
4479 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
4480 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
4481 lastprivate_firstprivate = true;
4482 break;
4483 case OMP_CLAUSE_REDUCTION:
4484 case OMP_CLAUSE_IN_REDUCTION:
4485 if (is_task_ctx (ctx) || OMP_CLAUSE_REDUCTION_TASK (c))
4487 task_reduction_p = true;
4488 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
4490 task_reduction_other_cnt++;
4491 if (pass == 2)
4492 continue;
4494 else
4495 task_reduction_cnt++;
4496 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
4498 var = OMP_CLAUSE_DECL (c);
4499 /* If var is a global variable that isn't privatized
4500 in outer contexts, we don't need to look up the
4501 original address; it is always the address of the
4502 global variable itself. */
4503 if (!DECL_P (var)
4504 || omp_is_reference (var)
4505 || !is_global_var
4506 (maybe_lookup_decl_in_outer_ctx (var, ctx)))
4508 task_reduction_needs_orig_p = true;
4509 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
4510 task_reduction_cntorig++;
4514 else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
4515 reduction_omp_orig_ref = true;
4516 break;
4517 case OMP_CLAUSE__REDUCTEMP_:
4518 if (!is_taskreg_ctx (ctx))
4519 continue;
4520 /* FALLTHRU */
4521 case OMP_CLAUSE__LOOPTEMP_:
4522 /* Handle _looptemp_/_reductemp_ clauses only on
4523 parallel/task. */
4524 if (fd)
4525 continue;
4526 break;
4527 case OMP_CLAUSE_LASTPRIVATE:
4528 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
4530 lastprivate_firstprivate = true;
4531 if (pass != 0 || is_taskloop_ctx (ctx))
4532 continue;
4534 /* Even without a corresponding firstprivate, if the
4535 decl is a Fortran allocatable it needs an outer var
4536 reference. */
4537 else if (pass == 0
4538 && lang_hooks.decls.omp_private_outer_ref
4539 (OMP_CLAUSE_DECL (c)))
4540 lastprivate_firstprivate = true;
4541 break;
4542 case OMP_CLAUSE_ALIGNED:
4543 if (pass != 1)
4544 continue;
4545 var = OMP_CLAUSE_DECL (c);
4546 if (TREE_CODE (TREE_TYPE (var)) == POINTER_TYPE
4547 && !is_global_var (var))
4549 new_var = maybe_lookup_decl (var, ctx);
4550 if (new_var == NULL_TREE)
4551 new_var = maybe_lookup_decl_in_outer_ctx (var, ctx);
4552 x = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
4553 tree alarg = omp_clause_aligned_alignment (c);
4554 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
4555 x = build_call_expr_loc (clause_loc, x, 2, new_var, alarg);
4556 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
4557 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
4558 gimplify_and_add (x, ilist);
4560 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
4561 && is_global_var (var))
4563 tree ptype = build_pointer_type (TREE_TYPE (var)), t, t2;
4564 new_var = lookup_decl (var, ctx);
4565 t = maybe_lookup_decl_in_outer_ctx (var, ctx);
4566 t = build_fold_addr_expr_loc (clause_loc, t);
4567 t2 = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
4568 tree alarg = omp_clause_aligned_alignment (c);
4569 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
4570 t = build_call_expr_loc (clause_loc, t2, 2, t, alarg);
4571 t = fold_convert_loc (clause_loc, ptype, t);
4572 x = create_tmp_var (ptype);
4573 t = build2 (MODIFY_EXPR, ptype, x, t);
4574 gimplify_and_add (t, ilist);
4575 t = build_simple_mem_ref_loc (clause_loc, x);
4576 SET_DECL_VALUE_EXPR (new_var, t);
4577 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4579 continue;
4580 case OMP_CLAUSE__CONDTEMP_:
4581 if (is_parallel_ctx (ctx)
4582 || (is_simd && !OMP_CLAUSE__CONDTEMP__ITER (c)))
4583 break;
4584 continue;
4585 default:
4586 continue;
4589 if (task_reduction_p != (pass >= 2))
4590 continue;
4592 new_var = var = OMP_CLAUSE_DECL (c);
4593 if ((c_kind == OMP_CLAUSE_REDUCTION
4594 || c_kind == OMP_CLAUSE_IN_REDUCTION)
4595 && TREE_CODE (var) == MEM_REF)
4597 var = TREE_OPERAND (var, 0);
4598 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
4599 var = TREE_OPERAND (var, 0);
4600 if (TREE_CODE (var) == INDIRECT_REF
4601 || TREE_CODE (var) == ADDR_EXPR)
4602 var = TREE_OPERAND (var, 0);
4603 if (is_variable_sized (var))
4605 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
4606 var = DECL_VALUE_EXPR (var);
4607 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
4608 var = TREE_OPERAND (var, 0);
4609 gcc_assert (DECL_P (var));
4611 new_var = var;
4613 if (c_kind != OMP_CLAUSE_COPYIN)
4614 new_var = lookup_decl (var, ctx);
4616 if (c_kind == OMP_CLAUSE_SHARED || c_kind == OMP_CLAUSE_COPYIN)
4618 if (pass != 0)
4619 continue;
4621 /* C/C++ array section reductions. */
4622 else if ((c_kind == OMP_CLAUSE_REDUCTION
4623 || c_kind == OMP_CLAUSE_IN_REDUCTION)
4624 && var != OMP_CLAUSE_DECL (c))
4626 if (pass == 0)
4627 continue;
4629 tree bias = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
4630 tree orig_var = TREE_OPERAND (OMP_CLAUSE_DECL (c), 0);
4632 if (TREE_CODE (orig_var) == POINTER_PLUS_EXPR)
4634 tree b = TREE_OPERAND (orig_var, 1);
4635 b = maybe_lookup_decl (b, ctx);
4636 if (b == NULL)
4638 b = TREE_OPERAND (orig_var, 1);
4639 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
4641 if (integer_zerop (bias))
4642 bias = b;
4643 else
4645 bias = fold_convert_loc (clause_loc,
4646 TREE_TYPE (b), bias);
4647 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
4648 TREE_TYPE (b), b, bias);
4650 orig_var = TREE_OPERAND (orig_var, 0);
4652 if (pass == 2)
4654 tree out = maybe_lookup_decl_in_outer_ctx (var, ctx);
4655 if (is_global_var (out)
4656 && TREE_CODE (TREE_TYPE (out)) != POINTER_TYPE
4657 && (TREE_CODE (TREE_TYPE (out)) != REFERENCE_TYPE
4658 || (TREE_CODE (TREE_TYPE (TREE_TYPE (out)))
4659 != POINTER_TYPE)))
4660 x = var;
4661 else
4663 bool by_ref = use_pointer_for_field (var, NULL);
4664 x = build_receiver_ref (var, by_ref, ctx);
4665 if (TREE_CODE (TREE_TYPE (var)) == REFERENCE_TYPE
4666 && (TREE_CODE (TREE_TYPE (TREE_TYPE (var)))
4667 == POINTER_TYPE))
4668 x = build_fold_addr_expr (x);
4670 if (TREE_CODE (orig_var) == INDIRECT_REF)
4671 x = build_simple_mem_ref (x);
4672 else if (TREE_CODE (orig_var) == ADDR_EXPR)
4674 if (var == TREE_OPERAND (orig_var, 0))
4675 x = build_fold_addr_expr (x);
4677 bias = fold_convert (sizetype, bias);
4678 x = fold_convert (ptr_type_node, x);
4679 x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
4680 TREE_TYPE (x), x, bias);
4681 unsigned cnt = task_reduction_cnt - 1;
4682 if (!task_reduction_needs_orig_p)
4683 cnt += (task_reduction_cntorig_full
4684 - task_reduction_cntorig);
4685 else
4686 cnt = task_reduction_cntorig - 1;
4687 tree r = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
4688 size_int (cnt), NULL_TREE, NULL_TREE);
4689 gimplify_assign (r, x, ilist);
4690 continue;
4693 if (TREE_CODE (orig_var) == INDIRECT_REF
4694 || TREE_CODE (orig_var) == ADDR_EXPR)
4695 orig_var = TREE_OPERAND (orig_var, 0);
4696 tree d = OMP_CLAUSE_DECL (c);
4697 tree type = TREE_TYPE (d);
4698 gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
4699 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
4700 const char *name = get_name (orig_var);
4701 if (pass == 3)
4703 tree xv = create_tmp_var (ptr_type_node);
4704 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
4706 unsigned cnt = task_reduction_cnt - 1;
4707 if (!task_reduction_needs_orig_p)
4708 cnt += (task_reduction_cntorig_full
4709 - task_reduction_cntorig);
4710 else
4711 cnt = task_reduction_cntorig - 1;
4712 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
4713 size_int (cnt), NULL_TREE, NULL_TREE);
4715 gimple *g = gimple_build_assign (xv, x);
4716 gimple_seq_add_stmt (ilist, g);
4718 else
4720 unsigned int idx = *ctx->task_reduction_map->get (c);
4721 tree off;
4722 if (ctx->task_reductions[1 + idx])
4723 off = fold_convert (sizetype,
4724 ctx->task_reductions[1 + idx]);
4725 else
4726 off = task_reduction_read (ilist, tskred_temp, sizetype,
4727 7 + 3 * idx + 1);
4728 gimple *g = gimple_build_assign (xv, POINTER_PLUS_EXPR,
4729 tskred_base, off);
4730 gimple_seq_add_stmt (ilist, g);
4732 x = fold_convert (build_pointer_type (boolean_type_node),
4733 xv);
4734 if (TREE_CONSTANT (v))
4735 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (x), x,
4736 TYPE_SIZE_UNIT (type));
4737 else
4739 tree t = maybe_lookup_decl (v, ctx);
4740 if (t)
4741 v = t;
4742 else
4743 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
4744 gimplify_expr (&v, ilist, NULL, is_gimple_val,
4745 fb_rvalue);
4746 t = fold_build2_loc (clause_loc, PLUS_EXPR,
4747 TREE_TYPE (v), v,
4748 build_int_cst (TREE_TYPE (v), 1));
4749 t = fold_build2_loc (clause_loc, MULT_EXPR,
4750 TREE_TYPE (v), t,
4751 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4752 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (x), x, t);
4754 cond = create_tmp_var (TREE_TYPE (x));
4755 gimplify_assign (cond, x, ilist);
4756 x = xv;
4758 else if (TREE_CONSTANT (v))
4760 x = create_tmp_var_raw (type, name);
4761 gimple_add_tmp_var (x);
4762 TREE_ADDRESSABLE (x) = 1;
4763 x = build_fold_addr_expr_loc (clause_loc, x);
4765 else
4767 tree atmp
4768 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
4769 tree t = maybe_lookup_decl (v, ctx);
4770 if (t)
4771 v = t;
4772 else
4773 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
4774 gimplify_expr (&v, ilist, NULL, is_gimple_val, fb_rvalue);
4775 t = fold_build2_loc (clause_loc, PLUS_EXPR,
4776 TREE_TYPE (v), v,
4777 build_int_cst (TREE_TYPE (v), 1));
4778 t = fold_build2_loc (clause_loc, MULT_EXPR,
4779 TREE_TYPE (v), t,
4780 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4781 tree al = size_int (TYPE_ALIGN (TREE_TYPE (type)));
4782 x = build_call_expr_loc (clause_loc, atmp, 2, t, al);
4785 tree ptype = build_pointer_type (TREE_TYPE (type));
4786 x = fold_convert_loc (clause_loc, ptype, x);
4787 tree y = create_tmp_var (ptype, name);
4788 gimplify_assign (y, x, ilist);
4789 x = y;
4790 tree yb = y;
4792 if (!integer_zerop (bias))
4794 bias = fold_convert_loc (clause_loc, pointer_sized_int_node,
4795 bias);
4796 yb = fold_convert_loc (clause_loc, pointer_sized_int_node, y);
4798 yb = fold_build2_loc (clause_loc, MINUS_EXPR,
4799 pointer_sized_int_node, yb, bias);
4800 x = fold_convert_loc (clause_loc, TREE_TYPE (x), yb);
4801 yb = create_tmp_var (ptype, name);
4802 gimplify_assign (yb, x, ilist);
4803 x = yb;
4806 d = TREE_OPERAND (d, 0);
4807 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
4808 d = TREE_OPERAND (d, 0);
4809 if (TREE_CODE (d) == ADDR_EXPR)
4811 if (orig_var != var)
4813 gcc_assert (is_variable_sized (orig_var));
4814 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
4816 gimplify_assign (new_var, x, ilist);
4817 tree new_orig_var = lookup_decl (orig_var, ctx);
4818 tree t = build_fold_indirect_ref (new_var);
4819 DECL_IGNORED_P (new_var) = 0;
4820 TREE_THIS_NOTRAP (t) = 1;
4821 SET_DECL_VALUE_EXPR (new_orig_var, t);
4822 DECL_HAS_VALUE_EXPR_P (new_orig_var) = 1;
4824 else
4826 x = build2 (MEM_REF, TREE_TYPE (new_var), x,
4827 build_int_cst (ptype, 0));
4828 SET_DECL_VALUE_EXPR (new_var, x);
4829 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4832 else
4834 gcc_assert (orig_var == var);
4835 if (TREE_CODE (d) == INDIRECT_REF)
4837 x = create_tmp_var (ptype, name);
4838 TREE_ADDRESSABLE (x) = 1;
4839 gimplify_assign (x, yb, ilist);
4840 x = build_fold_addr_expr_loc (clause_loc, x);
4842 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
4843 gimplify_assign (new_var, x, ilist);
4845 /* GOMP_taskgroup_reduction_register memsets the whole
4846 array to zero.  If the initializer is zero, we don't
4847 need to initialize it again; just mark it as ever
4848 used unconditionally, i.e. cond = true. */
4849 if (cond
4850 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) == NULL_TREE
4851 && initializer_zerop (omp_reduction_init (c,
4852 TREE_TYPE (type))))
4854 gimple *g = gimple_build_assign (build_simple_mem_ref (cond),
4855 boolean_true_node);
4856 gimple_seq_add_stmt (ilist, g);
4857 continue;
4859 tree end = create_artificial_label (UNKNOWN_LOCATION);
4860 if (cond)
4862 gimple *g;
4863 if (!is_parallel_ctx (ctx))
4865 tree condv = create_tmp_var (boolean_type_node);
4866 g = gimple_build_assign (condv,
4867 build_simple_mem_ref (cond));
4868 gimple_seq_add_stmt (ilist, g);
4869 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
4870 g = gimple_build_cond (NE_EXPR, condv,
4871 boolean_false_node, end, lab1);
4872 gimple_seq_add_stmt (ilist, g);
4873 gimple_seq_add_stmt (ilist, gimple_build_label (lab1));
4875 g = gimple_build_assign (build_simple_mem_ref (cond),
4876 boolean_true_node);
4877 gimple_seq_add_stmt (ilist, g);
4880 tree y1 = create_tmp_var (ptype);
4881 gimplify_assign (y1, y, ilist);
4882 tree i2 = NULL_TREE, y2 = NULL_TREE;
4883 tree body2 = NULL_TREE, end2 = NULL_TREE;
4884 tree y3 = NULL_TREE, y4 = NULL_TREE;
4885 if (task_reduction_needs_orig_p)
4887 y3 = create_tmp_var (ptype);
4888 tree ref;
4889 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
4890 ref = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
4891 size_int (task_reduction_cnt_full
4892 + task_reduction_cntorig - 1),
4893 NULL_TREE, NULL_TREE);
4894 else
4896 unsigned int idx = *ctx->task_reduction_map->get (c);
4897 ref = task_reduction_read (ilist, tskred_temp, ptype,
4898 7 + 3 * idx);
4900 gimplify_assign (y3, ref, ilist);
4902 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) || is_simd)
4904 if (pass != 3)
4906 y2 = create_tmp_var (ptype);
4907 gimplify_assign (y2, y, ilist);
4909 if (is_simd || OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
4911 tree ref = build_outer_var_ref (var, ctx);
4912 /* For references, build_outer_var_ref already performs this. */
4913 if (TREE_CODE (d) == INDIRECT_REF)
4914 gcc_assert (omp_is_reference (var));
4915 else if (TREE_CODE (d) == ADDR_EXPR)
4916 ref = build_fold_addr_expr (ref);
4917 else if (omp_is_reference (var))
4918 ref = build_fold_addr_expr (ref);
4919 ref = fold_convert_loc (clause_loc, ptype, ref);
4920 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
4921 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
4923 y3 = create_tmp_var (ptype);
4924 gimplify_assign (y3, unshare_expr (ref), ilist);
4926 if (is_simd)
4928 y4 = create_tmp_var (ptype);
4929 gimplify_assign (y4, ref, dlist);
4933 tree i = create_tmp_var (TREE_TYPE (v));
4934 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), ilist);
4935 tree body = create_artificial_label (UNKNOWN_LOCATION);
4936 gimple_seq_add_stmt (ilist, gimple_build_label (body));
4937 if (y2)
4939 i2 = create_tmp_var (TREE_TYPE (v));
4940 gimplify_assign (i2, build_int_cst (TREE_TYPE (v), 0), dlist);
4941 body2 = create_artificial_label (UNKNOWN_LOCATION);
4942 end2 = create_artificial_label (UNKNOWN_LOCATION);
4943 gimple_seq_add_stmt (dlist, gimple_build_label (body2));
4945 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4947 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
4948 tree decl_placeholder
4949 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
4950 SET_DECL_VALUE_EXPR (decl_placeholder,
4951 build_simple_mem_ref (y1));
4952 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
4953 SET_DECL_VALUE_EXPR (placeholder,
4954 y3 ? build_simple_mem_ref (y3)
4955 : error_mark_node);
4956 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
4957 x = lang_hooks.decls.omp_clause_default_ctor
4958 (c, build_simple_mem_ref (y1),
4959 y3 ? build_simple_mem_ref (y3) : NULL_TREE);
4960 if (x)
4961 gimplify_and_add (x, ilist);
4962 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
4964 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
4965 lower_omp (&tseq, ctx);
4966 gimple_seq_add_seq (ilist, tseq);
4968 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
4969 if (is_simd)
4971 SET_DECL_VALUE_EXPR (decl_placeholder,
4972 build_simple_mem_ref (y2));
4973 SET_DECL_VALUE_EXPR (placeholder,
4974 build_simple_mem_ref (y4));
4975 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
4976 lower_omp (&tseq, ctx);
4977 gimple_seq_add_seq (dlist, tseq);
4978 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
4980 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
4981 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 0;
4982 if (y2)
4984 x = lang_hooks.decls.omp_clause_dtor
4985 (c, build_simple_mem_ref (y2));
4986 if (x)
4987 gimplify_and_add (x, dlist);
4990 else
4992 x = omp_reduction_init (c, TREE_TYPE (type));
4993 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
4995 /* reduction(-:var) sums up the partial results, so it
4996 acts identically to reduction(+:var). */
4997 if (code == MINUS_EXPR)
4998 code = PLUS_EXPR;
5000 gimplify_assign (build_simple_mem_ref (y1), x, ilist);
5001 if (is_simd)
5003 x = build2 (code, TREE_TYPE (type),
5004 build_simple_mem_ref (y4),
5005 build_simple_mem_ref (y2));
5006 gimplify_assign (build_simple_mem_ref (y4), x, dlist);
5009 gimple *g
5010 = gimple_build_assign (y1, POINTER_PLUS_EXPR, y1,
5011 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5012 gimple_seq_add_stmt (ilist, g);
5013 if (y3)
5015 g = gimple_build_assign (y3, POINTER_PLUS_EXPR, y3,
5016 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5017 gimple_seq_add_stmt (ilist, g);
5019 g = gimple_build_assign (i, PLUS_EXPR, i,
5020 build_int_cst (TREE_TYPE (i), 1));
5021 gimple_seq_add_stmt (ilist, g);
5022 g = gimple_build_cond (LE_EXPR, i, v, body, end);
5023 gimple_seq_add_stmt (ilist, g);
5024 gimple_seq_add_stmt (ilist, gimple_build_label (end));
5025 if (y2)
5027 g = gimple_build_assign (y2, POINTER_PLUS_EXPR, y2,
5028 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5029 gimple_seq_add_stmt (dlist, g);
5030 if (y4)
5032 g = gimple_build_assign
5033 (y4, POINTER_PLUS_EXPR, y4,
5034 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5035 gimple_seq_add_stmt (dlist, g);
5037 g = gimple_build_assign (i2, PLUS_EXPR, i2,
5038 build_int_cst (TREE_TYPE (i2), 1));
5039 gimple_seq_add_stmt (dlist, g);
5040 g = gimple_build_cond (LE_EXPR, i2, v, body2, end2);
5041 gimple_seq_add_stmt (dlist, g);
5042 gimple_seq_add_stmt (dlist, gimple_build_label (end2));
5044 continue;
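/* The per-element initialization built above amounts to the following
   loop, with V the maximum array index and Y1/Y2/Y3/Y4 the element
   pointers set up earlier (illustration only):

     i = 0;
   body:
     *y1 = init;                  -- or the placeholder construction
     y1++;                        -- and y2/y3/y4 likewise when present
     i++;
     if (i <= v) goto body;
   end:  */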
5046 else if (pass == 2)
5048 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
5049 x = var;
5050 else
5052 bool by_ref = use_pointer_for_field (var, ctx);
5053 x = build_receiver_ref (var, by_ref, ctx);
5055 if (!omp_is_reference (var))
5056 x = build_fold_addr_expr (x);
5057 x = fold_convert (ptr_type_node, x);
5058 unsigned cnt = task_reduction_cnt - 1;
5059 if (!task_reduction_needs_orig_p)
5060 cnt += task_reduction_cntorig_full - task_reduction_cntorig;
5061 else
5062 cnt = task_reduction_cntorig - 1;
5063 tree r = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5064 size_int (cnt), NULL_TREE, NULL_TREE);
5065 gimplify_assign (r, x, ilist);
5066 continue;
5068 else if (pass == 3)
5070 tree type = TREE_TYPE (new_var);
5071 if (!omp_is_reference (var))
5072 type = build_pointer_type (type);
5073 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5075 unsigned cnt = task_reduction_cnt - 1;
5076 if (!task_reduction_needs_orig_p)
5077 cnt += (task_reduction_cntorig_full
5078 - task_reduction_cntorig);
5079 else
5080 cnt = task_reduction_cntorig - 1;
5081 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5082 size_int (cnt), NULL_TREE, NULL_TREE);
5084 else
5086 unsigned int idx = *ctx->task_reduction_map->get (c);
5087 tree off;
5088 if (ctx->task_reductions[1 + idx])
5089 off = fold_convert (sizetype,
5090 ctx->task_reductions[1 + idx]);
5091 else
5092 off = task_reduction_read (ilist, tskred_temp, sizetype,
5093 7 + 3 * idx + 1);
5094 x = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node,
5095 tskred_base, off);
5097 x = fold_convert (type, x);
5098 tree t;
5099 if (omp_is_reference (var))
5101 gimplify_assign (new_var, x, ilist);
5102 t = new_var;
5103 new_var = build_simple_mem_ref (new_var);
5105 else
5107 t = create_tmp_var (type);
5108 gimplify_assign (t, x, ilist);
5109 SET_DECL_VALUE_EXPR (new_var, build_simple_mem_ref (t));
5110 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5112 t = fold_convert (build_pointer_type (boolean_type_node), t);
5113 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t,
5114 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5115 cond = create_tmp_var (TREE_TYPE (t));
5116 gimplify_assign (cond, t, ilist);
5118 else if (is_variable_sized (var))
5120 /* For variable sized types, we need to allocate the
5121 actual storage here. Call alloca and store the
5122 result in the pointer decl that we created elsewhere. */
5123 if (pass == 0)
5124 continue;
5126 if (c_kind != OMP_CLAUSE_FIRSTPRIVATE || !is_task_ctx (ctx))
5128 gcall *stmt;
5129 tree tmp, atmp;
5131 ptr = DECL_VALUE_EXPR (new_var);
5132 gcc_assert (TREE_CODE (ptr) == INDIRECT_REF);
5133 ptr = TREE_OPERAND (ptr, 0);
5134 gcc_assert (DECL_P (ptr));
5135 x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
5137 /* void *tmp = __builtin_alloca_with_align (x, align); */
5138 atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
5139 stmt = gimple_build_call (atmp, 2, x,
5140 size_int (DECL_ALIGN (var)));
5141 cfun->calls_alloca = 1;
5142 tmp = create_tmp_var_raw (ptr_type_node);
5143 gimple_add_tmp_var (tmp);
5144 gimple_call_set_lhs (stmt, tmp);
5146 gimple_seq_add_stmt (ilist, stmt);
5148 x = fold_convert_loc (clause_loc, TREE_TYPE (ptr), tmp);
5149 gimplify_assign (ptr, x, ilist);
5152 else if (omp_is_reference (var)
5153 && (c_kind != OMP_CLAUSE_FIRSTPRIVATE
5154 || !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c)))
5156 /* For references that are being privatized for Fortran,
5157 allocate new backing storage for the new pointer
5158 variable. This allows us to avoid changing all the
5159 code that expects a pointer to something that expects
5160 a direct variable. */
5161 if (pass == 0)
5162 continue;
5164 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
5165 if (c_kind == OMP_CLAUSE_FIRSTPRIVATE && is_task_ctx (ctx))
5167 x = build_receiver_ref (var, false, ctx);
5168 x = build_fold_addr_expr_loc (clause_loc, x);
5170 else if (TREE_CONSTANT (x))
5172 /* For a reduction in a SIMD loop, defer adding the
5173 initialization of the reference, because if we decide
5174 to use a SIMD array for it, the initialization could
5175 cause an expansion ICE.  Ditto for other privatization clauses. */
5176 if (is_simd)
5177 x = NULL_TREE;
5178 else
5180 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
5181 get_name (var));
5182 gimple_add_tmp_var (x);
5183 TREE_ADDRESSABLE (x) = 1;
5184 x = build_fold_addr_expr_loc (clause_loc, x);
5187 else
5189 tree atmp
5190 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
5191 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
5192 tree al = size_int (TYPE_ALIGN (rtype));
5193 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
5196 if (x)
5198 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
5199 gimplify_assign (new_var, x, ilist);
5202 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
5204 else if ((c_kind == OMP_CLAUSE_REDUCTION
5205 || c_kind == OMP_CLAUSE_IN_REDUCTION)
5206 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5208 if (pass == 0)
5209 continue;
5211 else if (pass != 0)
5212 continue;
5214 switch (OMP_CLAUSE_CODE (c))
5216 case OMP_CLAUSE_SHARED:
5217 /* Ignore shared directives in a teams construct inside
5218 a target construct. */
5219 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
5220 && !is_host_teams_ctx (ctx))
5221 continue;
5222 /* Shared global vars are just accessed directly. */
5223 if (is_global_var (new_var))
5224 break;
5225 /* For taskloop firstprivate/lastprivate, represented
5226 as firstprivate and shared clause on the task, new_var
5227 is the firstprivate var. */
5228 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
5229 break;
5230 /* Set up the DECL_VALUE_EXPR for shared variables now. This
5231 needs to be delayed until after fixup_child_record_type so
5232 that we get the correct type during the dereference. */
5233 by_ref = use_pointer_for_field (var, ctx);
5234 x = build_receiver_ref (var, by_ref, ctx);
5235 SET_DECL_VALUE_EXPR (new_var, x);
5236 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5238 /* ??? If VAR is not passed by reference, and the variable
5239 hasn't been initialized yet, then we'll get a warning for
5240 the store into the omp_data_s structure. Ideally, we'd be
5241 able to notice this and not store anything at all, but
5242 we're generating code too early. Suppress the warning. */
5243 if (!by_ref)
5244 TREE_NO_WARNING (var) = 1;
5245 break;
5247 case OMP_CLAUSE__CONDTEMP_:
5248 if (is_parallel_ctx (ctx))
5250 x = build_receiver_ref (var, false, ctx);
5251 SET_DECL_VALUE_EXPR (new_var, x);
5252 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5254 else if (is_simd && !OMP_CLAUSE__CONDTEMP__ITER (c))
5256 x = build_zero_cst (TREE_TYPE (var));
5257 goto do_private;
5259 break;
5261 case OMP_CLAUSE_LASTPRIVATE:
5262 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
5263 break;
5264 /* FALLTHRU */
5266 case OMP_CLAUSE_PRIVATE:
5267 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE)
5268 x = build_outer_var_ref (var, ctx);
5269 else if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
5271 if (is_task_ctx (ctx))
5272 x = build_receiver_ref (var, false, ctx);
5273 else
5274 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_PRIVATE);
5276 else
5277 x = NULL;
5278 do_private:
5279 tree nx;
5280 bool copy_ctor;
5281 copy_ctor = false;
5282 nx = unshare_expr (new_var);
5283 if (is_simd
5284 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5285 && OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c))
5286 copy_ctor = true;
5287 if (copy_ctor)
5288 nx = lang_hooks.decls.omp_clause_copy_ctor (c, nx, x);
5289 else
5290 nx = lang_hooks.decls.omp_clause_default_ctor (c, nx, x);
5291 if (is_simd)
5293 tree y = lang_hooks.decls.omp_clause_dtor (c, new_var);
5294 if ((TREE_ADDRESSABLE (new_var) || nx || y
5295 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5296 && (gimple_omp_for_collapse (ctx->stmt) != 1
5297 || (gimple_omp_for_index (ctx->stmt, 0)
5298 != new_var)))
5299 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE__CONDTEMP_
5300 || omp_is_reference (var))
5301 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
5302 ivar, lvar))
5304 if (omp_is_reference (var))
5306 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5307 tree new_vard = TREE_OPERAND (new_var, 0);
5308 gcc_assert (DECL_P (new_vard));
5309 SET_DECL_VALUE_EXPR (new_vard,
5310 build_fold_addr_expr (lvar));
5311 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
5314 if (nx)
5316 tree iv = unshare_expr (ivar);
5317 if (copy_ctor)
5318 x = lang_hooks.decls.omp_clause_copy_ctor (c, iv, x);
5320 else
5321 x = lang_hooks.decls.omp_clause_default_ctor (c, iv, x);
5325 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE__CONDTEMP_)
5327 x = build2 (MODIFY_EXPR, TREE_TYPE (ivar),
5328 unshare_expr (ivar), x);
5329 nx = x;
5331 if (nx && x)
5332 gimplify_and_add (x, &llist[0]);
5333 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5334 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
5336 tree v = new_var;
5337 if (!DECL_P (v))
5339 gcc_assert (TREE_CODE (v) == MEM_REF);
5340 v = TREE_OPERAND (v, 0);
5341 gcc_assert (DECL_P (v));
5343 v = *ctx->lastprivate_conditional_map->get (v);
5344 tree t = create_tmp_var (TREE_TYPE (v));
5345 tree z = build_zero_cst (TREE_TYPE (v));
5346 tree orig_v
5347 = build_outer_var_ref (var, ctx,
5348 OMP_CLAUSE_LASTPRIVATE);
5349 gimple_seq_add_stmt (dlist,
5350 gimple_build_assign (t, z));
5351 gcc_assert (DECL_HAS_VALUE_EXPR_P (v));
5352 tree civar = DECL_VALUE_EXPR (v);
5353 gcc_assert (TREE_CODE (civar) == ARRAY_REF);
5354 civar = unshare_expr (civar);
5355 TREE_OPERAND (civar, 1) = sctx.idx;
5356 x = build2 (MODIFY_EXPR, TREE_TYPE (t), t,
5357 unshare_expr (civar));
5358 x = build2 (COMPOUND_EXPR, TREE_TYPE (orig_v), x,
5359 build2 (MODIFY_EXPR, TREE_TYPE (orig_v),
5360 orig_v, unshare_expr (ivar)));
5361 tree cond = build2 (LT_EXPR, boolean_type_node, t,
5362 civar);
5363 x = build3 (COND_EXPR, void_type_node, cond, x,
5364 void_node);
5365 gimple_seq tseq = NULL;
5366 gimplify_and_add (x, &tseq);
5367 if (ctx->outer)
5368 lower_omp (&tseq, ctx->outer);
5369 gimple_seq_add_seq (&llist[1], tseq);
5371 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5372 && ctx->for_simd_scan_phase)
5374 x = unshare_expr (ivar);
5375 tree orig_v
5376 = build_outer_var_ref (var, ctx,
5377 OMP_CLAUSE_LASTPRIVATE);
5378 x = lang_hooks.decls.omp_clause_assign_op (c, x,
5379 orig_v);
5380 gimplify_and_add (x, &llist[0]);
5382 if (y)
5384 y = lang_hooks.decls.omp_clause_dtor (c, ivar);
5385 if (y)
5386 gimplify_and_add (y, &llist[1]);
5388 break;
5390 if (omp_is_reference (var))
5392 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5393 tree new_vard = TREE_OPERAND (new_var, 0);
5394 gcc_assert (DECL_P (new_vard));
5395 tree type = TREE_TYPE (TREE_TYPE (new_vard));
5396 x = TYPE_SIZE_UNIT (type);
5397 if (TREE_CONSTANT (x))
5399 x = create_tmp_var_raw (type, get_name (var));
5400 gimple_add_tmp_var (x);
5401 TREE_ADDRESSABLE (x) = 1;
5402 x = build_fold_addr_expr_loc (clause_loc, x);
5403 x = fold_convert_loc (clause_loc,
5404 TREE_TYPE (new_vard), x);
5405 gimplify_assign (new_vard, x, ilist);
5409 if (nx)
5410 gimplify_and_add (nx, ilist);
5411 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5412 && is_simd
5413 && ctx->for_simd_scan_phase)
5415 tree orig_v = build_outer_var_ref (var, ctx,
5416 OMP_CLAUSE_LASTPRIVATE);
5417 x = lang_hooks.decls.omp_clause_assign_op (c, new_var,
5418 orig_v);
5419 gimplify_and_add (x, ilist);
5421 /* FALLTHRU */
5423 do_dtor:
5424 x = lang_hooks.decls.omp_clause_dtor (c, new_var);
5425 if (x)
5426 gimplify_and_add (x, dlist);
5427 break;
5429 case OMP_CLAUSE_LINEAR:
5430 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
5431 goto do_firstprivate;
5432 if (OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
5433 x = NULL;
5434 else
5435 x = build_outer_var_ref (var, ctx);
5436 goto do_private;
5438 case OMP_CLAUSE_FIRSTPRIVATE:
5439 if (is_task_ctx (ctx))
5441 if ((omp_is_reference (var)
5442 && !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c))
5443 || is_variable_sized (var))
5444 goto do_dtor;
5445 else if (is_global_var (maybe_lookup_decl_in_outer_ctx (var,
5446 ctx))
5447 || use_pointer_for_field (var, NULL))
5449 x = build_receiver_ref (var, false, ctx);
5450 SET_DECL_VALUE_EXPR (new_var, x);
5451 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5452 goto do_dtor;
5455 if (OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c)
5456 && omp_is_reference (var))
5458 x = build_outer_var_ref (var, ctx);
5459 gcc_assert (TREE_CODE (x) == MEM_REF
5460 && integer_zerop (TREE_OPERAND (x, 1)));
5461 x = TREE_OPERAND (x, 0);
5462 x = lang_hooks.decls.omp_clause_copy_ctor
5463 (c, unshare_expr (new_var), x);
5464 gimplify_and_add (x, ilist);
5465 goto do_dtor;
5467 do_firstprivate:
5468 x = build_outer_var_ref (var, ctx);
5469 if (is_simd)
5471 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
5472 && gimple_omp_for_combined_into_p (ctx->stmt))
5474 tree t = OMP_CLAUSE_LINEAR_STEP (c);
5475 tree stept = TREE_TYPE (t);
5476 tree ct = omp_find_clause (clauses,
5477 OMP_CLAUSE__LOOPTEMP_);
5478 gcc_assert (ct);
5479 tree l = OMP_CLAUSE_DECL (ct);
5480 tree n1 = fd->loop.n1;
5481 tree step = fd->loop.step;
5482 tree itype = TREE_TYPE (l);
5483 if (POINTER_TYPE_P (itype))
5484 itype = signed_type_for (itype);
5485 l = fold_build2 (MINUS_EXPR, itype, l, n1);
5486 if (TYPE_UNSIGNED (itype)
5487 && fd->loop.cond_code == GT_EXPR)
5488 l = fold_build2 (TRUNC_DIV_EXPR, itype,
5489 fold_build1 (NEGATE_EXPR, itype, l),
5490 fold_build1 (NEGATE_EXPR,
5491 itype, step));
5492 else
5493 l = fold_build2 (TRUNC_DIV_EXPR, itype, l, step);
5494 t = fold_build2 (MULT_EXPR, stept,
5495 fold_convert (stept, l), t);
5497 if (OMP_CLAUSE_LINEAR_ARRAY (c))
5499 if (omp_is_reference (var))
5501 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5502 tree new_vard = TREE_OPERAND (new_var, 0);
5503 gcc_assert (DECL_P (new_vard));
5504 tree type = TREE_TYPE (TREE_TYPE (new_vard));
5505 nx = TYPE_SIZE_UNIT (type);
5506 if (TREE_CONSTANT (nx))
5508 nx = create_tmp_var_raw (type,
5509 get_name (var));
5510 gimple_add_tmp_var (nx);
5511 TREE_ADDRESSABLE (nx) = 1;
5512 nx = build_fold_addr_expr_loc (clause_loc,
5513 nx);
5514 nx = fold_convert_loc (clause_loc,
5515 TREE_TYPE (new_vard),
5516 nx);
5517 gimplify_assign (new_vard, nx, ilist);
5521 x = lang_hooks.decls.omp_clause_linear_ctor
5522 (c, new_var, x, t);
5523 gimplify_and_add (x, ilist);
5524 goto do_dtor;
5527 if (POINTER_TYPE_P (TREE_TYPE (x)))
5528 x = fold_build2 (POINTER_PLUS_EXPR,
5529 TREE_TYPE (x), x, t);
5530 else
5531 x = fold_build2 (PLUS_EXPR, TREE_TYPE (x), x, t);
5534 if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LINEAR
5535 || TREE_ADDRESSABLE (new_var)
5536 || omp_is_reference (var))
5537 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
5538 ivar, lvar))
5540 if (omp_is_reference (var))
5542 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5543 tree new_vard = TREE_OPERAND (new_var, 0);
5544 gcc_assert (DECL_P (new_vard));
5545 SET_DECL_VALUE_EXPR (new_vard,
5546 build_fold_addr_expr (lvar));
5547 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
5549 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR)
5551 tree iv = create_tmp_var (TREE_TYPE (new_var));
5552 x = lang_hooks.decls.omp_clause_copy_ctor (c, iv, x);
5553 gimplify_and_add (x, ilist);
5554 gimple_stmt_iterator gsi
5555 = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
5556 gassign *g
5557 = gimple_build_assign (unshare_expr (lvar), iv);
5558 gsi_insert_before_without_update (&gsi, g,
5559 GSI_SAME_STMT);
5560 tree t = OMP_CLAUSE_LINEAR_STEP (c);
5561 enum tree_code code = PLUS_EXPR;
5562 if (POINTER_TYPE_P (TREE_TYPE (new_var)))
5563 code = POINTER_PLUS_EXPR;
5564 g = gimple_build_assign (iv, code, iv, t);
5565 gsi_insert_before_without_update (&gsi, g,
5566 GSI_SAME_STMT);
5567 break;
5569 x = lang_hooks.decls.omp_clause_copy_ctor
5570 (c, unshare_expr (ivar), x);
5571 gimplify_and_add (x, &llist[0]);
5572 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
5573 if (x)
5574 gimplify_and_add (x, &llist[1]);
5575 break;
5577 if (omp_is_reference (var))
5579 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5580 tree new_vard = TREE_OPERAND (new_var, 0);
5581 gcc_assert (DECL_P (new_vard));
5582 tree type = TREE_TYPE (TREE_TYPE (new_vard));
5583 nx = TYPE_SIZE_UNIT (type);
5584 if (TREE_CONSTANT (nx))
5586 nx = create_tmp_var_raw (type, get_name (var));
5587 gimple_add_tmp_var (nx);
5588 TREE_ADDRESSABLE (nx) = 1;
5589 nx = build_fold_addr_expr_loc (clause_loc, nx);
5590 nx = fold_convert_loc (clause_loc,
5591 TREE_TYPE (new_vard), nx);
5592 gimplify_assign (new_vard, nx, ilist);
5596 x = lang_hooks.decls.omp_clause_copy_ctor
5597 (c, unshare_expr (new_var), x);
5598 gimplify_and_add (x, ilist);
5599 goto do_dtor;
5601 case OMP_CLAUSE__LOOPTEMP_:
5602 case OMP_CLAUSE__REDUCTEMP_:
5603 gcc_assert (is_taskreg_ctx (ctx));
5604 x = build_outer_var_ref (var, ctx);
5605 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
5606 gimplify_and_add (x, ilist);
5607 break;
5609 case OMP_CLAUSE_COPYIN:
5610 by_ref = use_pointer_for_field (var, NULL);
5611 x = build_receiver_ref (var, by_ref, ctx);
5612 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, x);
5613 append_to_statement_list (x, &copyin_seq);
5614 copyin_by_ref |= by_ref;
5615 break;
5617 case OMP_CLAUSE_REDUCTION:
5618 case OMP_CLAUSE_IN_REDUCTION:
5619 /* OpenACC reductions are initialized using the
5620 GOACC_REDUCTION internal function. */
5621 if (is_gimple_omp_oacc (ctx->stmt))
5622 break;
5623 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5625 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
5626 gimple *tseq;
5627 tree ptype = TREE_TYPE (placeholder);
5628 if (cond)
5630 x = error_mark_node;
5631 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c)
5632 && !task_reduction_needs_orig_p)
5633 x = var;
5634 else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
5636 tree pptype = build_pointer_type (ptype);
5637 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5638 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5639 size_int (task_reduction_cnt_full
5640 + task_reduction_cntorig - 1),
5641 NULL_TREE, NULL_TREE);
5642 else
5644 unsigned int idx
5645 = *ctx->task_reduction_map->get (c);
5646 x = task_reduction_read (ilist, tskred_temp,
5647 pptype, 7 + 3 * idx);
5649 x = fold_convert (pptype, x);
5650 x = build_simple_mem_ref (x);
5653 else
5655 x = build_outer_var_ref (var, ctx);
5657 if (omp_is_reference (var)
5658 && !useless_type_conversion_p (ptype, TREE_TYPE (x)))
5659 x = build_fold_addr_expr_loc (clause_loc, x);
5661 SET_DECL_VALUE_EXPR (placeholder, x);
5662 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
5663 tree new_vard = new_var;
5664 if (omp_is_reference (var))
5666 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5667 new_vard = TREE_OPERAND (new_var, 0);
5668 gcc_assert (DECL_P (new_vard));
5670 tree rvar = NULL_TREE, *rvarp = NULL, rvar2 = NULL_TREE;
5671 if (is_simd
5672 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
5673 && OMP_CLAUSE_REDUCTION_INSCAN (c))
5674 rvarp = &rvar;
5675 if (is_simd
5676 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
5677 ivar, lvar, rvarp,
5678 &rvar2))
5680 if (new_vard == new_var)
5682 gcc_assert (DECL_VALUE_EXPR (new_var) == lvar);
5683 SET_DECL_VALUE_EXPR (new_var, ivar);
5685 else
5687 SET_DECL_VALUE_EXPR (new_vard,
5688 build_fold_addr_expr (ivar));
5689 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
5691 x = lang_hooks.decls.omp_clause_default_ctor
5692 (c, unshare_expr (ivar),
5693 build_outer_var_ref (var, ctx));
5694 if (rvarp && ctx->for_simd_scan_phase)
5696 if (x)
5697 gimplify_and_add (x, &llist[0]);
5698 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
5699 if (x)
5700 gimplify_and_add (x, &llist[1]);
5701 break;
5703 else if (rvarp)
5705 if (x)
5707 gimplify_and_add (x, &llist[0]);
5709 tree ivar2 = unshare_expr (lvar);
5710 TREE_OPERAND (ivar2, 1) = sctx.idx;
5711 x = lang_hooks.decls.omp_clause_default_ctor
5712 (c, ivar2, build_outer_var_ref (var, ctx));
5713 gimplify_and_add (x, &llist[0]);
5715 if (rvar2)
5717 x = lang_hooks.decls.omp_clause_default_ctor
5718 (c, unshare_expr (rvar2),
5719 build_outer_var_ref (var, ctx));
5720 gimplify_and_add (x, &llist[0]);
5723 /* For types that need construction, add another
5724 private var which will be default constructed
5725 and optionally initialized with
5726 OMP_CLAUSE_REDUCTION_GIMPLE_INIT; in the loop
5727 we then want to assign this value instead of
5728 constructing and destructing it in each
5729 iteration. */
5730 tree nv = create_tmp_var_raw (TREE_TYPE (ivar));
5731 gimple_add_tmp_var (nv);
5732 ctx->cb.decl_map->put (TREE_OPERAND (rvar2
5733 ? rvar2
5734 : ivar, 0),
5735 nv);
5736 x = lang_hooks.decls.omp_clause_default_ctor
5737 (c, nv, build_outer_var_ref (var, ctx));
5738 gimplify_and_add (x, ilist);
5740 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
5742 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
5743 x = DECL_VALUE_EXPR (new_vard);
5744 tree vexpr = nv;
5745 if (new_vard != new_var)
5746 vexpr = build_fold_addr_expr (nv);
5747 SET_DECL_VALUE_EXPR (new_vard, vexpr);
5748 lower_omp (&tseq, ctx);
5749 SET_DECL_VALUE_EXPR (new_vard, x);
5750 gimple_seq_add_seq (ilist, tseq);
5751 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
5754 x = lang_hooks.decls.omp_clause_dtor (c, nv);
5755 if (x)
5756 gimplify_and_add (x, dlist);
5759 tree ref = build_outer_var_ref (var, ctx);
5760 x = unshare_expr (ivar);
5761 x = lang_hooks.decls.omp_clause_assign_op (c, x,
5762 ref);
5763 gimplify_and_add (x, &llist[0]);
5765 ref = build_outer_var_ref (var, ctx);
5766 x = lang_hooks.decls.omp_clause_assign_op (c, ref,
5767 rvar);
5768 gimplify_and_add (x, &llist[3]);
5770 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
5771 if (new_vard == new_var)
5772 SET_DECL_VALUE_EXPR (new_var, lvar);
5773 else
5774 SET_DECL_VALUE_EXPR (new_vard,
5775 build_fold_addr_expr (lvar));
5777 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
5778 if (x)
5779 gimplify_and_add (x, &llist[1]);
5781 tree ivar2 = unshare_expr (lvar);
5782 TREE_OPERAND (ivar2, 1) = sctx.idx;
5783 x = lang_hooks.decls.omp_clause_dtor (c, ivar2);
5784 if (x)
5785 gimplify_and_add (x, &llist[1]);
5787 if (rvar2)
5789 x = lang_hooks.decls.omp_clause_dtor (c, rvar2);
5790 if (x)
5791 gimplify_and_add (x, &llist[1]);
5793 break;
5795 if (x)
5796 gimplify_and_add (x, &llist[0]);
5797 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
5799 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
5800 lower_omp (&tseq, ctx);
5801 gimple_seq_add_seq (&llist[0], tseq);
5803 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
5804 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
5805 lower_omp (&tseq, ctx);
5806 gimple_seq_add_seq (&llist[1], tseq);
5807 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5808 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
5809 if (new_vard == new_var)
5810 SET_DECL_VALUE_EXPR (new_var, lvar);
5811 else
5812 SET_DECL_VALUE_EXPR (new_vard,
5813 build_fold_addr_expr (lvar));
5814 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
5815 if (x)
5816 gimplify_and_add (x, &llist[1]);
5817 break;
5819 /* If this is a reference to a constant-size reduction var
5820 with a placeholder, we haven't emitted the initializer
5821 for it yet, because that is undesirable if SIMD arrays
5822 are used. But if they aren't used, we need to emit the
5823 deferred initialization now. */
5824 else if (omp_is_reference (var) && is_simd)
5825 handle_simd_reference (clause_loc, new_vard, ilist);
5827 tree lab2 = NULL_TREE;
5828 if (cond)
5830 gimple *g;
5831 if (!is_parallel_ctx (ctx))
5833 tree condv = create_tmp_var (boolean_type_node);
5834 tree m = build_simple_mem_ref (cond);
5835 g = gimple_build_assign (condv, m);
5836 gimple_seq_add_stmt (ilist, g);
5837 tree lab1
5838 = create_artificial_label (UNKNOWN_LOCATION);
5839 lab2 = create_artificial_label (UNKNOWN_LOCATION);
5840 g = gimple_build_cond (NE_EXPR, condv,
5841 boolean_false_node,
5842 lab2, lab1);
5843 gimple_seq_add_stmt (ilist, g);
5844 gimple_seq_add_stmt (ilist,
5845 gimple_build_label (lab1));
5847 g = gimple_build_assign (build_simple_mem_ref (cond),
5848 boolean_true_node);
5849 gimple_seq_add_stmt (ilist, g);
5851 x = lang_hooks.decls.omp_clause_default_ctor
5852 (c, unshare_expr (new_var),
5853 cond ? NULL_TREE
5854 : build_outer_var_ref (var, ctx));
5855 if (x)
5856 gimplify_and_add (x, ilist);
5858 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
5859 && OMP_CLAUSE_REDUCTION_INSCAN (c))
5861 if (ctx->for_simd_scan_phase)
5862 goto do_dtor;
5863 if (x || (!is_simd
5864 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c)))
5866 tree nv = create_tmp_var_raw (TREE_TYPE (new_var));
5867 gimple_add_tmp_var (nv);
5868 ctx->cb.decl_map->put (new_vard, nv);
5869 x = lang_hooks.decls.omp_clause_default_ctor
5870 (c, nv, build_outer_var_ref (var, ctx));
5871 if (x)
5872 gimplify_and_add (x, ilist);
5873 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
5875 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
5876 tree vexpr = nv;
5877 if (new_vard != new_var)
5878 vexpr = build_fold_addr_expr (nv);
5879 SET_DECL_VALUE_EXPR (new_vard, vexpr);
5880 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
5881 lower_omp (&tseq, ctx);
5882 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
5883 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
5884 gimple_seq_add_seq (ilist, tseq);
5886 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
5887 if (is_simd && ctx->scan_exclusive)
5889 tree nv2
5890 = create_tmp_var_raw (TREE_TYPE (new_var));
5891 gimple_add_tmp_var (nv2);
5892 ctx->cb.decl_map->put (nv, nv2);
5893 x = lang_hooks.decls.omp_clause_default_ctor
5894 (c, nv2, build_outer_var_ref (var, ctx));
5895 gimplify_and_add (x, ilist);
5896 x = lang_hooks.decls.omp_clause_dtor (c, nv2);
5897 if (x)
5898 gimplify_and_add (x, dlist);
5900 x = lang_hooks.decls.omp_clause_dtor (c, nv);
5901 if (x)
5902 gimplify_and_add (x, dlist);
5904 else if (is_simd
5905 && ctx->scan_exclusive
5906 && TREE_ADDRESSABLE (TREE_TYPE (new_var)))
5908 tree nv2 = create_tmp_var_raw (TREE_TYPE (new_var));
5909 gimple_add_tmp_var (nv2);
5910 ctx->cb.decl_map->put (new_vard, nv2);
5911 x = lang_hooks.decls.omp_clause_dtor (c, nv2);
5912 if (x)
5913 gimplify_and_add (x, dlist);
5915 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
5916 goto do_dtor;
5919 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
5921 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
5922 lower_omp (&tseq, ctx);
5923 gimple_seq_add_seq (ilist, tseq);
5925 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
5926 if (is_simd)
5928 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
5929 lower_omp (&tseq, ctx);
5930 gimple_seq_add_seq (dlist, tseq);
5931 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5933 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
5934 if (cond)
5936 if (lab2)
5937 gimple_seq_add_stmt (ilist, gimple_build_label (lab2));
5938 break;
5940 goto do_dtor;
5942 else
5944 x = omp_reduction_init (c, TREE_TYPE (new_var));
5945 gcc_assert (TREE_CODE (TREE_TYPE (new_var)) != ARRAY_TYPE);
5946 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
5948 if (cond)
5950 gimple *g;
5951 tree lab2 = NULL_TREE;
5952 /* GOMP_taskgroup_reduction_register memsets the whole
5953 array to zero. If the initializer is zero, we don't
5954 need to initialize it again, just mark it as ever
5955 used unconditionally, i.e. cond = true. */
5956 if (initializer_zerop (x))
5958 g = gimple_build_assign (build_simple_mem_ref (cond),
5959 boolean_true_node);
5960 gimple_seq_add_stmt (ilist, g);
5961 break;
5964 /* Otherwise, emit
5965 if (!cond) { cond = true; new_var = x; } */
5966 if (!is_parallel_ctx (ctx))
5968 tree condv = create_tmp_var (boolean_type_node);
5969 tree m = build_simple_mem_ref (cond);
5970 g = gimple_build_assign (condv, m);
5971 gimple_seq_add_stmt (ilist, g);
5972 tree lab1
5973 = create_artificial_label (UNKNOWN_LOCATION);
5974 lab2 = create_artificial_label (UNKNOWN_LOCATION);
5975 g = gimple_build_cond (NE_EXPR, condv,
5976 boolean_false_node,
5977 lab2, lab1);
5978 gimple_seq_add_stmt (ilist, g);
5979 gimple_seq_add_stmt (ilist,
5980 gimple_build_label (lab1));
5982 g = gimple_build_assign (build_simple_mem_ref (cond),
5983 boolean_true_node);
5984 gimple_seq_add_stmt (ilist, g);
5985 gimplify_assign (new_var, x, ilist);
5986 if (lab2)
5987 gimple_seq_add_stmt (ilist, gimple_build_label (lab2));
5988 break;
5991 /* reduction(-:var) sums up the partial results, so it
5992 acts identically to reduction(+:var). */
5993 if (code == MINUS_EXPR)
5994 code = PLUS_EXPR;
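/* A hedged illustration on hypothetical user code (not taken from
   this file): given

     #pragma omp parallel for reduction (-:sum)
     for (i = 0; i < n; i++) sum -= a[i];

   each thread accumulates a partial result locally, and the partial
   results are combined with '+', which is why MINUS_EXPR is
   canonicalized to PLUS_EXPR above.  */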
5996 tree new_vard = new_var;
5997 if (is_simd && omp_is_reference (var))
5999 gcc_assert (TREE_CODE (new_var) == MEM_REF);
6000 new_vard = TREE_OPERAND (new_var, 0);
6001 gcc_assert (DECL_P (new_vard));
6003 tree rvar = NULL_TREE, *rvarp = NULL, rvar2 = NULL_TREE;
6004 if (is_simd
6005 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6006 && OMP_CLAUSE_REDUCTION_INSCAN (c))
6007 rvarp = &rvar;
6008 if (is_simd
6009 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
6010 ivar, lvar, rvarp,
6011 &rvar2))
6013 if (new_vard != new_var)
6015 SET_DECL_VALUE_EXPR (new_vard,
6016 build_fold_addr_expr (lvar));
6017 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
6020 tree ref = build_outer_var_ref (var, ctx);
6022 if (rvarp)
6024 if (ctx->for_simd_scan_phase)
6025 break;
6026 gimplify_assign (ivar, ref, &llist[0]);
6027 ref = build_outer_var_ref (var, ctx);
6028 gimplify_assign (ref, rvar, &llist[3]);
6029 break;
6032 gimplify_assign (unshare_expr (ivar), x, &llist[0]);
6034 if (sctx.is_simt)
6036 if (!simt_lane)
6037 simt_lane = create_tmp_var (unsigned_type_node);
6038 x = build_call_expr_internal_loc
6039 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_BFLY,
6040 TREE_TYPE (ivar), 2, ivar, simt_lane);
6041 x = build2 (code, TREE_TYPE (ivar), ivar, x);
6042 gimplify_assign (ivar, x, &llist[2]);
6044 x = build2 (code, TREE_TYPE (ref), ref, ivar);
6045 ref = build_outer_var_ref (var, ctx);
6046 gimplify_assign (ref, x, &llist[1]);
6049 else
6051 if (omp_is_reference (var) && is_simd)
6052 handle_simd_reference (clause_loc, new_vard, ilist);
6053 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6054 && OMP_CLAUSE_REDUCTION_INSCAN (c))
6055 break;
6056 gimplify_assign (new_var, x, ilist);
6057 if (is_simd)
6059 tree ref = build_outer_var_ref (var, ctx);
6061 x = build2 (code, TREE_TYPE (ref), ref, new_var);
6062 ref = build_outer_var_ref (var, ctx);
6063 gimplify_assign (ref, x, dlist);
6067 break;
6069 default:
6070 gcc_unreachable ();
6074 if (tskred_avar)
6076 tree clobber = build_clobber (TREE_TYPE (tskred_avar));
6077 gimple_seq_add_stmt (ilist, gimple_build_assign (tskred_avar, clobber));
6080 if (known_eq (sctx.max_vf, 1U))
6082 sctx.is_simt = false;
6083 if (ctx->lastprivate_conditional_map)
6085 if (gimple_omp_for_combined_into_p (ctx->stmt))
6087 /* Signal to lower_omp_1 that it should use parent context. */
6088 ctx->combined_into_simd_safelen1 = true;
6089 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
6090 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6091 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
6093 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
6094 omp_context *outer = ctx->outer;
6095 if (gimple_code (outer->stmt) == GIMPLE_OMP_SCAN)
6096 outer = outer->outer;
6097 tree *v = ctx->lastprivate_conditional_map->get (o);
6098 tree po = lookup_decl (OMP_CLAUSE_DECL (c), outer);
6099 tree *pv = outer->lastprivate_conditional_map->get (po);
6100 *v = *pv;
6103 else
6105 /* When not vectorized, treat lastprivate(conditional:) like
6106 normal lastprivate, as there will be just one simd lane
6107 writing the privatized variable. */
6108 delete ctx->lastprivate_conditional_map;
6109 ctx->lastprivate_conditional_map = NULL;
6114 if (nonconst_simd_if)
6116 if (sctx.lane == NULL_TREE)
6118 sctx.idx = create_tmp_var (unsigned_type_node);
6119 sctx.lane = create_tmp_var (unsigned_type_node);
6121 /* FIXME: For now. */
6122 sctx.is_simt = false;
6125 if (sctx.lane || sctx.is_simt)
6127 uid = create_tmp_var (ptr_type_node, "simduid");
6128 /* We don't want uninit warnings on simduid; it is always uninitialized,
6129 since we use it not for its value but only for its DECL_UID. */
6130 TREE_NO_WARNING (uid) = 1;
6131 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SIMDUID_);
6132 OMP_CLAUSE__SIMDUID__DECL (c) = uid;
6133 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
6134 gimple_omp_for_set_clauses (ctx->stmt, c);
6136 /* Emit calls denoting privatized variables and initializing a pointer to
6137 the structure that holds private variables as fields, after the ompdevlow pass. */
6138 if (sctx.is_simt)
6140 sctx.simt_eargs[0] = uid;
6141 gimple *g
6142 = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, sctx.simt_eargs);
6143 gimple_call_set_lhs (g, uid);
6144 gimple_seq_add_stmt (ilist, g);
6145 sctx.simt_eargs.release ();
6147 simtrec = create_tmp_var (ptr_type_node, ".omp_simt");
6148 g = gimple_build_call_internal (IFN_GOMP_SIMT_ENTER_ALLOC, 1, uid);
6149 gimple_call_set_lhs (g, simtrec);
6150 gimple_seq_add_stmt (ilist, g);
6152 if (sctx.lane)
6154 gimple *g = gimple_build_call_internal (IFN_GOMP_SIMD_LANE,
6155 2 + (nonconst_simd_if != NULL),
6156 uid, integer_zero_node,
6157 nonconst_simd_if);
6158 gimple_call_set_lhs (g, sctx.lane);
6159 gimple_stmt_iterator gsi = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
6160 gsi_insert_before_without_update (&gsi, g, GSI_SAME_STMT);
6161 g = gimple_build_assign (sctx.lane, INTEGER_CST,
6162 build_int_cst (unsigned_type_node, 0));
6163 gimple_seq_add_stmt (ilist, g);
6164 if (sctx.lastlane)
6166 g = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
6167 2, uid, sctx.lane);
6168 gimple_call_set_lhs (g, sctx.lastlane);
6169 gimple_seq_add_stmt (dlist, g);
6170 gimple_seq_add_seq (dlist, llist[3]);
6172 /* Emit reductions across SIMT lanes in log_2(simt_vf) steps. */
6173 if (llist[2])
6175 tree simt_vf = create_tmp_var (unsigned_type_node);
6176 g = gimple_build_call_internal (IFN_GOMP_SIMT_VF, 0);
6177 gimple_call_set_lhs (g, simt_vf);
6178 gimple_seq_add_stmt (dlist, g);
6180 tree t = build_int_cst (unsigned_type_node, 1);
6181 g = gimple_build_assign (simt_lane, INTEGER_CST, t);
6182 gimple_seq_add_stmt (dlist, g);
6184 t = build_int_cst (unsigned_type_node, 0);
6185 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
6186 gimple_seq_add_stmt (dlist, g);
6188 tree body = create_artificial_label (UNKNOWN_LOCATION);
6189 tree header = create_artificial_label (UNKNOWN_LOCATION);
6190 tree end = create_artificial_label (UNKNOWN_LOCATION);
6191 gimple_seq_add_stmt (dlist, gimple_build_goto (header));
6192 gimple_seq_add_stmt (dlist, gimple_build_label (body));
6194 gimple_seq_add_seq (dlist, llist[2]);
6196 g = gimple_build_assign (simt_lane, LSHIFT_EXPR, simt_lane, integer_one_node);
6197 gimple_seq_add_stmt (dlist, g);
6199 gimple_seq_add_stmt (dlist, gimple_build_label (header));
6200 g = gimple_build_cond (LT_EXPR, simt_lane, simt_vf, body, end);
6201 gimple_seq_add_stmt (dlist, g);
6203 gimple_seq_add_stmt (dlist, gimple_build_label (end));
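/* Schematic sketch of the sequence emitted above (illustrative, not
   literal GIMPLE):

     simt_vf = GOMP_SIMT_VF ();
     simt_lane = 1;
     goto header;
   body:
     ... llist[2], e.g. ivar = ivar OP GOMP_SIMT_XCHG_BFLY (ivar, simt_lane);
     simt_lane = simt_lane << 1;
   header:
     if (simt_lane < simt_vf) goto body; else goto end;
   end:

   i.e. a butterfly reduction over the SIMT lanes in log2 (simt_vf)
   steps.  */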
6205 for (int i = 0; i < 2; i++)
6206 if (llist[i])
6208 tree vf = create_tmp_var (unsigned_type_node);
6209 g = gimple_build_call_internal (IFN_GOMP_SIMD_VF, 1, uid);
6210 gimple_call_set_lhs (g, vf);
6211 gimple_seq *seq = i == 0 ? ilist : dlist;
6212 gimple_seq_add_stmt (seq, g);
6213 tree t = build_int_cst (unsigned_type_node, 0);
6214 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
6215 gimple_seq_add_stmt (seq, g);
6216 tree body = create_artificial_label (UNKNOWN_LOCATION);
6217 tree header = create_artificial_label (UNKNOWN_LOCATION);
6218 tree end = create_artificial_label (UNKNOWN_LOCATION);
6219 gimple_seq_add_stmt (seq, gimple_build_goto (header));
6220 gimple_seq_add_stmt (seq, gimple_build_label (body));
6221 gimple_seq_add_seq (seq, llist[i]);
6222 t = build_int_cst (unsigned_type_node, 1);
6223 g = gimple_build_assign (sctx.idx, PLUS_EXPR, sctx.idx, t);
6224 gimple_seq_add_stmt (seq, g);
6225 gimple_seq_add_stmt (seq, gimple_build_label (header));
6226 g = gimple_build_cond (LT_EXPR, sctx.idx, vf, body, end);
6227 gimple_seq_add_stmt (seq, g);
6228 gimple_seq_add_stmt (seq, gimple_build_label (end));
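/* Schematic sketch of each loop emitted above (illustrative, not
   literal GIMPLE); for i == 0 it goes into ILIST (per-lane
   constructors), for i == 1 into DLIST (per-lane destructors):

     vf = GOMP_SIMD_VF (simduid);
     sctx.idx = 0;
     goto header;
   body:
     ... llist[i] applied to SIMD-array element sctx.idx ...
     sctx.idx = sctx.idx + 1;
   header:
     if (sctx.idx < vf) goto body; else goto end;
   end:
   */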
6231 if (sctx.is_simt)
6233 gimple_seq_add_seq (dlist, sctx.simt_dlist);
6234 gimple *g
6235 = gimple_build_call_internal (IFN_GOMP_SIMT_EXIT, 1, simtrec);
6236 gimple_seq_add_stmt (dlist, g);
6239 /* The copyin sequence is not to be executed by the main thread, since
6240 that would result in self-copies. Perhaps not visible for scalars,
6241 but it certainly is for C++ operator=. */
6242 if (copyin_seq)
6244 x = build_call_expr (builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM),
6245 0);
6246 x = build2 (NE_EXPR, boolean_type_node, x,
6247 build_int_cst (TREE_TYPE (x), 0));
6248 x = build3 (COND_EXPR, void_type_node, x, copyin_seq, NULL);
6249 gimplify_and_add (x, ilist);
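/* A hedged example on hypothetical user code: for

     int tp;
     #pragma omp threadprivate (tp)
     ...
     #pragma omp parallel copyin (tp)

   the copyin assignments built above amount to

     if (omp_get_thread_num () != 0)
       tp = <the main thread's tp>;

   so the main thread never copies onto itself.  */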
6252 /* If any copyin variable is passed by reference, we must ensure the
6253 master thread doesn't modify it before it is copied over in all
6254 threads. Similarly, for variables in both firstprivate and
6255 lastprivate clauses we need to ensure the lastprivate copying
6256 happens after firstprivate copying in all threads. And similarly
6257 for UDRs if the initializer expression refers to omp_orig. */
6258 if (copyin_by_ref || lastprivate_firstprivate
6259 || (reduction_omp_orig_ref
6260 && !ctx->scan_inclusive
6261 && !ctx->scan_exclusive))
6263 /* Don't add any barrier for #pragma omp simd or
6264 #pragma omp distribute. */
6265 if (!is_task_ctx (ctx)
6266 && (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
6267 || gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_FOR))
6268 gimple_seq_add_stmt (ilist, omp_build_barrier (NULL_TREE));
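/* An illustrative (hypothetical) case needing the barrier emitted
   above:

     #pragma omp parallel for firstprivate (x) lastprivate (x)

   Without the barrier, the thread executing the last iteration could
   copy its value back into X before another thread has finished
   reading the original value for its firstprivate copy.  */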
6271 /* If max_vf is non-zero, then we can use only a vectorization factor
6272 up to the max_vf we chose. So stick it into the safelen clause. */
6273 if (maybe_ne (sctx.max_vf, 0U))
6275 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
6276 OMP_CLAUSE_SAFELEN);
6277 poly_uint64 safe_len;
6278 if (c == NULL_TREE
6279 || (poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
6280 && maybe_gt (safe_len, sctx.max_vf)))
6282 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN);
6283 OMP_CLAUSE_SAFELEN_EXPR (c) = build_int_cst (integer_type_node,
6284 sctx.max_vf);
6285 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
6286 gimple_omp_for_set_clauses (ctx->stmt, c);
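/* For instance (a hedged example), if sctx.max_vf was capped at 16
   and the user wrote safelen(32), or no safelen clause at all, the
   clause list ends up carrying safelen(16), so the vectorizer never
   assumes more lanes than the privatized SIMD arrays provide.  */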
6291 /* Create temporary variables for lastprivate(conditional:) implementation
6292 in context CTX with CLAUSES. */
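/* A hedged illustration on hypothetical user code of what this
   handles:

     #pragma omp for lastprivate (conditional: x)
     for (i = 0; i < n; i++)
       if (a[i])
         x = i;

   Only iterations that actually assign X count; the _condtemp_
   iterator variables created below let the store from the highest
   such iteration win.  */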
6294 static void
6295 lower_lastprivate_conditional_clauses (tree *clauses, omp_context *ctx)
6297 tree iter_type = NULL_TREE;
6298 tree cond_ptr = NULL_TREE;
6299 tree iter_var = NULL_TREE;
6300 bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
6301 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD);
6302 tree next = *clauses;
6303 for (tree c = *clauses; c; c = OMP_CLAUSE_CHAIN (c))
6304 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6305 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
6307 if (is_simd)
6309 tree cc = omp_find_clause (next, OMP_CLAUSE__CONDTEMP_);
6310 gcc_assert (cc);
6311 if (iter_type == NULL_TREE)
6313 iter_type = TREE_TYPE (OMP_CLAUSE_DECL (cc));
6314 iter_var = create_tmp_var_raw (iter_type);
6315 DECL_CONTEXT (iter_var) = current_function_decl;
6316 DECL_SEEN_IN_BIND_EXPR_P (iter_var) = 1;
6317 DECL_CHAIN (iter_var) = ctx->block_vars;
6318 ctx->block_vars = iter_var;
6319 tree c3
6320 = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
6321 OMP_CLAUSE__CONDTEMP__ITER (c3) = 1;
6322 OMP_CLAUSE_DECL (c3) = iter_var;
6323 OMP_CLAUSE_CHAIN (c3) = *clauses;
6324 *clauses = c3;
6325 ctx->lastprivate_conditional_map = new hash_map<tree, tree>;
6327 next = OMP_CLAUSE_CHAIN (cc);
6328 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
6329 tree v = lookup_decl (OMP_CLAUSE_DECL (cc), ctx);
6330 ctx->lastprivate_conditional_map->put (o, v);
6331 continue;
6333 if (iter_type == NULL)
6335 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR)
6337 struct omp_for_data fd;
6338 omp_extract_for_data (as_a <gomp_for *> (ctx->stmt), &fd,
6339 NULL);
6340 iter_type = unsigned_type_for (fd.iter_type);
6342 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
6343 iter_type = unsigned_type_node;
6344 tree c2 = omp_find_clause (*clauses, OMP_CLAUSE__CONDTEMP_);
6345 if (c2)
6347 cond_ptr
6348 = lookup_decl_in_outer_ctx (OMP_CLAUSE_DECL (c2), ctx);
6349 OMP_CLAUSE_DECL (c2) = cond_ptr;
6351 else
6353 cond_ptr = create_tmp_var_raw (build_pointer_type (iter_type));
6354 DECL_CONTEXT (cond_ptr) = current_function_decl;
6355 DECL_SEEN_IN_BIND_EXPR_P (cond_ptr) = 1;
6356 DECL_CHAIN (cond_ptr) = ctx->block_vars;
6357 ctx->block_vars = cond_ptr;
6358 c2 = build_omp_clause (UNKNOWN_LOCATION,
6359 OMP_CLAUSE__CONDTEMP_);
6360 OMP_CLAUSE_DECL (c2) = cond_ptr;
6361 OMP_CLAUSE_CHAIN (c2) = *clauses;
6362 *clauses = c2;
6364 iter_var = create_tmp_var_raw (iter_type);
6365 DECL_CONTEXT (iter_var) = current_function_decl;
6366 DECL_SEEN_IN_BIND_EXPR_P (iter_var) = 1;
6367 DECL_CHAIN (iter_var) = ctx->block_vars;
6368 ctx->block_vars = iter_var;
6369 tree c3
6370 = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
6371 OMP_CLAUSE__CONDTEMP__ITER (c3) = 1;
6372 OMP_CLAUSE_DECL (c3) = iter_var;
6373 OMP_CLAUSE_CHAIN (c3) = OMP_CLAUSE_CHAIN (c2);
6374 OMP_CLAUSE_CHAIN (c2) = c3;
6375 ctx->lastprivate_conditional_map = new hash_map<tree, tree>;
6377 tree v = create_tmp_var_raw (iter_type);
6378 DECL_CONTEXT (v) = current_function_decl;
6379 DECL_SEEN_IN_BIND_EXPR_P (v) = 1;
6380 DECL_CHAIN (v) = ctx->block_vars;
6381 ctx->block_vars = v;
6382 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
6383 ctx->lastprivate_conditional_map->put (o, v);
6388 /* Generate code to implement the LASTPRIVATE clauses. This is used for
6389 both parallel and workshare constructs. PREDICATE may be NULL if it's
6390 always true. BODY_P is the sequence in which to insert early
6391 initialization if needed, STMT_LIST is where the non-conditional
6392 lastprivate handling goes, and CSTMT_LIST is a sequence that needs
6393 to be run in a critical section. */
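/* A hedged illustration on hypothetical user code: for

     #pragma omp parallel for lastprivate (x)
     for (i = 0; i < n; i++) x = a[i];

   the code generated below copies the privatized X back to the
   original variable, guarded by PREDICATE so that only the thread
   that executed the sequentially last iteration performs the copy.  */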
6395 static void
6396 lower_lastprivate_clauses (tree clauses, tree predicate, gimple_seq *body_p,
6397 gimple_seq *stmt_list, gimple_seq *cstmt_list,
6398 omp_context *ctx)
6400 tree x, c, label = NULL, orig_clauses = clauses;
6401 bool par_clauses = false;
6402 tree simduid = NULL, lastlane = NULL, simtcond = NULL, simtlast = NULL;
6403 unsigned HOST_WIDE_INT conditional_off = 0;
6404 gimple_seq post_stmt_list = NULL;
6406 /* Early exit if there are no lastprivate or linear clauses. */
6407 for (; clauses ; clauses = OMP_CLAUSE_CHAIN (clauses))
6408 if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LASTPRIVATE
6409 || (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LINEAR
6410 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (clauses)))
6411 break;
6412 if (clauses == NULL)
6414 /* If this was a workshare clause, see if it had been combined
6415 with its parallel. In that case, look for the clauses on the
6416 parallel statement itself. */
6417 if (is_parallel_ctx (ctx))
6418 return;
6420 ctx = ctx->outer;
6421 if (ctx == NULL || !is_parallel_ctx (ctx))
6422 return;
6424 clauses = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
6425 OMP_CLAUSE_LASTPRIVATE);
6426 if (clauses == NULL)
6427 return;
6428 par_clauses = true;
6431 bool maybe_simt = false;
6432 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
6433 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
6435 maybe_simt = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMT_);
6436 simduid = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMDUID_);
6437 if (simduid)
6438 simduid = OMP_CLAUSE__SIMDUID__DECL (simduid);
6441 if (predicate)
6443 gcond *stmt;
6444 tree label_true, arm1, arm2;
6445 enum tree_code pred_code = TREE_CODE (predicate);
6447 label = create_artificial_label (UNKNOWN_LOCATION);
6448 label_true = create_artificial_label (UNKNOWN_LOCATION);
6449 if (TREE_CODE_CLASS (pred_code) == tcc_comparison)
6451 arm1 = TREE_OPERAND (predicate, 0);
6452 arm2 = TREE_OPERAND (predicate, 1);
6453 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
6454 gimplify_expr (&arm2, stmt_list, NULL, is_gimple_val, fb_rvalue);
6456 else
6458 arm1 = predicate;
6459 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
6460 arm2 = boolean_false_node;
6461 pred_code = NE_EXPR;
6463 if (maybe_simt)
6465 c = build2 (pred_code, boolean_type_node, arm1, arm2);
6466 c = fold_convert (integer_type_node, c);
6467 simtcond = create_tmp_var (integer_type_node);
6468 gimplify_assign (simtcond, c, stmt_list);
6469 gcall *g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY,
6470 1, simtcond);
6471 c = create_tmp_var (integer_type_node);
6472 gimple_call_set_lhs (g, c);
6473 gimple_seq_add_stmt (stmt_list, g);
6474 stmt = gimple_build_cond (NE_EXPR, c, integer_zero_node,
6475 label_true, label);
6477 else
6478 stmt = gimple_build_cond (pred_code, arm1, arm2, label_true, label);
6479 gimple_seq_add_stmt (stmt_list, stmt);
6480 gimple_seq_add_stmt (stmt_list, gimple_build_label (label_true));
6483 tree cond_ptr = NULL_TREE;
6484 for (c = clauses; c ;)
6486 tree var, new_var;
6487 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
6488 gimple_seq *this_stmt_list = stmt_list;
6489 tree lab2 = NULL_TREE;
6491 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6492 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
6493 && ctx->lastprivate_conditional_map
6494 && !ctx->combined_into_simd_safelen1)
6496 gcc_assert (body_p);
6497 if (simduid)
6498 goto next;
6499 if (cond_ptr == NULL_TREE)
6501 cond_ptr = omp_find_clause (orig_clauses, OMP_CLAUSE__CONDTEMP_);
6502 cond_ptr = OMP_CLAUSE_DECL (cond_ptr);
6504 tree type = TREE_TYPE (TREE_TYPE (cond_ptr));
6505 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
6506 tree v = *ctx->lastprivate_conditional_map->get (o);
6507 gimplify_assign (v, build_zero_cst (type), body_p);
6508 this_stmt_list = cstmt_list;
6509 tree mem;
6510 if (POINTER_TYPE_P (TREE_TYPE (cond_ptr)))
6512 mem = build2 (MEM_REF, type, cond_ptr,
6513 build_int_cst (TREE_TYPE (cond_ptr),
6514 conditional_off));
6515 conditional_off += tree_to_uhwi (TYPE_SIZE_UNIT (type));
6517 else
6518 mem = build4 (ARRAY_REF, type, cond_ptr,
6519 size_int (conditional_off++), NULL_TREE, NULL_TREE);
6520 tree mem2 = copy_node (mem);
6521 gimple_seq seq = NULL;
6522 mem = force_gimple_operand (mem, &seq, true, NULL_TREE);
6523 gimple_seq_add_seq (this_stmt_list, seq);
6524 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
6525 lab2 = create_artificial_label (UNKNOWN_LOCATION);
6526 gimple *g = gimple_build_cond (GT_EXPR, v, mem, lab1, lab2);
6527 gimple_seq_add_stmt (this_stmt_list, g);
6528 gimple_seq_add_stmt (this_stmt_list, gimple_build_label (lab1));
6529 gimplify_assign (mem2, v, this_stmt_list);
6531 else if (predicate
6532 && ctx->combined_into_simd_safelen1
6533 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6534 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
6535 && ctx->lastprivate_conditional_map)
6536 this_stmt_list = &post_stmt_list;
6538 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6539 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
6540 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
6542 var = OMP_CLAUSE_DECL (c);
6543 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6544 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
6545 && is_taskloop_ctx (ctx))
6547 gcc_checking_assert (ctx->outer && is_task_ctx (ctx->outer));
6548 new_var = lookup_decl (var, ctx->outer);
6550 else
6552 new_var = lookup_decl (var, ctx);
6553 /* Avoid uninitialized warnings for lastprivate and
6554 for linear iterators. */
6555 if (predicate
6556 && (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6557 || OMP_CLAUSE_LINEAR_NO_COPYIN (c)))
6558 TREE_NO_WARNING (new_var) = 1;
6561 if (!maybe_simt && simduid && DECL_HAS_VALUE_EXPR_P (new_var))
6563 tree val = DECL_VALUE_EXPR (new_var);
6564 if (TREE_CODE (val) == ARRAY_REF
6565 && VAR_P (TREE_OPERAND (val, 0))
6566 && lookup_attribute ("omp simd array",
6567 DECL_ATTRIBUTES (TREE_OPERAND (val,
6568 0))))
6570 if (lastlane == NULL)
6572 lastlane = create_tmp_var (unsigned_type_node);
6573 gcall *g
6574 = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
6575 2, simduid,
6576 TREE_OPERAND (val, 1));
6577 gimple_call_set_lhs (g, lastlane);
6578 gimple_seq_add_stmt (this_stmt_list, g);
6580 new_var = build4 (ARRAY_REF, TREE_TYPE (val),
6581 TREE_OPERAND (val, 0), lastlane,
6582 NULL_TREE, NULL_TREE);
6583 TREE_THIS_NOTRAP (new_var) = 1;
6586 else if (maybe_simt)
6588 tree val = (DECL_HAS_VALUE_EXPR_P (new_var)
6589 ? DECL_VALUE_EXPR (new_var)
6590 : new_var);
6591 if (simtlast == NULL)
6593 simtlast = create_tmp_var (unsigned_type_node);
6594 gcall *g = gimple_build_call_internal
6595 (IFN_GOMP_SIMT_LAST_LANE, 1, simtcond);
6596 gimple_call_set_lhs (g, simtlast);
6597 gimple_seq_add_stmt (this_stmt_list, g);
6599 x = build_call_expr_internal_loc
6600 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_IDX,
6601 TREE_TYPE (val), 2, val, simtlast);
6602 new_var = unshare_expr (new_var);
6603 gimplify_assign (new_var, x, this_stmt_list);
6604 new_var = unshare_expr (new_var);
6607 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6608 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
6610 lower_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
6611 gimple_seq_add_seq (this_stmt_list,
6612 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
6613 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) = NULL;
6615 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
6616 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
6618 lower_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
6619 gimple_seq_add_seq (this_stmt_list,
6620 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
6621 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) = NULL;
6624 x = NULL_TREE;
6625 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6626 && OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c)
6627 && is_taskloop_ctx (ctx))
6629 tree ovar = maybe_lookup_decl_in_outer_ctx (var,
6630 ctx->outer->outer);
6631 if (is_global_var (ovar))
6632 x = ovar;
6634 if (!x)
6635 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_LASTPRIVATE);
6636 if (omp_is_reference (var))
6637 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
6638 x = lang_hooks.decls.omp_clause_assign_op (c, x, new_var);
6639 gimplify_and_add (x, this_stmt_list);
6641 if (lab2)
6642 gimple_seq_add_stmt (this_stmt_list, gimple_build_label (lab2));
6645 next:
6646 c = OMP_CLAUSE_CHAIN (c);
6647 if (c == NULL && !par_clauses)
6649 /* If this was a workshare clause, see if it had been combined
6650 with its parallel. In that case, continue looking for the
6651 clauses also on the parallel statement itself. */
6652 if (is_parallel_ctx (ctx))
6653 break;
6655 ctx = ctx->outer;
6656 if (ctx == NULL || !is_parallel_ctx (ctx))
6657 break;
6659 c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
6660 OMP_CLAUSE_LASTPRIVATE);
6661 par_clauses = true;
6665 if (label)
6666 gimple_seq_add_stmt (stmt_list, gimple_build_label (label));
6667 gimple_seq_add_seq (stmt_list, post_stmt_list);
6670 /* Lower the OpenACC reductions of CLAUSES for compute axis LEVEL
6671 (which might be a placeholder). INNER is true if this is an inner
6672 axis of a multi-axis loop. FORK and JOIN are (optional) fork and
6673 join markers. Generate the before-loop forking sequence in
6674 FORK_SEQ and the after-loop joining sequence to JOIN_SEQ. The
6675 general form of these sequences is
6677 GOACC_REDUCTION_SETUP
6678 GOACC_FORK
6679 GOACC_REDUCTION_INIT
6681 GOACC_REDUCTION_FINI
6682 GOACC_JOIN
6683 GOACC_REDUCTION_TEARDOWN. */
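/* A hedged example on hypothetical user code: for

     #pragma acc parallel loop reduction (+:sum)

   the loop ends up bracketed roughly as

     v1 = GOACC_REDUCTION (SETUP, ref_to_res, sum, level, op, off);
     GOACC_FORK
     v2 = GOACC_REDUCTION (INIT, ref_to_res, v1, level, op, off);
     ... loop body accumulating into the private copy ...
     v3 = GOACC_REDUCTION (FINI, ref_to_res, v2, level, op, off);
     GOACC_JOIN
     sum = GOACC_REDUCTION (TEARDOWN, ref_to_res, v3, level, op, off);

   with the internal-fn calls resolved later by the target compiler.  */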
6685 static void
6686 lower_oacc_reductions (location_t loc, tree clauses, tree level, bool inner,
6687 gcall *fork, gcall *join, gimple_seq *fork_seq,
6688 gimple_seq *join_seq, omp_context *ctx)
6690 gimple_seq before_fork = NULL;
6691 gimple_seq after_fork = NULL;
6692 gimple_seq before_join = NULL;
6693 gimple_seq after_join = NULL;
6694 tree init_code = NULL_TREE, fini_code = NULL_TREE,
6695 setup_code = NULL_TREE, teardown_code = NULL_TREE;
6696 unsigned offset = 0;
6698 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
6699 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
6701 tree orig = OMP_CLAUSE_DECL (c);
6702 tree var = maybe_lookup_decl (orig, ctx);
6703 tree ref_to_res = NULL_TREE;
6704 tree incoming, outgoing, v1, v2, v3;
6705 bool is_private = false;
6707 enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
6708 if (rcode == MINUS_EXPR)
6709 rcode = PLUS_EXPR;
6710 else if (rcode == TRUTH_ANDIF_EXPR)
6711 rcode = BIT_AND_EXPR;
6712 else if (rcode == TRUTH_ORIF_EXPR)
6713 rcode = BIT_IOR_EXPR;
6714 tree op = build_int_cst (unsigned_type_node, rcode);
6716 if (!var)
6717 var = orig;
6719 incoming = outgoing = var;
6721 if (!inner)
6723 /* See if an outer construct also reduces this variable. */
6724 omp_context *outer = ctx;
6726 while (omp_context *probe = outer->outer)
6728 enum gimple_code type = gimple_code (probe->stmt);
6729 tree cls;
6731 switch (type)
6733 case GIMPLE_OMP_FOR:
6734 cls = gimple_omp_for_clauses (probe->stmt);
6735 break;
6737 case GIMPLE_OMP_TARGET:
6738 if ((gimple_omp_target_kind (probe->stmt)
6739 != GF_OMP_TARGET_KIND_OACC_PARALLEL)
6740 && (gimple_omp_target_kind (probe->stmt)
6741 != GF_OMP_TARGET_KIND_OACC_SERIAL))
6742 goto do_lookup;
6744 cls = gimple_omp_target_clauses (probe->stmt);
6745 break;
6747 default:
6748 goto do_lookup;
6751 outer = probe;
6752 for (; cls; cls = OMP_CLAUSE_CHAIN (cls))
6753 if (OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_REDUCTION
6754 && orig == OMP_CLAUSE_DECL (cls))
6756 incoming = outgoing = lookup_decl (orig, probe);
6757 goto has_outer_reduction;
6759 else if ((OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_FIRSTPRIVATE
6760 || OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_PRIVATE)
6761 && orig == OMP_CLAUSE_DECL (cls))
6763 is_private = true;
6764 goto do_lookup;
6768 do_lookup:
6769 /* This is the outermost construct with this reduction;
6770 see if there's a mapping for it. */
6771 if (gimple_code (outer->stmt) == GIMPLE_OMP_TARGET
6772 && maybe_lookup_field (orig, outer) && !is_private)
6774 ref_to_res = build_receiver_ref (orig, false, outer);
6775 if (omp_is_reference (orig))
6776 ref_to_res = build_simple_mem_ref (ref_to_res);
6778 tree type = TREE_TYPE (var);
6779 if (POINTER_TYPE_P (type))
6780 type = TREE_TYPE (type);
6782 outgoing = var;
6783 incoming = omp_reduction_init_op (loc, rcode, type);
6785 else
6787 /* Try to look at enclosing contexts for the reduction var;
6788 use the original if no mapping is found. */
6789 tree t = NULL_TREE;
6790 omp_context *c = ctx->outer;
6791 while (c && !t)
6793 t = maybe_lookup_decl (orig, c);
6794 c = c->outer;
6796 incoming = outgoing = (t ? t : orig);
6799 has_outer_reduction:;
6802 if (!ref_to_res)
6803 ref_to_res = integer_zero_node;
6805 if (omp_is_reference (orig))
6807 tree type = TREE_TYPE (var);
6808 const char *id = IDENTIFIER_POINTER (DECL_NAME (var));
6810 if (!inner)
6812 tree x = create_tmp_var (TREE_TYPE (type), id);
6813 gimplify_assign (var, build_fold_addr_expr (x), fork_seq);
6816 v1 = create_tmp_var (type, id);
6817 v2 = create_tmp_var (type, id);
6818 v3 = create_tmp_var (type, id);
6820 gimplify_assign (v1, var, fork_seq);
6821 gimplify_assign (v2, var, fork_seq);
6822 gimplify_assign (v3, var, fork_seq);
6824 var = build_simple_mem_ref (var);
6825 v1 = build_simple_mem_ref (v1);
6826 v2 = build_simple_mem_ref (v2);
6827 v3 = build_simple_mem_ref (v3);
6828 outgoing = build_simple_mem_ref (outgoing);
6830 if (!TREE_CONSTANT (incoming))
6831 incoming = build_simple_mem_ref (incoming);
6833 else
6834 v1 = v2 = v3 = var;
6836 /* Determine position in reduction buffer, which may be used
6837 by target. The parser has ensured that this is not a
6838 variable-sized type. */
6839 fixed_size_mode mode
6840 = as_a <fixed_size_mode> (TYPE_MODE (TREE_TYPE (var)));
6841 unsigned align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
6842 offset = (offset + align - 1) & ~(align - 1);
6843 tree off = build_int_cst (sizetype, offset);
6844 offset += GET_MODE_SIZE (mode);
6846 if (!init_code)
6848 init_code = build_int_cst (integer_type_node,
6849 IFN_GOACC_REDUCTION_INIT);
6850 fini_code = build_int_cst (integer_type_node,
6851 IFN_GOACC_REDUCTION_FINI);
6852 setup_code = build_int_cst (integer_type_node,
6853 IFN_GOACC_REDUCTION_SETUP);
6854 teardown_code = build_int_cst (integer_type_node,
6855 IFN_GOACC_REDUCTION_TEARDOWN);
6858 tree setup_call
6859 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
6860 TREE_TYPE (var), 6, setup_code,
6861 unshare_expr (ref_to_res),
6862 incoming, level, op, off);
6863 tree init_call
6864 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
6865 TREE_TYPE (var), 6, init_code,
6866 unshare_expr (ref_to_res),
6867 v1, level, op, off);
6868 tree fini_call
6869 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
6870 TREE_TYPE (var), 6, fini_code,
6871 unshare_expr (ref_to_res),
6872 v2, level, op, off);
6873 tree teardown_call
6874 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
6875 TREE_TYPE (var), 6, teardown_code,
6876 ref_to_res, v3, level, op, off);
6878 gimplify_assign (v1, setup_call, &before_fork);
6879 gimplify_assign (v2, init_call, &after_fork);
6880 gimplify_assign (v3, fini_call, &before_join);
6881 gimplify_assign (outgoing, teardown_call, &after_join);
6884 /* Now stitch things together. */
6885 gimple_seq_add_seq (fork_seq, before_fork);
6886 if (fork)
6887 gimple_seq_add_stmt (fork_seq, fork);
6888 gimple_seq_add_seq (fork_seq, after_fork);
6890 gimple_seq_add_seq (join_seq, before_join);
6891 if (join)
6892 gimple_seq_add_stmt (join_seq, join);
6893 gimple_seq_add_seq (join_seq, after_join);
6896 /* Generate code to implement the REDUCTION clauses and append it
6897 to STMT_SEQP. CLIST, if non-NULL, is a pointer to a sequence
6898 that should also be emitted inside of the critical section;
6899 in that case clear *CLIST afterwards, otherwise leave it as is
6900 and let the caller emit it itself. */
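/* A hedged sketch of the two strategies used below, on hypothetical
   user code.  With exactly one scalar reduction clause, e.g.

     #pragma omp parallel reduction (+:a)

   the merge is lowered as a single relaxed OMP_ATOMIC update,
   roughly "*&a_orig = *&a_orig + a".  With several clauses, or an
   array section or UDR, the merges are emitted under a lock:

     GOMP_atomic_start ();
     a_orig = a_orig + a;  b_orig = b_orig * b;
     GOMP_atomic_end ();  */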
6902 static void
6903 lower_reduction_clauses (tree clauses, gimple_seq *stmt_seqp,
6904 gimple_seq *clist, omp_context *ctx)
6906 gimple_seq sub_seq = NULL;
6907 gimple *stmt;
6908 tree x, c;
6909 int count = 0;
6911 /* OpenACC loop reductions are handled elsewhere. */
6912 if (is_gimple_omp_oacc (ctx->stmt))
6913 return;
6915 /* SIMD reductions are handled in lower_rec_input_clauses. */
6916 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
6917 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
6918 return;
6920 /* inscan reductions are handled elsewhere. */
6921 if (ctx->scan_inclusive || ctx->scan_exclusive)
6922 return;
6924 /* First see if there is exactly one reduction clause. Use OMP_ATOMIC
6925 update in that case, otherwise use a lock. */
6926 for (c = clauses; c && count < 2; c = OMP_CLAUSE_CHAIN (c))
6927 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6928 && !OMP_CLAUSE_REDUCTION_TASK (c))
6930 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
6931 || TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
6933 /* Never use OMP_ATOMIC for array reductions or UDRs. */
6934 count = -1;
6935 break;
6937 count++;
6940 if (count == 0)
6941 return;
6943 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
6945 tree var, ref, new_var, orig_var;
6946 enum tree_code code;
6947 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
6949 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
6950 || OMP_CLAUSE_REDUCTION_TASK (c))
6951 continue;
6953 enum omp_clause_code ccode = OMP_CLAUSE_REDUCTION;
6954 orig_var = var = OMP_CLAUSE_DECL (c);
6955 if (TREE_CODE (var) == MEM_REF)
6957 var = TREE_OPERAND (var, 0);
6958 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
6959 var = TREE_OPERAND (var, 0);
6960 if (TREE_CODE (var) == ADDR_EXPR)
6961 var = TREE_OPERAND (var, 0);
6962 else
6964 /* If this is a pointer- or reference-based array
6965 section, the var could be private in the outer
6966 context, e.g. on an orphaned loop construct. Pretend
6967 this is a private variable's outer reference. */
6968 ccode = OMP_CLAUSE_PRIVATE;
6969 if (TREE_CODE (var) == INDIRECT_REF)
6970 var = TREE_OPERAND (var, 0);
6972 orig_var = var;
6973 if (is_variable_sized (var))
6975 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
6976 var = DECL_VALUE_EXPR (var);
6977 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
6978 var = TREE_OPERAND (var, 0);
6979 gcc_assert (DECL_P (var));
6982 new_var = lookup_decl (var, ctx);
6983 if (var == OMP_CLAUSE_DECL (c) && omp_is_reference (var))
6984 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
6985 ref = build_outer_var_ref (var, ctx, ccode);
6986 code = OMP_CLAUSE_REDUCTION_CODE (c);
6988 /* reduction(-:var) sums up the partial results, so it acts
6989 identically to reduction(+:var). */
6990 if (code == MINUS_EXPR)
6991 code = PLUS_EXPR;
6993 if (count == 1)
6995 tree addr = build_fold_addr_expr_loc (clause_loc, ref);
6997 addr = save_expr (addr);
6998 ref = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (addr)), addr);
6999 x = fold_build2_loc (clause_loc, code, TREE_TYPE (ref), ref, new_var);
7000 x = build2 (OMP_ATOMIC, void_type_node, addr, x);
7001 OMP_ATOMIC_MEMORY_ORDER (x) = OMP_MEMORY_ORDER_RELAXED;
7002 gimplify_and_add (x, stmt_seqp);
7003 return;
7005 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
7007 tree d = OMP_CLAUSE_DECL (c);
7008 tree type = TREE_TYPE (d);
7009 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
7010 tree i = create_tmp_var (TREE_TYPE (v));
7011 tree ptype = build_pointer_type (TREE_TYPE (type));
7012 tree bias = TREE_OPERAND (d, 1);
7013 d = TREE_OPERAND (d, 0);
7014 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
7016 tree b = TREE_OPERAND (d, 1);
7017 b = maybe_lookup_decl (b, ctx);
7018 if (b == NULL)
7020 b = TREE_OPERAND (d, 1);
7021 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
7023 if (integer_zerop (bias))
7024 bias = b;
7025 else
7027 bias = fold_convert_loc (clause_loc, TREE_TYPE (b), bias);
7028 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
7029 TREE_TYPE (b), b, bias);
7031 d = TREE_OPERAND (d, 0);
7033 /* For REF, build_outer_var_ref already performs the
7034 dereference, so only new_var needs one. */
7035 if (TREE_CODE (d) == INDIRECT_REF)
7037 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
7038 gcc_assert (omp_is_reference (var) && var == orig_var);
7040 else if (TREE_CODE (d) == ADDR_EXPR)
7042 if (orig_var == var)
7044 new_var = build_fold_addr_expr (new_var);
7045 ref = build_fold_addr_expr (ref);
7048 else
7050 gcc_assert (orig_var == var);
7051 if (omp_is_reference (var))
7052 ref = build_fold_addr_expr (ref);
7054 if (DECL_P (v))
7056 tree t = maybe_lookup_decl (v, ctx);
7057 if (t)
7058 v = t;
7059 else
7060 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
7061 gimplify_expr (&v, stmt_seqp, NULL, is_gimple_val, fb_rvalue);
7063 if (!integer_zerop (bias))
7065 bias = fold_convert_loc (clause_loc, sizetype, bias);
7066 new_var = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
7067 TREE_TYPE (new_var), new_var,
7068 unshare_expr (bias));
7069 ref = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
7070 TREE_TYPE (ref), ref, bias);
7072 new_var = fold_convert_loc (clause_loc, ptype, new_var);
7073 ref = fold_convert_loc (clause_loc, ptype, ref);
7074 tree m = create_tmp_var (ptype);
7075 gimplify_assign (m, new_var, stmt_seqp);
7076 new_var = m;
7077 m = create_tmp_var (ptype);
7078 gimplify_assign (m, ref, stmt_seqp);
7079 ref = m;
7080 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), stmt_seqp);
7081 tree body = create_artificial_label (UNKNOWN_LOCATION);
7082 tree end = create_artificial_label (UNKNOWN_LOCATION);
7083 gimple_seq_add_stmt (&sub_seq, gimple_build_label (body));
7084 tree priv = build_simple_mem_ref_loc (clause_loc, new_var);
7085 tree out = build_simple_mem_ref_loc (clause_loc, ref);
7086 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
7088 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
7089 tree decl_placeholder
7090 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
7091 SET_DECL_VALUE_EXPR (placeholder, out);
7092 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
7093 SET_DECL_VALUE_EXPR (decl_placeholder, priv);
7094 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
7095 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
7096 gimple_seq_add_seq (&sub_seq,
7097 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
7098 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
7099 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
7100 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
7102 else
7104 x = build2 (code, TREE_TYPE (out), out, priv);
7105 out = unshare_expr (out);
7106 gimplify_assign (out, x, &sub_seq);
7108 gimple *g = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
7109 TYPE_SIZE_UNIT (TREE_TYPE (type)));
7110 gimple_seq_add_stmt (&sub_seq, g);
7111 g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
7112 TYPE_SIZE_UNIT (TREE_TYPE (type)));
7113 gimple_seq_add_stmt (&sub_seq, g);
7114 g = gimple_build_assign (i, PLUS_EXPR, i,
7115 build_int_cst (TREE_TYPE (i), 1));
7116 gimple_seq_add_stmt (&sub_seq, g);
7117 g = gimple_build_cond (LE_EXPR, i, v, body, end);
7118 gimple_seq_add_stmt (&sub_seq, g);
7119 gimple_seq_add_stmt (&sub_seq, gimple_build_label (end));
7121 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
7123 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
7125 if (omp_is_reference (var)
7126 && !useless_type_conversion_p (TREE_TYPE (placeholder),
7127 TREE_TYPE (ref)))
7128 ref = build_fold_addr_expr_loc (clause_loc, ref);
7129 SET_DECL_VALUE_EXPR (placeholder, ref);
7130 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
7131 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
7132 gimple_seq_add_seq (&sub_seq, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
7133 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
7134 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
7136 else
7138 x = build2 (code, TREE_TYPE (ref), ref, new_var);
7139 ref = build_outer_var_ref (var, ctx);
7140 gimplify_assign (ref, x, &sub_seq);
7144 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START),
7145 0);
7146 gimple_seq_add_stmt (stmt_seqp, stmt);
7148 gimple_seq_add_seq (stmt_seqp, sub_seq);
7150 if (clist)
7152 gimple_seq_add_seq (stmt_seqp, *clist);
7153 *clist = NULL;
7156 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END),
7157 0);
7158 gimple_seq_add_stmt (stmt_seqp, stmt);
7162 /* Generate code to implement the COPYPRIVATE clauses. */
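/* A hedged illustration on hypothetical user code: for

     #pragma omp single copyprivate (x)

   the thread that executed the single region stores X, or its
   address if passed by reference, into the sender record via SLIST,
   and every other thread copies it back out of the receiver record
   via RLIST.  */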
7164 static void
7165 lower_copyprivate_clauses (tree clauses, gimple_seq *slist, gimple_seq *rlist,
7166 omp_context *ctx)
7168 tree c;
7170 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7172 tree var, new_var, ref, x;
7173 bool by_ref;
7174 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
7176 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYPRIVATE)
7177 continue;
7179 var = OMP_CLAUSE_DECL (c);
7180 by_ref = use_pointer_for_field (var, NULL);
7182 ref = build_sender_ref (var, ctx);
7183 x = new_var = lookup_decl_in_outer_ctx (var, ctx);
7184 if (by_ref)
7186 x = build_fold_addr_expr_loc (clause_loc, new_var);
7187 x = fold_convert_loc (clause_loc, TREE_TYPE (ref), x);
7189 gimplify_assign (ref, x, slist);
7191 ref = build_receiver_ref (var, false, ctx);
7192 if (by_ref)
7194 ref = fold_convert_loc (clause_loc,
7195 build_pointer_type (TREE_TYPE (new_var)),
7196 ref);
7197 ref = build_fold_indirect_ref_loc (clause_loc, ref);
7199 if (omp_is_reference (var))
7201 ref = fold_convert_loc (clause_loc, TREE_TYPE (new_var), ref);
7202 ref = build_simple_mem_ref_loc (clause_loc, ref);
7203 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
7205 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, ref);
7206 gimplify_and_add (x, rlist);
7211 /* Generate code to implement the clauses, FIRSTPRIVATE, COPYIN, LASTPRIVATE,
7212 and REDUCTION from the sender (aka parent) side. */
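/* A hedged example on hypothetical user code: for

     #pragma omp parallel firstprivate (x)

   the parent stores X into the communication record before spawning,
   roughly

     .omp_data_o.x = x;   appended to ILIST

   and for clauses with copy-out semantics the value is read back
   after the region, roughly

     x = .omp_data_o.x;   appended to OLIST

   (the ".omp_data_o" field naming here is illustrative).  */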
7214 static void
7215 lower_send_clauses (tree clauses, gimple_seq *ilist, gimple_seq *olist,
7216 omp_context *ctx)
7218 tree c, t;
7219 int ignored_looptemp = 0;
7220 bool is_taskloop = false;
7222 /* For taskloop, ignore the first two _looptemp_ clauses; those are
7223 initialized by GOMP_taskloop. */
7224 if (is_task_ctx (ctx) && gimple_omp_task_taskloop_p (ctx->stmt))
7226 ignored_looptemp = 2;
7227 is_taskloop = true;
7230 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7232 tree val, ref, x, var;
7233 bool by_ref, do_in = false, do_out = false;
7234 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
7236 switch (OMP_CLAUSE_CODE (c))
7238 case OMP_CLAUSE_PRIVATE:
7239 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
7240 break;
7241 continue;
7242 case OMP_CLAUSE_FIRSTPRIVATE:
7243 case OMP_CLAUSE_COPYIN:
7244 case OMP_CLAUSE_LASTPRIVATE:
7245 case OMP_CLAUSE_IN_REDUCTION:
7246 case OMP_CLAUSE__REDUCTEMP_:
7247 break;
7248 case OMP_CLAUSE_REDUCTION:
7249 if (is_task_ctx (ctx) || OMP_CLAUSE_REDUCTION_TASK (c))
7250 continue;
7251 break;
7252 case OMP_CLAUSE_SHARED:
7253 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
7254 break;
7255 continue;
7256 case OMP_CLAUSE__LOOPTEMP_:
7257 if (ignored_looptemp)
7259 ignored_looptemp--;
7260 continue;
7262 break;
7263 default:
7264 continue;
7267 val = OMP_CLAUSE_DECL (c);
7268 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
7269 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION)
7270 && TREE_CODE (val) == MEM_REF)
7272 val = TREE_OPERAND (val, 0);
7273 if (TREE_CODE (val) == POINTER_PLUS_EXPR)
7274 val = TREE_OPERAND (val, 0);
7275 if (TREE_CODE (val) == INDIRECT_REF
7276 || TREE_CODE (val) == ADDR_EXPR)
7277 val = TREE_OPERAND (val, 0);
7278 if (is_variable_sized (val))
7279 continue;
7282 /* For OMP_CLAUSE_SHARED_FIRSTPRIVATE, look beyond the
7283 outer taskloop region. */
7284 omp_context *ctx_for_o = ctx;
7285 if (is_taskloop
7286 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
7287 && OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
7288 ctx_for_o = ctx->outer;
7290 var = lookup_decl_in_outer_ctx (val, ctx_for_o);
7292 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYIN
7293 && is_global_var (var)
7294 && (val == OMP_CLAUSE_DECL (c)
7295 || !is_task_ctx (ctx)
7296 || (TREE_CODE (TREE_TYPE (val)) != POINTER_TYPE
7297 && (TREE_CODE (TREE_TYPE (val)) != REFERENCE_TYPE
7298 || (TREE_CODE (TREE_TYPE (TREE_TYPE (val)))
7299 != POINTER_TYPE)))))
7300 continue;
7302 t = omp_member_access_dummy_var (var);
7303 if (t)
7305 var = DECL_VALUE_EXPR (var);
7306 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx_for_o);
7307 if (o != t)
7308 var = unshare_and_remap (var, t, o);
7309 else
7310 var = unshare_expr (var);
7313 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
7315 /* Handle taskloop firstprivate/lastprivate, where the
7316 lastprivate on GIMPLE_OMP_TASK is represented as
7317 OMP_CLAUSE_SHARED_FIRSTPRIVATE. */
7318 tree f = lookup_sfield ((splay_tree_key) &DECL_UID (val), ctx);
7319 x = omp_build_component_ref (ctx->sender_decl, f);
7320 if (use_pointer_for_field (val, ctx))
7321 var = build_fold_addr_expr (var);
7322 gimplify_assign (x, var, ilist);
7323 DECL_ABSTRACT_ORIGIN (f) = NULL;
7324 continue;
7327 if (((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
7328 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IN_REDUCTION)
7329 || val == OMP_CLAUSE_DECL (c))
7330 && is_variable_sized (val))
7331 continue;
7332 by_ref = use_pointer_for_field (val, NULL);
7334 switch (OMP_CLAUSE_CODE (c))
7336 case OMP_CLAUSE_FIRSTPRIVATE:
7337 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c)
7338 && !by_ref
7339 && is_task_ctx (ctx))
7340 TREE_NO_WARNING (var) = 1;
7341 do_in = true;
7342 break;
7344 case OMP_CLAUSE_PRIVATE:
7345 case OMP_CLAUSE_COPYIN:
7346 case OMP_CLAUSE__LOOPTEMP_:
7347 case OMP_CLAUSE__REDUCTEMP_:
7348 do_in = true;
7349 break;
7351 case OMP_CLAUSE_LASTPRIVATE:
7352 if (by_ref || omp_is_reference (val))
7354 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
7355 continue;
7356 do_in = true;
7358 else
7360 do_out = true;
7361 if (lang_hooks.decls.omp_private_outer_ref (val))
7362 do_in = true;
7364 break;
7366 case OMP_CLAUSE_REDUCTION:
7367 case OMP_CLAUSE_IN_REDUCTION:
7368 do_in = true;
7369 if (val == OMP_CLAUSE_DECL (c))
7371 if (is_task_ctx (ctx))
7372 by_ref = use_pointer_for_field (val, ctx);
7373 else
7374 do_out = !(by_ref || omp_is_reference (val));
7376 else
7377 by_ref = TREE_CODE (TREE_TYPE (val)) == ARRAY_TYPE;
7378 break;
7380 default:
7381 gcc_unreachable ();
7384 if (do_in)
7386 ref = build_sender_ref (val, ctx);
7387 x = by_ref ? build_fold_addr_expr_loc (clause_loc, var) : var;
7388 gimplify_assign (ref, x, ilist);
7389 if (is_task_ctx (ctx))
7390 DECL_ABSTRACT_ORIGIN (TREE_OPERAND (ref, 1)) = NULL;
7393 if (do_out)
7395 ref = build_sender_ref (val, ctx);
7396 gimplify_assign (var, ref, olist);
7401 /* Generate code to implement SHARED from the sender (aka parent)
7402 side. This is trickier, since GIMPLE_OMP_PARALLEL_CLAUSES doesn't
7403 list things that got automatically shared. */
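/* A hedged example: an implicitly shared variable such as

     int x;
     #pragma omp parallel
       x++;

   carries no explicit clause, so the fields of the record type are
   walked instead; by-reference fields get the variable's address
   stored into the sender record, by-value fields get the value
   itself and are copied back afterwards unless read-only.  */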
7405 static void
7406 lower_send_shared_vars (gimple_seq *ilist, gimple_seq *olist, omp_context *ctx)
7408 tree var, ovar, nvar, t, f, x, record_type;
7410 if (ctx->record_type == NULL)
7411 return;
7413 record_type = ctx->srecord_type ? ctx->srecord_type : ctx->record_type;
7414 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
7416 ovar = DECL_ABSTRACT_ORIGIN (f);
7417 if (!ovar || TREE_CODE (ovar) == FIELD_DECL)
7418 continue;
7420 nvar = maybe_lookup_decl (ovar, ctx);
7421 if (!nvar || !DECL_HAS_VALUE_EXPR_P (nvar))
7422 continue;
7424 /* If CTX is a nested parallel directive, find the immediately
7425 enclosing parallel or workshare construct that contains a
7426 mapping for OVAR. */
7427 var = lookup_decl_in_outer_ctx (ovar, ctx);
7429 t = omp_member_access_dummy_var (var);
7430 if (t)
7432 var = DECL_VALUE_EXPR (var);
7433 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
7434 if (o != t)
7435 var = unshare_and_remap (var, t, o);
7436 else
7437 var = unshare_expr (var);
7440 if (use_pointer_for_field (ovar, ctx))
7442 x = build_sender_ref (ovar, ctx);
7443 if (TREE_CODE (TREE_TYPE (f)) == ARRAY_TYPE
7444 && TREE_TYPE (f) == TREE_TYPE (ovar))
7446 gcc_assert (is_parallel_ctx (ctx)
7447 && DECL_ARTIFICIAL (ovar));
7448 /* _condtemp_ clause. */
7449 var = build_constructor (TREE_TYPE (x), NULL);
7451 else
7452 var = build_fold_addr_expr (var);
7453 gimplify_assign (x, var, ilist);
7455 else
7457 x = build_sender_ref (ovar, ctx);
7458 gimplify_assign (x, var, ilist);
7460 if (!TREE_READONLY (var)
7461 /* We don't need to receive a new reference to a result
7462 or parm decl. In fact we may not store to it, as we would
7463 invalidate any pending RSO (return slot optimization) and
7464 generate wrong gimple during inlining. */
7465 && !((TREE_CODE (var) == RESULT_DECL
7466 || TREE_CODE (var) == PARM_DECL)
7467 && DECL_BY_REFERENCE (var)))
7469 x = build_sender_ref (ovar, ctx);
7470 gimplify_assign (var, x, olist);
7476 /* Emit an OpenACC head marker call, encapsulating the partitioning and
7477 other information that must be processed by the target compiler.
7478 Return the maximum number of dimensions the associated loop might
7479 be partitioned over. */
7481 static unsigned
7482 lower_oacc_head_mark (location_t loc, tree ddvar, tree clauses,
7483 gimple_seq *seq, omp_context *ctx)
7485 unsigned levels = 0;
7486 unsigned tag = 0;
7487 tree gang_static = NULL_TREE;
7488 auto_vec<tree, 5> args;
7490 args.quick_push (build_int_cst
7491 (integer_type_node, IFN_UNIQUE_OACC_HEAD_MARK));
7492 args.quick_push (ddvar);
7493 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
7495 switch (OMP_CLAUSE_CODE (c))
7497 case OMP_CLAUSE_GANG:
7498 tag |= OLF_DIM_GANG;
7499 gang_static = OMP_CLAUSE_GANG_STATIC_EXPR (c);
7500 /* static:* is represented by -1, and we can ignore it, as
7501 scheduling is always static. */
7502 if (gang_static && integer_minus_onep (gang_static))
7503 gang_static = NULL_TREE;
7504 levels++;
7505 break;
7507 case OMP_CLAUSE_WORKER:
7508 tag |= OLF_DIM_WORKER;
7509 levels++;
7510 break;
7512 case OMP_CLAUSE_VECTOR:
7513 tag |= OLF_DIM_VECTOR;
7514 levels++;
7515 break;
7517 case OMP_CLAUSE_SEQ:
7518 tag |= OLF_SEQ;
7519 break;
7521 case OMP_CLAUSE_AUTO:
7522 tag |= OLF_AUTO;
7523 break;
7525 case OMP_CLAUSE_INDEPENDENT:
7526 tag |= OLF_INDEPENDENT;
7527 break;
7529 case OMP_CLAUSE_TILE:
7530 tag |= OLF_TILE;
7531 break;
7533 default:
7534 continue;
7538 if (gang_static)
7540 if (DECL_P (gang_static))
7541 gang_static = build_outer_var_ref (gang_static, ctx);
7542 tag |= OLF_GANG_STATIC;
7545 /* In a parallel region, loops are implicitly INDEPENDENT. */
7546 omp_context *tgt = enclosing_target_ctx (ctx);
7547 if (!tgt || is_oacc_parallel_or_serial (tgt))
7548 tag |= OLF_INDEPENDENT;
7550 if (tag & OLF_TILE)
7551 /* Tiling could use all 3 levels. */
7552 levels = 3;
7553 else
7555 /* A loop lacking SEQ, GANG, WORKER and/or VECTOR could be AUTO.
7556 Ensure at least one level, or 2 for possible auto
7557 partitioning.  */
7558 bool maybe_auto = !(tag & (((GOMP_DIM_MASK (GOMP_DIM_MAX) - 1)
7559 << OLF_DIM_BASE) | OLF_SEQ));
7561 if (levels < 1u + maybe_auto)
7562 levels = 1u + maybe_auto;
7565 args.quick_push (build_int_cst (integer_type_node, levels));
7566 args.quick_push (build_int_cst (integer_type_node, tag));
7567 if (gang_static)
7568 args.quick_push (gang_static);
7570 gcall *call = gimple_build_call_internal_vec (IFN_UNIQUE, args);
7571 gimple_set_location (call, loc);
7572 gimple_set_lhs (call, ddvar);
7573 gimple_seq_add_stmt (seq, call);
7575 return levels;
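/* Editorial sketch, not verbatim GIMPLE: for, e.g.,
     #pragma acc loop gang worker
   inside an OpenACC parallel region, the call built above looks
   roughly like
     .data_dep = UNIQUE (OACC_HEAD_MARK, .data_dep, 2, tag);
   where 2 is the partitioning level count, TAG has OLF_DIM_GANG,
   OLF_DIM_WORKER and OLF_INDEPENDENT set, and a gang static
   operand, if present, is appended as a final argument.  */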
7578 /* Emit an OpenACC loop head or tail marker to SEQ. TOFOLLOW, if
7579 non-NULL, is the partitioning level of the enclosed region. */
7581 static void
7582 lower_oacc_loop_marker (location_t loc, tree ddvar, bool head,
7583 tree tofollow, gimple_seq *seq)
7585 int marker_kind = (head ? IFN_UNIQUE_OACC_HEAD_MARK
7586 : IFN_UNIQUE_OACC_TAIL_MARK);
7587 tree marker = build_int_cst (integer_type_node, marker_kind);
7588 int nargs = 2 + (tofollow != NULL_TREE);
7589 gcall *call = gimple_build_call_internal (IFN_UNIQUE, nargs,
7590 marker, ddvar, tofollow);
7591 gimple_set_location (call, loc);
7592 gimple_set_lhs (call, ddvar);
7593 gimple_seq_add_stmt (seq, call);
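/* Illustrative form of the marker built above (a sketch):
     .data_dep = UNIQUE (OACC_HEAD_MARK, .data_dep [, tofollow]);
   or OACC_TAIL_MARK for !HEAD; TOFOLLOW, when non-NULL, carries the
   level count (COUNT for head markers, DONE for tail markers in
   lower_oacc_head_tail below).  */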
7596 /* Generate the before and after OpenACC loop sequences. CLAUSES are
7597 the loop clauses, from which we extract reductions. Initialize
7598 HEAD and TAIL. */
7600 static void
7601 lower_oacc_head_tail (location_t loc, tree clauses,
7602 gimple_seq *head, gimple_seq *tail, omp_context *ctx)
7604 bool inner = false;
7605 tree ddvar = create_tmp_var (integer_type_node, ".data_dep");
7606 gimple_seq_add_stmt (head, gimple_build_assign (ddvar, integer_zero_node));
7608 unsigned count = lower_oacc_head_mark (loc, ddvar, clauses, head, ctx);
7609 tree fork_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_FORK);
7610 tree join_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_JOIN);
7612 gcc_assert (count);
7613 for (unsigned done = 1; count; count--, done++)
7615 gimple_seq fork_seq = NULL;
7616 gimple_seq join_seq = NULL;
7618 tree place = build_int_cst (integer_type_node, -1);
7619 gcall *fork = gimple_build_call_internal (IFN_UNIQUE, 3,
7620 fork_kind, ddvar, place);
7621 gimple_set_location (fork, loc);
7622 gimple_set_lhs (fork, ddvar);
7624 gcall *join = gimple_build_call_internal (IFN_UNIQUE, 3,
7625 join_kind, ddvar, place);
7626 gimple_set_location (join, loc);
7627 gimple_set_lhs (join, ddvar);
7629 /* Mark the beginning of this level sequence. */
7630 if (inner)
7631 lower_oacc_loop_marker (loc, ddvar, true,
7632 build_int_cst (integer_type_node, count),
7633 &fork_seq);
7634 lower_oacc_loop_marker (loc, ddvar, false,
7635 build_int_cst (integer_type_node, done),
7636 &join_seq);
7638 lower_oacc_reductions (loc, clauses, place, inner,
7639 fork, join, &fork_seq, &join_seq, ctx);
7641 /* Append this level to head. */
7642 gimple_seq_add_seq (head, fork_seq);
7643 /* Prepend it to tail. */
7644 gimple_seq_add_seq (&join_seq, *tail);
7645 *tail = join_seq;
7647 inner = true;
7650 /* Mark the end of the sequence. */
7651 lower_oacc_loop_marker (loc, ddvar, true, NULL_TREE, head);
7652 lower_oacc_loop_marker (loc, ddvar, false, NULL_TREE, tail);
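/* A rough picture of what the loop above builds for COUNT == 2
   (editorial sketch; reduction details elided):

     HEAD:  .data_dep = 0;
            OACC_HEAD_MARK (levels, tag);
            FORK; <outer reduction setup>;
            OACC_HEAD_MARK (1);
            FORK; <inner reduction setup>;
            OACC_HEAD_MARK ();

     TAIL:  OACC_TAIL_MARK (2); <inner teardown>; JOIN;
            OACC_TAIL_MARK (1); <outer teardown>; JOIN;
            OACC_TAIL_MARK ();

   i.e. fork/join pairs nest because each level's join sequence is
   prepended to the tail built so far.  */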
7655 /* If exceptions are enabled, wrap the statements in BODY in a MUST_NOT_THROW
7656 catch handler and return it. This prevents programs from violating the
7657 structured block semantics with throws. */
7659 static gimple_seq
7660 maybe_catch_exception (gimple_seq body)
7662 gimple *g;
7663 tree decl;
7665 if (!flag_exceptions)
7666 return body;
7668 if (lang_hooks.eh_protect_cleanup_actions != NULL)
7669 decl = lang_hooks.eh_protect_cleanup_actions ();
7670 else
7671 decl = builtin_decl_explicit (BUILT_IN_TRAP);
7673 g = gimple_build_eh_must_not_throw (decl);
7674 g = gimple_build_try (body, gimple_seq_alloc_with_stmt (g),
7675 GIMPLE_TRY_CATCH);
7677 return gimple_seq_alloc_with_stmt (g);
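/* Conceptually (a sketch, not the exact GIMPLE):
     try { BODY }
     catch { MUST_NOT_THROW: __builtin_trap (), or the language's
             eh_protect_cleanup_actions hook }
   so an exception escaping the structured block terminates the
   program instead of unwinding out of the OMP region.  */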
7681 /* Routines to lower OMP directives into OMP-GIMPLE. */
7683 /* If CTX is a worksharing context inside a cancellable parallel
7684 region and it isn't nowait, add an LHS to its GIMPLE_OMP_RETURN
7685 and a conditional branch to the parallel's cancel_label to handle
7686 cancellation in the implicit barrier. */
7688 static void
7689 maybe_add_implicit_barrier_cancel (omp_context *ctx, gimple *omp_return,
7690 gimple_seq *body)
7692 gcc_assert (gimple_code (omp_return) == GIMPLE_OMP_RETURN);
7693 if (gimple_omp_return_nowait_p (omp_return))
7694 return;
7695 for (omp_context *outer = ctx->outer; outer; outer = outer->outer)
7696 if (gimple_code (outer->stmt) == GIMPLE_OMP_PARALLEL
7697 && outer->cancellable)
7699 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_CANCEL);
7700 tree c_bool_type = TREE_TYPE (TREE_TYPE (fndecl));
7701 tree lhs = create_tmp_var (c_bool_type);
7702 gimple_omp_return_set_lhs (omp_return, lhs);
7703 tree fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
7704 gimple *g = gimple_build_cond (NE_EXPR, lhs,
7705 fold_convert (c_bool_type,
7706 boolean_false_node),
7707 outer->cancel_label, fallthru_label);
7708 gimple_seq_add_stmt (body, g);
7709 gimple_seq_add_stmt (body, gimple_build_label (fallthru_label));
7711 else if (gimple_code (outer->stmt) != GIMPLE_OMP_TASKGROUP)
7712 return;
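/* Sketch of what gets appended to BODY when a cancellable enclosing
   parallel is found (label names illustrative):
     lhs = GIMPLE_OMP_RETURN;           <- non-zero if cancelled
     if (lhs != false) goto cancel_label; else goto fallthru_label;
     fallthru_label:  */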
7715 /* Find the first task_reduction or reduction clause or return NULL
7716 if there are none. */
7718 static inline tree
7719 omp_task_reductions_find_first (tree clauses, enum tree_code code,
7720 enum omp_clause_code ccode)
7722 while (1)
7724 clauses = omp_find_clause (clauses, ccode);
7725 if (clauses == NULL_TREE)
7726 return NULL_TREE;
7727 if (ccode != OMP_CLAUSE_REDUCTION
7728 || code == OMP_TASKLOOP
7729 || OMP_CLAUSE_REDUCTION_TASK (clauses))
7730 return clauses;
7731 clauses = OMP_CLAUSE_CHAIN (clauses);
7735 static void lower_omp_task_reductions (omp_context *, enum tree_code, tree,
7736 gimple_seq *, gimple_seq *);
7738 /* Lower the OpenMP sections directive in the current statement in GSI_P.
7739 CTX is the enclosing OMP context for the current statement. */
7741 static void
7742 lower_omp_sections (gimple_stmt_iterator *gsi_p, omp_context *ctx)
7744 tree block, control;
7745 gimple_stmt_iterator tgsi;
7746 gomp_sections *stmt;
7747 gimple *t;
7748 gbind *new_stmt, *bind;
7749 gimple_seq ilist, dlist, olist, tred_dlist = NULL, clist = NULL, new_body;
7751 stmt = as_a <gomp_sections *> (gsi_stmt (*gsi_p));
7753 push_gimplify_context ();
7755 dlist = NULL;
7756 ilist = NULL;
7758 tree rclauses
7759 = omp_task_reductions_find_first (gimple_omp_sections_clauses (stmt),
7760 OMP_SECTIONS, OMP_CLAUSE_REDUCTION);
7761 tree rtmp = NULL_TREE;
7762 if (rclauses)
7764 tree type = build_pointer_type (pointer_sized_int_node);
7765 tree temp = create_tmp_var (type);
7766 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
7767 OMP_CLAUSE_DECL (c) = temp;
7768 OMP_CLAUSE_CHAIN (c) = gimple_omp_sections_clauses (stmt);
7769 gimple_omp_sections_set_clauses (stmt, c);
7770 lower_omp_task_reductions (ctx, OMP_SECTIONS,
7771 gimple_omp_sections_clauses (stmt),
7772 &ilist, &tred_dlist);
7773 rclauses = c;
7774 rtmp = make_ssa_name (type);
7775 gimple_seq_add_stmt (&ilist, gimple_build_assign (rtmp, temp));
7778 tree *clauses_ptr = gimple_omp_sections_clauses_ptr (stmt);
7779 lower_lastprivate_conditional_clauses (clauses_ptr, ctx);
7781 lower_rec_input_clauses (gimple_omp_sections_clauses (stmt),
7782 &ilist, &dlist, ctx, NULL);
7784 control = create_tmp_var (unsigned_type_node, ".section");
7785 gimple_omp_sections_set_control (stmt, control);
7787 new_body = gimple_omp_body (stmt);
7788 gimple_omp_set_body (stmt, NULL);
7789 tgsi = gsi_start (new_body);
7790 for (; !gsi_end_p (tgsi); gsi_next (&tgsi))
7792 omp_context *sctx;
7793 gimple *sec_start;
7795 sec_start = gsi_stmt (tgsi);
7796 sctx = maybe_lookup_ctx (sec_start);
7797 gcc_assert (sctx);
7799 lower_omp (gimple_omp_body_ptr (sec_start), sctx);
7800 gsi_insert_seq_after (&tgsi, gimple_omp_body (sec_start),
7801 GSI_CONTINUE_LINKING);
7802 gimple_omp_set_body (sec_start, NULL);
7804 if (gsi_one_before_end_p (tgsi))
7806 gimple_seq l = NULL;
7807 lower_lastprivate_clauses (gimple_omp_sections_clauses (stmt), NULL,
7808 &ilist, &l, &clist, ctx);
7809 gsi_insert_seq_after (&tgsi, l, GSI_CONTINUE_LINKING);
7810 gimple_omp_section_set_last (sec_start);
7813 gsi_insert_after (&tgsi, gimple_build_omp_return (false),
7814 GSI_CONTINUE_LINKING);
7817 block = make_node (BLOCK);
7818 bind = gimple_build_bind (NULL, new_body, block);
7820 olist = NULL;
7821 lower_reduction_clauses (gimple_omp_sections_clauses (stmt), &olist,
7822 &clist, ctx);
7823 if (clist)
7825 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START);
7826 gcall *g = gimple_build_call (fndecl, 0);
7827 gimple_seq_add_stmt (&olist, g);
7828 gimple_seq_add_seq (&olist, clist);
7829 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END);
7830 g = gimple_build_call (fndecl, 0);
7831 gimple_seq_add_stmt (&olist, g);
7834 block = make_node (BLOCK);
7835 new_stmt = gimple_build_bind (NULL, NULL, block);
7836 gsi_replace (gsi_p, new_stmt, true);
7838 pop_gimplify_context (new_stmt);
7839 gimple_bind_append_vars (new_stmt, ctx->block_vars);
7840 BLOCK_VARS (block) = gimple_bind_vars (bind);
7841 if (BLOCK_VARS (block))
7842 TREE_USED (block) = 1;
7844 new_body = NULL;
7845 gimple_seq_add_seq (&new_body, ilist);
7846 gimple_seq_add_stmt (&new_body, stmt);
7847 gimple_seq_add_stmt (&new_body, gimple_build_omp_sections_switch ());
7848 gimple_seq_add_stmt (&new_body, bind);
7850 t = gimple_build_omp_continue (control, control);
7851 gimple_seq_add_stmt (&new_body, t);
7853 gimple_seq_add_seq (&new_body, olist);
7854 if (ctx->cancellable)
7855 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
7856 gimple_seq_add_seq (&new_body, dlist);
7858 new_body = maybe_catch_exception (new_body);
7860 bool nowait = omp_find_clause (gimple_omp_sections_clauses (stmt),
7861 OMP_CLAUSE_NOWAIT) != NULL_TREE;
7862 t = gimple_build_omp_return (nowait);
7863 gimple_seq_add_stmt (&new_body, t);
7864 gimple_seq_add_seq (&new_body, tred_dlist);
7865 maybe_add_implicit_barrier_cancel (ctx, t, &new_body);
7867 if (rclauses)
7868 OMP_CLAUSE_DECL (rclauses) = rtmp;
7870 gimple_bind_set_body (new_stmt, new_body);
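/* After this function the directive has been rewritten, roughly, as:
     ILIST                          <- data sharing setup (+ reduction
                                       registration)
     GIMPLE_OMP_SECTIONS <clauses, .section>
     GIMPLE_OMP_SECTIONS_SWITCH
     bind { <lowered section bodies>; GIMPLE_OMP_RETURN }
     GIMPLE_OMP_CONTINUE <.section, .section>
     OLIST                          <- reduction merges
     DLIST                          <- destructors
     GIMPLE_OMP_RETURN [nowait]
     TRED_DLIST                     <- task-reduction teardown
   (an editorial sketch; the actual switch over .section is only
   materialized later by pass_expand_omp).  */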
7874 /* A subroutine of lower_omp_single. Expand the simple form of
7875 a GIMPLE_OMP_SINGLE, without a copyprivate clause:
7877 if (GOMP_single_start ())
7878 BODY;
7879 [ GOMP_barrier (); ] -> unless 'nowait' is present.
7881 FIXME. It may be better to delay expanding the logic of this until
7882 pass_expand_omp. The expanded logic may make the job more difficult
7883 for a synchronization analysis pass. */
7885 static void
7886 lower_omp_single_simple (gomp_single *single_stmt, gimple_seq *pre_p)
7888 location_t loc = gimple_location (single_stmt);
7889 tree tlabel = create_artificial_label (loc);
7890 tree flabel = create_artificial_label (loc);
7891 gimple *call, *cond;
7892 tree lhs, decl;
7894 decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_START);
7895 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (decl)));
7896 call = gimple_build_call (decl, 0);
7897 gimple_call_set_lhs (call, lhs);
7898 gimple_seq_add_stmt (pre_p, call);
7900 cond = gimple_build_cond (EQ_EXPR, lhs,
7901 fold_convert_loc (loc, TREE_TYPE (lhs),
7902 boolean_true_node),
7903 tlabel, flabel);
7904 gimple_seq_add_stmt (pre_p, cond);
7905 gimple_seq_add_stmt (pre_p, gimple_build_label (tlabel));
7906 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
7907 gimple_seq_add_stmt (pre_p, gimple_build_label (flabel));
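/* In GIMPLE terms the expansion above is, schematically:
     lhs = GOMP_single_start ();
     if (lhs == true) goto tlabel; else goto flabel;
     tlabel:
       BODY
     flabel:  */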
7911 /* A subroutine of lower_omp_single. Expand the simple form of
7912 a GIMPLE_OMP_SINGLE, with a copyprivate clause:
7914 #pragma omp single copyprivate (a, b, c)
7916 Create a new structure to hold copies of 'a', 'b' and 'c' and emit:
7919 if ((copyout_p = GOMP_single_copy_start ()) == NULL)
7921 BODY;
7922 copyout.a = a;
7923 copyout.b = b;
7924 copyout.c = c;
7925 GOMP_single_copy_end (&copyout);
7927 else
7929 a = copyout_p->a;
7930 b = copyout_p->b;
7931 c = copyout_p->c;
7933 GOMP_barrier ();
7936 FIXME. It may be better to delay expanding the logic of this until
7937 pass_expand_omp. The expanded logic may make the job more difficult
7938 for a synchronization analysis pass. */
7940 static void
7941 lower_omp_single_copy (gomp_single *single_stmt, gimple_seq *pre_p,
7942 omp_context *ctx)
7944 tree ptr_type, t, l0, l1, l2, bfn_decl;
7945 gimple_seq copyin_seq;
7946 location_t loc = gimple_location (single_stmt);
7948 ctx->sender_decl = create_tmp_var (ctx->record_type, ".omp_copy_o");
7950 ptr_type = build_pointer_type (ctx->record_type);
7951 ctx->receiver_decl = create_tmp_var (ptr_type, ".omp_copy_i");
7953 l0 = create_artificial_label (loc);
7954 l1 = create_artificial_label (loc);
7955 l2 = create_artificial_label (loc);
7957 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_START);
7958 t = build_call_expr_loc (loc, bfn_decl, 0);
7959 t = fold_convert_loc (loc, ptr_type, t);
7960 gimplify_assign (ctx->receiver_decl, t, pre_p);
7962 t = build2 (EQ_EXPR, boolean_type_node, ctx->receiver_decl,
7963 build_int_cst (ptr_type, 0));
7964 t = build3 (COND_EXPR, void_type_node, t,
7965 build_and_jump (&l0), build_and_jump (&l1));
7966 gimplify_and_add (t, pre_p);
7968 gimple_seq_add_stmt (pre_p, gimple_build_label (l0));
7970 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
7972 copyin_seq = NULL;
7973 lower_copyprivate_clauses (gimple_omp_single_clauses (single_stmt), pre_p,
7974 &copyin_seq, ctx);
7976 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
7977 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_END);
7978 t = build_call_expr_loc (loc, bfn_decl, 1, t);
7979 gimplify_and_add (t, pre_p);
7981 t = build_and_jump (&l2);
7982 gimplify_and_add (t, pre_p);
7984 gimple_seq_add_stmt (pre_p, gimple_build_label (l1));
7986 gimple_seq_add_seq (pre_p, copyin_seq);
7988 gimple_seq_add_stmt (pre_p, gimple_build_label (l2));
7992 /* Expand code for an OpenMP single directive. */
7994 static void
7995 lower_omp_single (gimple_stmt_iterator *gsi_p, omp_context *ctx)
7997 tree block;
7998 gomp_single *single_stmt = as_a <gomp_single *> (gsi_stmt (*gsi_p));
7999 gbind *bind;
8000 gimple_seq bind_body, bind_body_tail = NULL, dlist;
8002 push_gimplify_context ();
8004 block = make_node (BLOCK);
8005 bind = gimple_build_bind (NULL, NULL, block);
8006 gsi_replace (gsi_p, bind, true);
8007 bind_body = NULL;
8008 dlist = NULL;
8009 lower_rec_input_clauses (gimple_omp_single_clauses (single_stmt),
8010 &bind_body, &dlist, ctx, NULL);
8011 lower_omp (gimple_omp_body_ptr (single_stmt), ctx);
8013 gimple_seq_add_stmt (&bind_body, single_stmt);
8015 if (ctx->record_type)
8016 lower_omp_single_copy (single_stmt, &bind_body, ctx);
8017 else
8018 lower_omp_single_simple (single_stmt, &bind_body);
8020 gimple_omp_set_body (single_stmt, NULL);
8022 gimple_seq_add_seq (&bind_body, dlist);
8024 bind_body = maybe_catch_exception (bind_body);
8026 bool nowait = omp_find_clause (gimple_omp_single_clauses (single_stmt),
8027 OMP_CLAUSE_NOWAIT) != NULL_TREE;
8028 gimple *g = gimple_build_omp_return (nowait);
8029 gimple_seq_add_stmt (&bind_body_tail, g);
8030 maybe_add_implicit_barrier_cancel (ctx, g, &bind_body_tail);
8031 if (ctx->record_type)
8033 gimple_stmt_iterator gsi = gsi_start (bind_body_tail);
8034 tree clobber = build_clobber (ctx->record_type);
8035 gsi_insert_after (&gsi, gimple_build_assign (ctx->sender_decl,
8036 clobber), GSI_SAME_STMT);
8038 gimple_seq_add_seq (&bind_body, bind_body_tail);
8039 gimple_bind_set_body (bind, bind_body);
8041 pop_gimplify_context (bind);
8043 gimple_bind_append_vars (bind, ctx->block_vars);
8044 BLOCK_VARS (block) = ctx->block_vars;
8045 if (BLOCK_VARS (block))
8046 TREE_USED (block) = 1;
8050 /* Expand code for an OpenMP master directive. */
8052 static void
8053 lower_omp_master (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8055 tree block, lab = NULL, x, bfn_decl;
8056 gimple *stmt = gsi_stmt (*gsi_p);
8057 gbind *bind;
8058 location_t loc = gimple_location (stmt);
8059 gimple_seq tseq;
8061 push_gimplify_context ();
8063 block = make_node (BLOCK);
8064 bind = gimple_build_bind (NULL, NULL, block);
8065 gsi_replace (gsi_p, bind, true);
8066 gimple_bind_add_stmt (bind, stmt);
8068 bfn_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
8069 x = build_call_expr_loc (loc, bfn_decl, 0);
8070 x = build2 (EQ_EXPR, boolean_type_node, x, integer_zero_node);
8071 x = build3 (COND_EXPR, void_type_node, x, NULL, build_and_jump (&lab));
8072 tseq = NULL;
8073 gimplify_and_add (x, &tseq);
8074 gimple_bind_add_seq (bind, tseq);
8076 lower_omp (gimple_omp_body_ptr (stmt), ctx);
8077 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
8078 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
8079 gimple_omp_set_body (stmt, NULL);
8081 gimple_bind_add_stmt (bind, gimple_build_label (lab));
8083 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
8085 pop_gimplify_context (bind);
8087 gimple_bind_append_vars (bind, ctx->block_vars);
8088 BLOCK_VARS (block) = ctx->block_vars;
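/* Sketch of the lowered form:
     GIMPLE_OMP_MASTER
     if (omp_get_thread_num () != 0) goto lab;
     BODY                            <- wrapped in MUST_NOT_THROW
     lab:
     GIMPLE_OMP_RETURN (nowait)
   Only the master thread runs the body; master implies no barrier,
   hence the nowait return.  */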
8091 /* Helper function for lower_omp_task_reductions. For a specific PASS
8092 find the current clause that should be processed, or return false
8093 if all have been processed already. */
8095 static inline bool
8096 omp_task_reduction_iterate (int pass, enum tree_code code,
8097 enum omp_clause_code ccode, tree *c, tree *decl,
8098 tree *type, tree *next)
8100 for (; *c; *c = omp_find_clause (OMP_CLAUSE_CHAIN (*c), ccode))
8102 if (ccode == OMP_CLAUSE_REDUCTION
8103 && code != OMP_TASKLOOP
8104 && !OMP_CLAUSE_REDUCTION_TASK (*c))
8105 continue;
8106 *decl = OMP_CLAUSE_DECL (*c);
8107 *type = TREE_TYPE (*decl);
8108 if (TREE_CODE (*decl) == MEM_REF)
8110 if (pass != 1)
8111 continue;
8113 else
8115 if (omp_is_reference (*decl))
8116 *type = TREE_TYPE (*type);
8117 if (pass != (!TREE_CONSTANT (TYPE_SIZE_UNIT (*type))))
8118 continue;
8120 *next = omp_find_clause (OMP_CLAUSE_CHAIN (*c), ccode);
8121 return true;
8123 *decl = NULL_TREE;
8124 *type = NULL_TREE;
8125 *next = NULL_TREE;
8126 return false;
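/* Editorial note: pass 0 thus picks up clauses whose privatized copy
   has a compile-time constant size, while pass 1 picks up the MEM_REF
   (array section) and variable-sized ones, so fixed-size fields come
   first in the record the caller lays out.  */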
8129 /* Lower task_reduction and reduction clauses (the latter unless CODE is
8130 OMP_TASKGROUP only with task modifier). Register the mapping of those in
8131 the START sequence, and reduce and unregister them in the END sequence. */
8133 static void
8134 lower_omp_task_reductions (omp_context *ctx, enum tree_code code, tree clauses,
8135 gimple_seq *start, gimple_seq *end)
8137 enum omp_clause_code ccode
8138 = (code == OMP_TASKGROUP
8139 ? OMP_CLAUSE_TASK_REDUCTION : OMP_CLAUSE_REDUCTION);
8140 tree cancellable = NULL_TREE;
8141 clauses = omp_task_reductions_find_first (clauses, code, ccode);
8142 if (clauses == NULL_TREE)
8143 return;
8144 if (code == OMP_FOR || code == OMP_SECTIONS)
8146 for (omp_context *outer = ctx->outer; outer; outer = outer->outer)
8147 if (gimple_code (outer->stmt) == GIMPLE_OMP_PARALLEL
8148 && outer->cancellable)
8150 cancellable = error_mark_node;
8151 break;
8153 else if (gimple_code (outer->stmt) != GIMPLE_OMP_TASKGROUP)
8154 break;
8156 tree record_type = lang_hooks.types.make_type (RECORD_TYPE);
8157 tree *last = &TYPE_FIELDS (record_type);
8158 unsigned cnt = 0;
8159 if (cancellable)
8161 tree field = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
8162 ptr_type_node);
8163 tree ifield = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
8164 integer_type_node);
8165 *last = field;
8166 DECL_CHAIN (field) = ifield;
8167 last = &DECL_CHAIN (ifield);
8168 DECL_CONTEXT (field) = record_type;
8169 if (TYPE_ALIGN (record_type) < DECL_ALIGN (field))
8170 SET_TYPE_ALIGN (record_type, DECL_ALIGN (field));
8171 DECL_CONTEXT (ifield) = record_type;
8172 if (TYPE_ALIGN (record_type) < DECL_ALIGN (ifield))
8173 SET_TYPE_ALIGN (record_type, DECL_ALIGN (ifield));
8175 for (int pass = 0; pass < 2; pass++)
8177 tree decl, type, next;
8178 for (tree c = clauses;
8179 omp_task_reduction_iterate (pass, code, ccode,
8180 &c, &decl, &type, &next); c = next)
8182 ++cnt;
8183 tree new_type = type;
8184 if (ctx->outer)
8185 new_type = remap_type (type, &ctx->outer->cb);
8186 tree field
8187 = build_decl (OMP_CLAUSE_LOCATION (c), FIELD_DECL,
8188 DECL_P (decl) ? DECL_NAME (decl) : NULL_TREE,
8189 new_type);
8190 if (DECL_P (decl) && type == TREE_TYPE (decl))
8192 SET_DECL_ALIGN (field, DECL_ALIGN (decl));
8193 DECL_USER_ALIGN (field) = DECL_USER_ALIGN (decl);
8194 TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (decl);
8196 else
8197 SET_DECL_ALIGN (field, TYPE_ALIGN (type));
8198 DECL_CONTEXT (field) = record_type;
8199 if (TYPE_ALIGN (record_type) < DECL_ALIGN (field))
8200 SET_TYPE_ALIGN (record_type, DECL_ALIGN (field));
8201 *last = field;
8202 last = &DECL_CHAIN (field);
8203 tree bfield
8204 = build_decl (OMP_CLAUSE_LOCATION (c), FIELD_DECL, NULL_TREE,
8205 boolean_type_node);
8206 DECL_CONTEXT (bfield) = record_type;
8207 if (TYPE_ALIGN (record_type) < DECL_ALIGN (bfield))
8208 SET_TYPE_ALIGN (record_type, DECL_ALIGN (bfield));
8209 *last = bfield;
8210 last = &DECL_CHAIN (bfield);
8213 *last = NULL_TREE;
8214 layout_type (record_type);
8216 /* Build up an array used to register all the reductions with the runtime
8217 and to deregister them at the end. Format documented in libgomp/task.c. */
8218 tree atype = build_array_type_nelts (pointer_sized_int_node, 7 + cnt * 3);
8219 tree avar = create_tmp_var_raw (atype);
8220 gimple_add_tmp_var (avar);
8221 TREE_ADDRESSABLE (avar) = 1;
8222 tree r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_zero_node,
8223 NULL_TREE, NULL_TREE);
8224 tree t = build_int_cst (pointer_sized_int_node, cnt);
8225 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
8226 gimple_seq seq = NULL;
8227 tree sz = fold_convert (pointer_sized_int_node,
8228 TYPE_SIZE_UNIT (record_type));
8229 int cachesz = 64;
8230 sz = fold_build2 (PLUS_EXPR, pointer_sized_int_node, sz,
8231 build_int_cst (pointer_sized_int_node, cachesz - 1));
8232 sz = fold_build2 (BIT_AND_EXPR, pointer_sized_int_node, sz,
8233 build_int_cst (pointer_sized_int_node, ~(cachesz - 1)));
8234 ctx->task_reductions.create (1 + cnt);
8235 ctx->task_reduction_map = new hash_map<tree, unsigned>;
8236 ctx->task_reductions.quick_push (TREE_CODE (sz) == INTEGER_CST
8237 ? sz : NULL_TREE);
8238 sz = force_gimple_operand (sz, &seq, true, NULL_TREE);
8239 gimple_seq_add_seq (start, seq);
8240 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_one_node,
8241 NULL_TREE, NULL_TREE);
8242 gimple_seq_add_stmt (start, gimple_build_assign (r, sz));
8243 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (2),
8244 NULL_TREE, NULL_TREE);
8245 t = build_int_cst (pointer_sized_int_node,
8246 MAX (TYPE_ALIGN_UNIT (record_type), (unsigned) cachesz));
8247 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
8248 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (3),
8249 NULL_TREE, NULL_TREE);
8250 t = build_int_cst (pointer_sized_int_node, -1);
8251 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
8252 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (4),
8253 NULL_TREE, NULL_TREE);
8254 t = build_int_cst (pointer_sized_int_node, 0);
8255 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
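/* Summary of the header just stored into AVAR (editorial; see
   libgomp/task.c for the authoritative layout):
     [0]  number of reductions (CNT)
     [1]  per-thread chunk size, rounded up to a cache line
     [2]  required alignment (at least the cache-line size)
     [3]  -1, [4]  0: slots reserved for the runtime
   followed by three words per reduction, filled in below.  */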
8257 /* In the END sequence, build a loop that iterates from 0 to omp_get_num_threads () - 1
8258 and for each task reduction checks a bool right after the private variable
8259 within that thread's chunk; if the bool is clear, it hasn't been
8260 initialized and thus isn't going to be reduced nor destructed, otherwise
8261 reduce and destruct it. */
8262 tree idx = create_tmp_var (size_type_node);
8263 gimple_seq_add_stmt (end, gimple_build_assign (idx, size_zero_node));
8264 tree num_thr_sz = create_tmp_var (size_type_node);
8265 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
8266 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
8267 tree lab3 = NULL_TREE;
8268 gimple *g;
8269 if (code == OMP_FOR || code == OMP_SECTIONS)
8271 /* For worksharing constructs, only perform the reduction in the master
8272 thread, with the exception of cancelled implicit barriers, where only
8273 the current thread is handled. */
8274 tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
8275 t = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
8276 tree thr_num = create_tmp_var (integer_type_node);
8277 g = gimple_build_call (t, 0);
8278 gimple_call_set_lhs (g, thr_num);
8279 gimple_seq_add_stmt (end, g);
8280 if (cancellable)
8282 tree c;
8283 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
8284 tree lab6 = create_artificial_label (UNKNOWN_LOCATION);
8285 lab3 = create_artificial_label (UNKNOWN_LOCATION);
8286 if (code == OMP_FOR)
8287 c = gimple_omp_for_clauses (ctx->stmt);
8288 else /* if (code == OMP_SECTIONS) */
8289 c = gimple_omp_sections_clauses (ctx->stmt);
8290 c = OMP_CLAUSE_DECL (omp_find_clause (c, OMP_CLAUSE__REDUCTEMP_));
8291 cancellable = c;
8292 g = gimple_build_cond (NE_EXPR, c, build_zero_cst (TREE_TYPE (c)),
8293 lab5, lab6);
8294 gimple_seq_add_stmt (end, g);
8295 gimple_seq_add_stmt (end, gimple_build_label (lab5));
8296 g = gimple_build_assign (idx, NOP_EXPR, thr_num);
8297 gimple_seq_add_stmt (end, g);
8298 g = gimple_build_assign (num_thr_sz, PLUS_EXPR, idx,
8299 build_one_cst (TREE_TYPE (idx)));
8300 gimple_seq_add_stmt (end, g);
8301 gimple_seq_add_stmt (end, gimple_build_goto (lab3));
8302 gimple_seq_add_stmt (end, gimple_build_label (lab6));
8304 g = gimple_build_cond (NE_EXPR, thr_num, integer_zero_node, lab2, lab4);
8305 gimple_seq_add_stmt (end, g);
8306 gimple_seq_add_stmt (end, gimple_build_label (lab4));
8308 if (code != OMP_PARALLEL)
8310 t = builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS);
8311 tree num_thr = create_tmp_var (integer_type_node);
8312 g = gimple_build_call (t, 0);
8313 gimple_call_set_lhs (g, num_thr);
8314 gimple_seq_add_stmt (end, g);
8315 g = gimple_build_assign (num_thr_sz, NOP_EXPR, num_thr);
8316 gimple_seq_add_stmt (end, g);
8317 if (cancellable)
8318 gimple_seq_add_stmt (end, gimple_build_label (lab3));
8320 else
8322 tree c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
8323 OMP_CLAUSE__REDUCTEMP_);
8324 t = fold_convert (pointer_sized_int_node, OMP_CLAUSE_DECL (c));
8325 t = fold_convert (size_type_node, t);
8326 gimplify_assign (num_thr_sz, t, end);
8328 t = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (2),
8329 NULL_TREE, NULL_TREE);
8330 tree data = create_tmp_var (pointer_sized_int_node);
8331 gimple_seq_add_stmt (end, gimple_build_assign (data, t));
8332 gimple_seq_add_stmt (end, gimple_build_label (lab1));
8333 tree ptr;
8334 if (TREE_CODE (TYPE_SIZE_UNIT (record_type)) == INTEGER_CST)
8335 ptr = create_tmp_var (build_pointer_type (record_type));
8336 else
8337 ptr = create_tmp_var (ptr_type_node);
8338 gimple_seq_add_stmt (end, gimple_build_assign (ptr, NOP_EXPR, data));
8340 tree field = TYPE_FIELDS (record_type);
8341 cnt = 0;
8342 if (cancellable)
8343 field = DECL_CHAIN (DECL_CHAIN (field));
8344 for (int pass = 0; pass < 2; pass++)
8346 tree decl, type, next;
8347 for (tree c = clauses;
8348 omp_task_reduction_iterate (pass, code, ccode,
8349 &c, &decl, &type, &next); c = next)
8351 tree var = decl, ref;
8352 if (TREE_CODE (decl) == MEM_REF)
8354 var = TREE_OPERAND (var, 0);
8355 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
8356 var = TREE_OPERAND (var, 0);
8357 tree v = var;
8358 if (TREE_CODE (var) == ADDR_EXPR)
8359 var = TREE_OPERAND (var, 0);
8360 else if (TREE_CODE (var) == INDIRECT_REF)
8361 var = TREE_OPERAND (var, 0);
8362 tree orig_var = var;
8363 if (is_variable_sized (var))
8365 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
8366 var = DECL_VALUE_EXPR (var);
8367 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
8368 var = TREE_OPERAND (var, 0);
8369 gcc_assert (DECL_P (var));
8371 t = ref = maybe_lookup_decl_in_outer_ctx (var, ctx);
8372 if (orig_var != var)
8373 gcc_assert (TREE_CODE (v) == ADDR_EXPR);
8374 else if (TREE_CODE (v) == ADDR_EXPR)
8375 t = build_fold_addr_expr (t);
8376 else if (TREE_CODE (v) == INDIRECT_REF)
8377 t = build_fold_indirect_ref (t);
8378 if (TREE_CODE (TREE_OPERAND (decl, 0)) == POINTER_PLUS_EXPR)
8380 tree b = TREE_OPERAND (TREE_OPERAND (decl, 0), 1);
8381 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
8382 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, b);
8384 if (!integer_zerop (TREE_OPERAND (decl, 1)))
8385 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t,
8386 fold_convert (size_type_node,
8387 TREE_OPERAND (decl, 1)));
8389 else
8391 t = ref = maybe_lookup_decl_in_outer_ctx (var, ctx);
8392 if (!omp_is_reference (decl))
8393 t = build_fold_addr_expr (t);
8395 t = fold_convert (pointer_sized_int_node, t);
8396 seq = NULL;
8397 t = force_gimple_operand (t, &seq, true, NULL_TREE);
8398 gimple_seq_add_seq (start, seq);
8399 r = build4 (ARRAY_REF, pointer_sized_int_node, avar,
8400 size_int (7 + cnt * 3), NULL_TREE, NULL_TREE);
8401 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
8402 t = unshare_expr (byte_position (field));
8403 t = fold_convert (pointer_sized_int_node, t);
8404 ctx->task_reduction_map->put (c, cnt);
8405 ctx->task_reductions.quick_push (TREE_CODE (t) == INTEGER_CST
8406 ? t : NULL_TREE);
8407 seq = NULL;
8408 t = force_gimple_operand (t, &seq, true, NULL_TREE);
8409 gimple_seq_add_seq (start, seq);
8410 r = build4 (ARRAY_REF, pointer_sized_int_node, avar,
8411 size_int (7 + cnt * 3 + 1), NULL_TREE, NULL_TREE);
8412 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
8414 tree bfield = DECL_CHAIN (field);
8415 tree cond;
8416 if (code == OMP_PARALLEL || code == OMP_FOR || code == OMP_SECTIONS)
8417 /* In parallel or worksharing all threads unconditionally
8418 initialize all their task reduction private variables. */
8419 cond = boolean_true_node;
8420 else if (TREE_TYPE (ptr) == ptr_type_node)
8422 cond = build2 (POINTER_PLUS_EXPR, ptr_type_node, ptr,
8423 unshare_expr (byte_position (bfield)));
8424 seq = NULL;
8425 cond = force_gimple_operand (cond, &seq, true, NULL_TREE);
8426 gimple_seq_add_seq (end, seq);
8427 tree pbool = build_pointer_type (TREE_TYPE (bfield));
8428 cond = build2 (MEM_REF, TREE_TYPE (bfield), cond,
8429 build_int_cst (pbool, 0));
8431 else
8432 cond = build3 (COMPONENT_REF, TREE_TYPE (bfield),
8433 build_simple_mem_ref (ptr), bfield, NULL_TREE);
8434 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
8435 tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
8436 tree condv = create_tmp_var (boolean_type_node);
8437 gimple_seq_add_stmt (end, gimple_build_assign (condv, cond));
8438 g = gimple_build_cond (NE_EXPR, condv, boolean_false_node,
8439 lab3, lab4);
8440 gimple_seq_add_stmt (end, g);
8441 gimple_seq_add_stmt (end, gimple_build_label (lab3));
8442 if (cancellable && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) == NULL_TREE)
8444 /* If this reduction doesn't need destruction and parallel
8445 has been cancelled, there is nothing to do for this
8446 reduction, so jump around the merge operation. */
8447 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
8448 g = gimple_build_cond (NE_EXPR, cancellable,
8449 build_zero_cst (TREE_TYPE (cancellable)),
8450 lab4, lab5);
8451 gimple_seq_add_stmt (end, g);
8452 gimple_seq_add_stmt (end, gimple_build_label (lab5));
8455 tree new_var;
8456 if (TREE_TYPE (ptr) == ptr_type_node)
8458 new_var = build2 (POINTER_PLUS_EXPR, ptr_type_node, ptr,
8459 unshare_expr (byte_position (field)));
8460 seq = NULL;
8461 new_var = force_gimple_operand (new_var, &seq, true, NULL_TREE);
8462 gimple_seq_add_seq (end, seq);
8463 tree pbool = build_pointer_type (TREE_TYPE (field));
8464 new_var = build2 (MEM_REF, TREE_TYPE (field), new_var,
8465 build_int_cst (pbool, 0));
8467 else
8468 new_var = build3 (COMPONENT_REF, TREE_TYPE (field),
8469 build_simple_mem_ref (ptr), field, NULL_TREE);
8471 enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
8472 if (TREE_CODE (decl) != MEM_REF && omp_is_reference (decl))
8473 ref = build_simple_mem_ref (ref);
8474 /* reduction(-:var) sums up the partial results, so it acts
8475 identically to reduction(+:var). */
8476 if (rcode == MINUS_EXPR)
8477 rcode = PLUS_EXPR;
8478 if (TREE_CODE (decl) == MEM_REF)
8480 tree type = TREE_TYPE (new_var);
8481 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
8482 tree i = create_tmp_var (TREE_TYPE (v));
8483 tree ptype = build_pointer_type (TREE_TYPE (type));
8484 if (DECL_P (v))
8486 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
8487 tree vv = create_tmp_var (TREE_TYPE (v));
8488 gimplify_assign (vv, v, start);
8489 v = vv;
8491 ref = build4 (ARRAY_REF, pointer_sized_int_node, avar,
8492 size_int (7 + cnt * 3), NULL_TREE, NULL_TREE);
8493 new_var = build_fold_addr_expr (new_var);
8494 new_var = fold_convert (ptype, new_var);
8495 ref = fold_convert (ptype, ref);
8496 tree m = create_tmp_var (ptype);
8497 gimplify_assign (m, new_var, end);
8498 new_var = m;
8499 m = create_tmp_var (ptype);
8500 gimplify_assign (m, ref, end);
8501 ref = m;
8502 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), end);
8503 tree body = create_artificial_label (UNKNOWN_LOCATION);
8504 tree endl = create_artificial_label (UNKNOWN_LOCATION);
8505 gimple_seq_add_stmt (end, gimple_build_label (body));
8506 tree priv = build_simple_mem_ref (new_var);
8507 tree out = build_simple_mem_ref (ref);
8508 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
8510 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
8511 tree decl_placeholder
8512 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
8513 tree lab6 = NULL_TREE;
8514 if (cancellable)
8516 /* If this reduction needs destruction and parallel
8517 has been cancelled, jump around the merge operation
8518 to the destruction. */
8519 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
8520 lab6 = create_artificial_label (UNKNOWN_LOCATION);
8521 tree zero = build_zero_cst (TREE_TYPE (cancellable));
8522 g = gimple_build_cond (NE_EXPR, cancellable, zero,
8523 lab6, lab5);
8524 gimple_seq_add_stmt (end, g);
8525 gimple_seq_add_stmt (end, gimple_build_label (lab5));
8527 SET_DECL_VALUE_EXPR (placeholder, out);
8528 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
8529 SET_DECL_VALUE_EXPR (decl_placeholder, priv);
8530 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
8531 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
8532 gimple_seq_add_seq (end,
8533 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
8534 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
8535 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
8537 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
8538 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
8540 if (cancellable)
8541 gimple_seq_add_stmt (end, gimple_build_label (lab6));
8542 tree x = lang_hooks.decls.omp_clause_dtor (c, priv);
8543 if (x)
8545 gimple_seq tseq = NULL;
8546 gimplify_stmt (&x, &tseq);
8547 gimple_seq_add_seq (end, tseq);
8550 else
8552 tree x = build2 (rcode, TREE_TYPE (out), out, priv);
8553 out = unshare_expr (out);
8554 gimplify_assign (out, x, end);
8556 gimple *g
8557 = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
8558 TYPE_SIZE_UNIT (TREE_TYPE (type)));
8559 gimple_seq_add_stmt (end, g);
8560 g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
8561 TYPE_SIZE_UNIT (TREE_TYPE (type)));
8562 gimple_seq_add_stmt (end, g);
8563 g = gimple_build_assign (i, PLUS_EXPR, i,
8564 build_int_cst (TREE_TYPE (i), 1));
8565 gimple_seq_add_stmt (end, g);
8566 g = gimple_build_cond (LE_EXPR, i, v, body, endl);
8567 gimple_seq_add_stmt (end, g);
8568 gimple_seq_add_stmt (end, gimple_build_label (endl));
8570 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
8572 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
8573 tree oldv = NULL_TREE;
8574 tree lab6 = NULL_TREE;
8575 if (cancellable)
8577 /* If this reduction needs destruction and parallel
8578 has been cancelled, jump around the merge operation
8579 to the destruction. */
8580 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
8581 lab6 = create_artificial_label (UNKNOWN_LOCATION);
8582 tree zero = build_zero_cst (TREE_TYPE (cancellable));
8583 g = gimple_build_cond (NE_EXPR, cancellable, zero,
8584 lab6, lab5);
8585 gimple_seq_add_stmt (end, g);
8586 gimple_seq_add_stmt (end, gimple_build_label (lab5));
8588 if (omp_is_reference (decl)
8589 && !useless_type_conversion_p (TREE_TYPE (placeholder),
8590 TREE_TYPE (ref)))
8591 ref = build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c), ref);
8592 ref = build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c), ref);
8593 tree refv = create_tmp_var (TREE_TYPE (ref));
8594 gimplify_assign (refv, ref, end);
8595 ref = build_simple_mem_ref_loc (OMP_CLAUSE_LOCATION (c), refv);
8596 SET_DECL_VALUE_EXPR (placeholder, ref);
8597 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
8598 tree d = maybe_lookup_decl (decl, ctx);
8599 gcc_assert (d);
8600 if (DECL_HAS_VALUE_EXPR_P (d))
8601 oldv = DECL_VALUE_EXPR (d);
8602 if (omp_is_reference (var))
8604 tree v = fold_convert (TREE_TYPE (d),
8605 build_fold_addr_expr (new_var));
8606 SET_DECL_VALUE_EXPR (d, v);
8608 else
8609 SET_DECL_VALUE_EXPR (d, new_var);
8610 DECL_HAS_VALUE_EXPR_P (d) = 1;
8611 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
8612 if (oldv)
8613 SET_DECL_VALUE_EXPR (d, oldv);
8614 else
8616 SET_DECL_VALUE_EXPR (d, NULL_TREE);
8617 DECL_HAS_VALUE_EXPR_P (d) = 0;
8619 gimple_seq_add_seq (end, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
8620 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
8621 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
8622 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
8623 if (cancellable)
8624 gimple_seq_add_stmt (end, gimple_build_label (lab6));
8625 tree x = lang_hooks.decls.omp_clause_dtor (c, new_var);
8626 if (x)
8628 gimple_seq tseq = NULL;
8629 gimplify_stmt (&x, &tseq);
8630 gimple_seq_add_seq (end, tseq);
8633 else
8635 tree x = build2 (rcode, TREE_TYPE (ref), ref, new_var);
8636 ref = unshare_expr (ref);
8637 gimplify_assign (ref, x, end);
8639 gimple_seq_add_stmt (end, gimple_build_label (lab4));
8640 ++cnt;
8641 field = DECL_CHAIN (bfield);
8645 if (code == OMP_TASKGROUP)
8647 t = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_REGISTER);
8648 g = gimple_build_call (t, 1, build_fold_addr_expr (avar));
8649 gimple_seq_add_stmt (start, g);
8651 else
8653 tree c;
8654 if (code == OMP_FOR)
8655 c = gimple_omp_for_clauses (ctx->stmt);
8656 else if (code == OMP_SECTIONS)
8657 c = gimple_omp_sections_clauses (ctx->stmt);
8658 else
8659 c = gimple_omp_taskreg_clauses (ctx->stmt);
8660 c = omp_find_clause (c, OMP_CLAUSE__REDUCTEMP_);
8661 t = fold_convert (TREE_TYPE (OMP_CLAUSE_DECL (c)),
8662 build_fold_addr_expr (avar));
8663 gimplify_assign (OMP_CLAUSE_DECL (c), t, start);
8666 gimple_seq_add_stmt (end, gimple_build_assign (data, PLUS_EXPR, data, sz));
8667 gimple_seq_add_stmt (end, gimple_build_assign (idx, PLUS_EXPR, idx,
8668 size_one_node));
8669 g = gimple_build_cond (NE_EXPR, idx, num_thr_sz, lab1, lab2);
8670 gimple_seq_add_stmt (end, g);
8671 gimple_seq_add_stmt (end, gimple_build_label (lab2));
8672 if (code == OMP_FOR || code == OMP_SECTIONS)
8674 enum built_in_function bfn
8675 = BUILT_IN_GOMP_WORKSHARE_TASK_REDUCTION_UNREGISTER;
8676 t = builtin_decl_explicit (bfn);
8677 tree c_bool_type = TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (t)));
8678 tree arg;
8679 if (cancellable)
8681 arg = create_tmp_var (c_bool_type);
8682 gimple_seq_add_stmt (end, gimple_build_assign (arg, NOP_EXPR,
8683 cancellable));
8685 else
8686 arg = build_int_cst (c_bool_type, 0);
8687 g = gimple_build_call (t, 1, arg);
8689 else
8691 t = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_UNREGISTER);
8692 g = gimple_build_call (t, 1, build_fold_addr_expr (avar));
8694 gimple_seq_add_stmt (end, g);
8695 t = build_constructor (atype, NULL);
8696 TREE_THIS_VOLATILE (t) = 1;
8697 gimple_seq_add_stmt (end, gimple_build_assign (avar, t));
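/* Putting the above together, END roughly becomes (sketch, with the
   cancellation shortcuts omitted):
     idx = 0;
     num_thr_sz = <thread count, or the _reductemp_-derived count>;
     data = avar[2];    <- expected to have been overwritten by the
                           runtime with the reduction buffer's base
     lab1:
       ptr = (T *) data;
       <for each reduction: if its init flag is set, merge into the
        original and run the destructor>
       data += sz;  idx += 1;
       if (idx != num_thr_sz) goto lab1;
     lab2:
     GOMP_..._unregister (...);
     avar = {};         <- volatile clobbering store  */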
8700 /* Expand code for an OpenMP taskgroup directive. */
8702 static void
8703 lower_omp_taskgroup (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8705 gimple *stmt = gsi_stmt (*gsi_p);
8706 gcall *x;
8707 gbind *bind;
8708 gimple_seq dseq = NULL;
8709 tree block = make_node (BLOCK);
8711 bind = gimple_build_bind (NULL, NULL, block);
8712 gsi_replace (gsi_p, bind, true);
8713 gimple_bind_add_stmt (bind, stmt);
8715 push_gimplify_context ();
8717 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_START),
8719 gimple_bind_add_stmt (bind, x);
8721 lower_omp_task_reductions (ctx, OMP_TASKGROUP,
8722 gimple_omp_taskgroup_clauses (stmt),
8723 gimple_bind_body_ptr (bind), &dseq);
8725 lower_omp (gimple_omp_body_ptr (stmt), ctx);
8726 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
8727 gimple_omp_set_body (stmt, NULL);
8729 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
8730 gimple_bind_add_seq (bind, dseq);
8732 pop_gimplify_context (bind);
8734 gimple_bind_append_vars (bind, ctx->block_vars);
8735 BLOCK_VARS (block) = ctx->block_vars;
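/* Sketch of the lowered taskgroup:
     bind {
       GIMPLE_OMP_TASKGROUP
       GOMP_taskgroup_start ();
       <task-reduction registration>
       BODY
       GIMPLE_OMP_RETURN
       DSEQ               <- task-reduction teardown
     }
   The matching GOMP_taskgroup_end call is not emitted here; that
   pairing is completed when the region is expanded.  */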
8739 /* Fold the OMP_ORDERED_CLAUSES for the OMP_ORDERED in STMT if possible. */
8741 static void
8742 lower_omp_ordered_clauses (gimple_stmt_iterator *gsi_p, gomp_ordered *ord_stmt,
8743 omp_context *ctx)
8745 struct omp_for_data fd;
8746 if (!ctx->outer || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR)
8747 return;
8749 unsigned int len = gimple_omp_for_collapse (ctx->outer->stmt);
8750 struct omp_for_data_loop *loops = XALLOCAVEC (struct omp_for_data_loop, len);
8751 omp_extract_for_data (as_a <gomp_for *> (ctx->outer->stmt), &fd, loops);
8752 if (!fd.ordered)
8753 return;
8755 tree *list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
8756 tree c = gimple_omp_ordered_clauses (ord_stmt);
8757 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
8758 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
8760 /* Merge depend clauses from multiple adjacent
8761 #pragma omp ordered depend(sink:...) constructs
8762 into one #pragma omp ordered depend(sink:...), so that
8763 we can optimize them together. */
8764 gimple_stmt_iterator gsi = *gsi_p;
8765 gsi_next (&gsi);
8766 while (!gsi_end_p (gsi))
8768 gimple *stmt = gsi_stmt (gsi);
8769 if (is_gimple_debug (stmt)
8770 || gimple_code (stmt) == GIMPLE_NOP)
8772 gsi_next (&gsi);
8773 continue;
8775 if (gimple_code (stmt) != GIMPLE_OMP_ORDERED)
8776 break;
8777 gomp_ordered *ord_stmt2 = as_a <gomp_ordered *> (stmt);
8778 c = gimple_omp_ordered_clauses (ord_stmt2);
8779 if (c == NULL_TREE
8780 || OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND
8781 || OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
8782 break;
8783 while (*list_p)
8784 list_p = &OMP_CLAUSE_CHAIN (*list_p);
8785 *list_p = c;
8786 gsi_remove (&gsi, true);
8790 /* Canonicalize sink dependence clauses into one folded clause if
8791 possible.
8793 The basic algorithm is to create a sink vector whose first
8794 element is the GCD of all the first elements, and whose remaining
8795 elements are the minimum of the subsequent columns.
8797 We ignore dependence vectors whose first element is zero because
8798 such dependencies are known to be executed by the same thread.
8800 We take into account the direction of the loop, so a minimum
8801 becomes a maximum if the loop is iterating forwards. We also
8802 ignore sink clauses where the loop direction is unknown, or where
8803 the offsets are clearly invalid because they are not a multiple
8804 of the loop increment.
8806 For example:
8808 #pragma omp for ordered(2)
8809 for (i=0; i < N; ++i)
8810 for (j=0; j < M; ++j)
8812 #pragma omp ordered \
8813 depend(sink:i-8,j-2) \
8814 depend(sink:i,j-1) \ // Completely ignored because i+0.
8815 depend(sink:i-4,j-3) \
8816 depend(sink:i-6,j-4)
8817 #pragma omp ordered depend(source)
8820 Folded clause is:
8822 depend(sink:-gcd(8,4,6),-min(2,3,4))
8823 -or-
8824 depend(sink:-2,-2)
8827 /* FIXME: Computing GCD's where the first element is zero is
8828 non-trivial in the presence of collapsed loops. Do this later. */
8829 if (fd.collapse > 1)
8830 return;
8832 wide_int *folded_deps = XALLOCAVEC (wide_int, 2 * len - 1);
8834 /* wide_int is not a POD so it must be default-constructed. */
8835 for (unsigned i = 0; i != 2 * len - 1; ++i)
8836 new (static_cast<void*>(folded_deps + i)) wide_int ();
8838 tree folded_dep = NULL_TREE;
8839 /* TRUE if the first dimension's offset is negative. */
8840 bool neg_offset_p = false;
8842 list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
8843 unsigned int i;
8844 while ((c = *list_p) != NULL)
8846 bool remove = false;
8848 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND);
8849 if (OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
8850 goto next_ordered_clause;
8852 tree vec;
8853 for (vec = OMP_CLAUSE_DECL (c), i = 0;
8854 vec && TREE_CODE (vec) == TREE_LIST;
8855 vec = TREE_CHAIN (vec), ++i)
8857 gcc_assert (i < len);
8859 /* omp_extract_for_data has canonicalized the condition. */
8860 gcc_assert (fd.loops[i].cond_code == LT_EXPR
8861 || fd.loops[i].cond_code == GT_EXPR);
8862 bool forward = fd.loops[i].cond_code == LT_EXPR;
8863 bool maybe_lexically_later = true;
8865 /* While the committee makes up its mind, bail if we have any
8866 non-constant steps. */
8867 if (TREE_CODE (fd.loops[i].step) != INTEGER_CST)
8868 goto lower_omp_ordered_ret;
8870 tree itype = TREE_TYPE (TREE_VALUE (vec));
8871 if (POINTER_TYPE_P (itype))
8872 itype = sizetype;
8873 wide_int offset = wide_int::from (wi::to_wide (TREE_PURPOSE (vec)),
8874 TYPE_PRECISION (itype),
8875 TYPE_SIGN (itype));
8877 /* Ignore invalid offsets that are not multiples of the step. */
8878 if (!wi::multiple_of_p (wi::abs (offset),
8879 wi::abs (wi::to_wide (fd.loops[i].step)),
8880 UNSIGNED))
8882 warning_at (OMP_CLAUSE_LOCATION (c), 0,
8883 "ignoring sink clause with offset that is not "
8884 "a multiple of the loop step");
8885 remove = true;
8886 goto next_ordered_clause;
8889 /* Calculate the first dimension. The first dimension of
8890 the folded dependency vector is the GCD of the first
8891 elements, while ignoring any first elements whose offset
8892 is 0. */
8893 if (i == 0)
8895 /* Ignore dependence vectors whose first dimension is 0. */
8896 if (offset == 0)
8898 remove = true;
8899 goto next_ordered_clause;
8901 else
8903 if (!TYPE_UNSIGNED (itype) && (forward ^ wi::neg_p (offset)))
8905 error_at (OMP_CLAUSE_LOCATION (c),
8906 "first offset must be in opposite direction "
8907 "of loop iterations");
8908 goto lower_omp_ordered_ret;
8910 if (forward)
8911 offset = -offset;
8912 neg_offset_p = forward;
8913 /* Initialize the first time around. */
8914 if (folded_dep == NULL_TREE)
8916 folded_dep = c;
8917 folded_deps[0] = offset;
8919 else
8920 folded_deps[0] = wi::gcd (folded_deps[0],
8921 offset, UNSIGNED);
8924 /* Calculate minimum for the remaining dimensions. */
8925 else
8927 folded_deps[len + i - 1] = offset;
8928 if (folded_dep == c)
8929 folded_deps[i] = offset;
8930 else if (maybe_lexically_later
8931 && !wi::eq_p (folded_deps[i], offset))
8933 if (forward ^ wi::gts_p (folded_deps[i], offset))
8935 unsigned int j;
8936 folded_dep = c;
8937 for (j = 1; j <= i; j++)
8938 folded_deps[j] = folded_deps[len + j - 1];
8940 else
8941 maybe_lexically_later = false;
8945 gcc_assert (i == len);
8947 remove = true;
8949 next_ordered_clause:
8950 if (remove)
8951 *list_p = OMP_CLAUSE_CHAIN (c);
8952 else
8953 list_p = &OMP_CLAUSE_CHAIN (c);
8956 if (folded_dep)
8958 if (neg_offset_p)
8959 folded_deps[0] = -folded_deps[0];
8961 tree itype = TREE_TYPE (TREE_VALUE (OMP_CLAUSE_DECL (folded_dep)));
8962 if (POINTER_TYPE_P (itype))
8963 itype = sizetype;
8965 TREE_PURPOSE (OMP_CLAUSE_DECL (folded_dep))
8966 = wide_int_to_tree (itype, folded_deps[0]);
8967 OMP_CLAUSE_CHAIN (folded_dep) = gimple_omp_ordered_clauses (ord_stmt);
8968 *gimple_omp_ordered_clauses_ptr (ord_stmt) = folded_dep;
8971 lower_omp_ordered_ret:
8973 /* Ordered without clauses is equivalent to #pragma omp ordered threads,
8974 while we want a nop instead if we remove all clauses. */
8975 if (gimple_omp_ordered_clauses (ord_stmt) == NULL_TREE)
8976 gsi_replace (gsi_p, gimple_build_nop (), true);
8980 /* Expand code for an OpenMP ordered directive. */
8982 static void
8983 lower_omp_ordered (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8985 tree block;
8986 gimple *stmt = gsi_stmt (*gsi_p), *g;
8987 gomp_ordered *ord_stmt = as_a <gomp_ordered *> (stmt);
8988 gcall *x;
8989 gbind *bind;
8990 bool simd = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
8991 OMP_CLAUSE_SIMD);
8992 /* FIXME: this should check presence of OMP_CLAUSE__SIMT_ on the enclosing
8993 loop. */
8994 bool maybe_simt
8995 = simd && omp_maybe_offloaded_ctx (ctx) && omp_max_simt_vf () > 1;
8996 bool threads = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
8997 OMP_CLAUSE_THREADS);
8999 if (omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
9000 OMP_CLAUSE_DEPEND))
9002 /* FIXME: This needs to be moved to the expansion to verify various
9003 conditions only testable on cfg with dominators computed, and also
9004 all the depend clauses to be merged still might need to be available
9005 for the runtime checks. */
9006 if (0)
9007 lower_omp_ordered_clauses (gsi_p, ord_stmt, ctx);
9008 return;
9011 push_gimplify_context ();
9013 block = make_node (BLOCK);
9014 bind = gimple_build_bind (NULL, NULL, block);
9015 gsi_replace (gsi_p, bind, true);
9016 gimple_bind_add_stmt (bind, stmt);
9018 if (simd)
9020 x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_START, 1,
9021 build_int_cst (NULL_TREE, threads));
9022 cfun->has_simduid_loops = true;
9024 else
9025 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_START),
9027 gimple_bind_add_stmt (bind, x);
9029 tree counter = NULL_TREE, test = NULL_TREE, body = NULL_TREE;
9030 if (maybe_simt)
9032 counter = create_tmp_var (integer_type_node);
9033 g = gimple_build_call_internal (IFN_GOMP_SIMT_LANE, 0);
9034 gimple_call_set_lhs (g, counter);
9035 gimple_bind_add_stmt (bind, g);
9037 body = create_artificial_label (UNKNOWN_LOCATION);
9038 test = create_artificial_label (UNKNOWN_LOCATION);
9039 gimple_bind_add_stmt (bind, gimple_build_label (body));
9041 tree simt_pred = create_tmp_var (integer_type_node);
9042 g = gimple_build_call_internal (IFN_GOMP_SIMT_ORDERED_PRED, 1, counter);
9043 gimple_call_set_lhs (g, simt_pred);
9044 gimple_bind_add_stmt (bind, g);
9046 tree t = create_artificial_label (UNKNOWN_LOCATION);
9047 g = gimple_build_cond (EQ_EXPR, simt_pred, integer_zero_node, t, test);
9048 gimple_bind_add_stmt (bind, g);
9050 gimple_bind_add_stmt (bind, gimple_build_label (t));
9052 lower_omp (gimple_omp_body_ptr (stmt), ctx);
9053 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
9054 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
9055 gimple_omp_set_body (stmt, NULL);
9057 if (maybe_simt)
9059 gimple_bind_add_stmt (bind, gimple_build_label (test));
9060 g = gimple_build_assign (counter, MINUS_EXPR, counter, integer_one_node);
9061 gimple_bind_add_stmt (bind, g);
9063 tree c = build2 (GE_EXPR, boolean_type_node, counter, integer_zero_node);
9064 tree nonneg = create_tmp_var (integer_type_node);
9065 gimple_seq tseq = NULL;
9066 gimplify_assign (nonneg, fold_convert (integer_type_node, c), &tseq);
9067 gimple_bind_add_seq (bind, tseq);
9069 g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY, 1, nonneg);
9070 gimple_call_set_lhs (g, nonneg);
9071 gimple_bind_add_stmt (bind, g);
9073 tree end = create_artificial_label (UNKNOWN_LOCATION);
9074 g = gimple_build_cond (NE_EXPR, nonneg, integer_zero_node, body, end);
9075 gimple_bind_add_stmt (bind, g);
9077 gimple_bind_add_stmt (bind, gimple_build_label (end));
9079 if (simd)
9080 x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_END, 1,
9081 build_int_cst (NULL_TREE, threads));
9082 else
9083 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_END),
9085 gimple_bind_add_stmt (bind, x);
9087 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
9089 pop_gimplify_context (bind);
9091 gimple_bind_append_vars (bind, ctx->block_vars);
9092 BLOCK_VARS (block) = gimple_bind_vars (bind);
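/* Sketch of the result for the non-depend case:
     GOMP_ordered_start ();      <- GOMP_SIMD_ORDERED_START for simd
     BODY                        <- in MUST_NOT_THROW if needed
     GOMP_ordered_end ();        <- GOMP_SIMD_ORDERED_END for simd
     GIMPLE_OMP_RETURN (nowait)
   For MAYBE_SIMT the body is additionally iterated per SIMT lane:
   each lane runs the body when GOMP_SIMT_ORDERED_PRED selects it,
   and GOMP_SIMT_VOTE_ANY decides when all lanes have finished.  */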
9096 /* Expand code for an OpenMP scan directive and the structured block
9097 before the scan directive. */
9099 static void
9100 lower_omp_scan (gimple_stmt_iterator *gsi_p, omp_context *ctx)
9102 gimple *stmt = gsi_stmt (*gsi_p);
9103 bool has_clauses
9104 = gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt)) != NULL;
9105 tree lane = NULL_TREE;
9106 gimple_seq before = NULL;
9107 omp_context *octx = ctx->outer;
9108 gcc_assert (octx);
9109 if (octx->scan_exclusive && !has_clauses)
9111 gimple_stmt_iterator gsi2 = *gsi_p;
9112 gsi_next (&gsi2);
9113 gimple *stmt2 = gsi_stmt (gsi2);
9114 /* For exclusive scan, swap GIMPLE_OMP_SCAN without clauses
9115 with following GIMPLE_OMP_SCAN with clauses, so that input_phase,
9116 the one with exclusive clause(s), comes first. */
9117 if (stmt2
9118 && gimple_code (stmt2) == GIMPLE_OMP_SCAN
9119 && gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt2)) != NULL)
9121 gsi_remove (gsi_p, false);
9122 gsi_insert_after (gsi_p, stmt, GSI_SAME_STMT);
9123 ctx = maybe_lookup_ctx (stmt2);
9124 gcc_assert (ctx);
9125 lower_omp_scan (gsi_p, ctx);
9126 return;
9130 bool input_phase = has_clauses ^ octx->scan_inclusive;
9131 bool is_simd = (gimple_code (octx->stmt) == GIMPLE_OMP_FOR
9132 && gimple_omp_for_kind (octx->stmt) == GF_OMP_FOR_KIND_SIMD);
9133 bool is_for = (gimple_code (octx->stmt) == GIMPLE_OMP_FOR
9134 && gimple_omp_for_kind (octx->stmt) == GF_OMP_FOR_KIND_FOR
9135 && !gimple_omp_for_combined_p (octx->stmt));
9136 bool is_for_simd = is_simd && gimple_omp_for_combined_into_p (octx->stmt);
9137 if (is_for_simd && octx->for_simd_scan_phase)
9138 is_simd = false;
9139 if (is_simd)
9140 if (tree c = omp_find_clause (gimple_omp_for_clauses (octx->stmt),
9141 OMP_CLAUSE__SIMDUID_))
9143 tree uid = OMP_CLAUSE__SIMDUID__DECL (c);
9144 lane = create_tmp_var (unsigned_type_node);
9145 tree t = build_int_cst (integer_type_node,
9146 input_phase ? 1
9147 : octx->scan_inclusive ? 2 : 3);
9148 gimple *g
9149 = gimple_build_call_internal (IFN_GOMP_SIMD_LANE, 2, uid, t);
9150 gimple_call_set_lhs (g, lane);
9151 gimple_seq_add_stmt (&before, g);
9154 if (is_simd || is_for)
9156 for (tree c = gimple_omp_for_clauses (octx->stmt);
9157 c; c = OMP_CLAUSE_CHAIN (c))
9158 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
9159 && OMP_CLAUSE_REDUCTION_INSCAN (c))
9161 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
9162 tree var = OMP_CLAUSE_DECL (c);
9163 tree new_var = lookup_decl (var, octx);
9164 tree val = new_var;
9165 tree var2 = NULL_TREE;
9166 tree var3 = NULL_TREE;
9167 tree var4 = NULL_TREE;
9168 tree lane0 = NULL_TREE;
9169 tree new_vard = new_var;
9170 if (omp_is_reference (var))
9172 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
9173 val = new_var;
9175 if (DECL_HAS_VALUE_EXPR_P (new_vard))
9177 val = DECL_VALUE_EXPR (new_vard);
9178 if (new_vard != new_var)
9180 gcc_assert (TREE_CODE (val) == ADDR_EXPR);
9181 val = TREE_OPERAND (val, 0);
9183 if (TREE_CODE (val) == ARRAY_REF
9184 && VAR_P (TREE_OPERAND (val, 0)))
9186 tree v = TREE_OPERAND (val, 0);
9187 if (lookup_attribute ("omp simd array",
9188 DECL_ATTRIBUTES (v)))
9190 val = unshare_expr (val);
9191 lane0 = TREE_OPERAND (val, 1);
9192 TREE_OPERAND (val, 1) = lane;
9193 var2 = lookup_decl (v, octx);
9194 if (octx->scan_exclusive)
9195 var4 = lookup_decl (var2, octx);
9196 if (input_phase
9197 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
9198 var3 = maybe_lookup_decl (var4 ? var4 : var2, octx);
9199 if (!input_phase)
9201 var2 = build4 (ARRAY_REF, TREE_TYPE (val),
9202 var2, lane, NULL_TREE, NULL_TREE);
9203 TREE_THIS_NOTRAP (var2) = 1;
9204 if (octx->scan_exclusive)
9206 var4 = build4 (ARRAY_REF, TREE_TYPE (val),
9207 var4, lane, NULL_TREE,
9208 NULL_TREE);
9209 TREE_THIS_NOTRAP (var4) = 1;
9212 else
9213 var2 = val;
9216 gcc_assert (var2);
9218 else
9220 var2 = build_outer_var_ref (var, octx);
9221 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
9223 var3 = maybe_lookup_decl (new_vard, octx);
9224 if (var3 == new_vard || var3 == NULL_TREE)
9225 var3 = NULL_TREE;
9226 else if (is_simd && octx->scan_exclusive && !input_phase)
9228 var4 = maybe_lookup_decl (var3, octx);
9229 if (var4 == var3 || var4 == NULL_TREE)
9231 if (TREE_ADDRESSABLE (TREE_TYPE (new_var)))
9233 var4 = var3;
9234 var3 = NULL_TREE;
9236 else
9237 var4 = NULL_TREE;
9241 if (is_simd
9242 && octx->scan_exclusive
9243 && !input_phase
9244 && var4 == NULL_TREE)
9245 var4 = create_tmp_var (TREE_TYPE (val));
9247 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
9249 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
9250 if (input_phase)
9252 if (var3)
9254 /* If we've added a separate identity element
9255 variable, copy it over into val. */
9256 tree x = lang_hooks.decls.omp_clause_assign_op (c, val,
9257 var3);
9258 gimplify_and_add (x, &before);
9260 else if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
9262 /* Otherwise, assign to it the identity element. */
9263 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
9264 if (is_for)
9265 tseq = copy_gimple_seq_and_replace_locals (tseq);
9266 tree ref = build_outer_var_ref (var, octx);
9267 tree x = (DECL_HAS_VALUE_EXPR_P (new_vard)
9268 ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
9269 if (x)
9271 if (new_vard != new_var)
9272 val = build_fold_addr_expr_loc (clause_loc, val);
9273 SET_DECL_VALUE_EXPR (new_vard, val);
9275 SET_DECL_VALUE_EXPR (placeholder, ref);
9276 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
9277 lower_omp (&tseq, octx);
9278 if (x)
9279 SET_DECL_VALUE_EXPR (new_vard, x);
9280 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
9281 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
9282 gimple_seq_add_seq (&before, tseq);
9283 if (is_simd)
9284 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
9287 else if (is_simd)
9289 tree x;
9290 if (octx->scan_exclusive)
9292 tree v4 = unshare_expr (var4);
9293 tree v2 = unshare_expr (var2);
9294 x = lang_hooks.decls.omp_clause_assign_op (c, v4, v2);
9295 gimplify_and_add (x, &before);
9297 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
9298 x = (DECL_HAS_VALUE_EXPR_P (new_vard)
9299 ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
9300 tree vexpr = val;
9301 if (x && new_vard != new_var)
9302 vexpr = build_fold_addr_expr_loc (clause_loc, val);
9303 if (x)
9304 SET_DECL_VALUE_EXPR (new_vard, vexpr);
9305 SET_DECL_VALUE_EXPR (placeholder, var2);
9306 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
9307 lower_omp (&tseq, octx);
9308 gimple_seq_add_seq (&before, tseq);
9309 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
9310 if (x)
9311 SET_DECL_VALUE_EXPR (new_vard, x);
9312 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
9313 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
9314 if (octx->scan_inclusive)
9316 x = lang_hooks.decls.omp_clause_assign_op (c, val,
9317 var2);
9318 gimplify_and_add (x, &before);
9320 else if (lane0 == NULL_TREE)
9322 x = lang_hooks.decls.omp_clause_assign_op (c, val,
9323 var4);
9324 gimplify_and_add (x, &before);
9328 else
9330 if (input_phase)
9332 /* Input phase. Set val to the initializer before
9333 the body. */
9334 tree x = omp_reduction_init (c, TREE_TYPE (new_var));
9335 gimplify_assign (val, x, &before);
9337 else if (is_simd)
9339 /* Scan phase. */
9340 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
9341 if (code == MINUS_EXPR)
9342 code = PLUS_EXPR;
9344 tree x = build2 (code, TREE_TYPE (var2),
9345 unshare_expr (var2), unshare_expr (val));
9346 if (octx->scan_inclusive)
9348 gimplify_assign (unshare_expr (var2), x, &before);
9349 gimplify_assign (val, var2, &before);
9351 else
9353 gimplify_assign (unshare_expr (var4),
9354 unshare_expr (var2), &before);
9355 gimplify_assign (var2, x, &before);
9356 if (lane0 == NULL_TREE)
9357 gimplify_assign (val, var4, &before);
9361 if (octx->scan_exclusive && !input_phase && lane0)
9363 tree vexpr = unshare_expr (var4);
9364 TREE_OPERAND (vexpr, 1) = lane0;
9365 if (new_vard != new_var)
9366 vexpr = build_fold_addr_expr_loc (clause_loc, vexpr);
9367 SET_DECL_VALUE_EXPR (new_vard, vexpr);
9371 if (is_simd && !is_for_simd)
9373 gsi_insert_seq_after (gsi_p, gimple_omp_body (stmt), GSI_SAME_STMT);
9374 gsi_insert_seq_after (gsi_p, before, GSI_SAME_STMT);
9375 gsi_replace (gsi_p, gimple_build_nop (), true);
9376 return;
9378 lower_omp (gimple_omp_body_ptr (stmt), octx);
9379 if (before)
9381 gimple_stmt_iterator gsi = gsi_start_1 (gimple_omp_body_ptr (stmt));
9382 gsi_insert_seq_before (&gsi, before, GSI_SAME_STMT);
9387 /* Gimplify a GIMPLE_OMP_CRITICAL statement. This is a relatively simple
9388 substitution of a couple of function calls. But the NAMED case
9389 requires that languages coordinate a symbol name. It is therefore
9390 best put here in common code. */
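/* For example, the effect on user code is roughly the following (a
   sketch, not the exact generated GIMPLE):

     #pragma omp critical (lck)
       x++;

   becomes

     GOMP_critical_name_start (&.gomp_critical_user_lck);
     x++;
     GOMP_critical_name_end (&.gomp_critical_user_lck);

   and an unnamed critical uses GOMP_critical_start/GOMP_critical_end
   instead.  */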
9392 static GTY(()) hash_map<tree, tree> *critical_name_mutexes;
9394 static void
9395 lower_omp_critical (gimple_stmt_iterator *gsi_p, omp_context *ctx)
9397 tree block;
9398 tree name, lock, unlock;
9399 gomp_critical *stmt = as_a <gomp_critical *> (gsi_stmt (*gsi_p));
9400 gbind *bind;
9401 location_t loc = gimple_location (stmt);
9402 gimple_seq tbody;
9404 name = gimple_omp_critical_name (stmt);
9405 if (name)
9407 tree decl;
9409 if (!critical_name_mutexes)
9410 critical_name_mutexes = hash_map<tree, tree>::create_ggc (10);
9412 tree *n = critical_name_mutexes->get (name);
9413 if (n == NULL)
9415 char *new_str;
9417 decl = create_tmp_var_raw (ptr_type_node);
9419 new_str = ACONCAT ((".gomp_critical_user_",
9420 IDENTIFIER_POINTER (name), NULL));
9421 DECL_NAME (decl) = get_identifier (new_str);
9422 TREE_PUBLIC (decl) = 1;
9423 TREE_STATIC (decl) = 1;
9424 DECL_COMMON (decl) = 1;
9425 DECL_ARTIFICIAL (decl) = 1;
9426 DECL_IGNORED_P (decl) = 1;
9428 varpool_node::finalize_decl (decl);
9430 critical_name_mutexes->put (name, decl);
9432 else
9433 decl = *n;
9435 /* If '#pragma omp critical' is inside an offloaded region or
9436 inside a function marked as offloadable, the symbol must be
9437 marked as offloadable too. */
9438 omp_context *octx;
9439 if (cgraph_node::get (current_function_decl)->offloadable)
9440 varpool_node::get_create (decl)->offloadable = 1;
9441 else
9442 for (octx = ctx->outer; octx; octx = octx->outer)
9443 if (is_gimple_omp_offloaded (octx->stmt))
9445 varpool_node::get_create (decl)->offloadable = 1;
9446 break;
9449 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_START);
9450 lock = build_call_expr_loc (loc, lock, 1,
9451 build_fold_addr_expr_loc (loc, decl));
9453 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_END);
9454 unlock = build_call_expr_loc (loc, unlock, 1,
9455 build_fold_addr_expr_loc (loc, decl));
9457 else
9459 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_START);
9460 lock = build_call_expr_loc (loc, lock, 0);
9462 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_END);
9463 unlock = build_call_expr_loc (loc, unlock, 0);
9466 push_gimplify_context ();
9468 block = make_node (BLOCK);
9469 bind = gimple_build_bind (NULL, NULL, block);
9470 gsi_replace (gsi_p, bind, true);
9471 gimple_bind_add_stmt (bind, stmt);
9473 tbody = gimple_bind_body (bind);
9474 gimplify_and_add (lock, &tbody);
9475 gimple_bind_set_body (bind, tbody);
9477 lower_omp (gimple_omp_body_ptr (stmt), ctx);
9478 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
9479 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
9480 gimple_omp_set_body (stmt, NULL);
9482 tbody = gimple_bind_body (bind);
9483 gimplify_and_add (unlock, &tbody);
9484 gimple_bind_set_body (bind, tbody);
9486 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
9488 pop_gimplify_context (bind);
9489 gimple_bind_append_vars (bind, ctx->block_vars);
9490 BLOCK_VARS (block) = gimple_bind_vars (bind);
9493 /* A subroutine of lower_omp_for. Generate code to emit the predicate
9494 for a lastprivate clause. Given a loop control predicate of (V
9495 cond N2), we gate the clause on (!(V cond N2)). The lowered form
9496 is appended to *DLIST; iterator initialization is appended to
9497 *BODY_P. *CLIST is for lastprivate(conditional:) code that needs
9498 to be emitted in a critical section. */
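/* For instance, for the common unit-step case (a sketch):

     #pragma omp for lastprivate (x)
     for (i = 0; i < n; i++)
       ...

   the copy-out appended to *DLIST is gated roughly as

     if (i == n)   // only on the thread that ran the last iteration
       x = x_private_copy;

   where x_private_copy stands for the thread's privatized x; the
   EQ_EXPR form comes from the strict-equality optimization below.  */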
9500 static void
9501 lower_omp_for_lastprivate (struct omp_for_data *fd, gimple_seq *body_p,
9502 gimple_seq *dlist, gimple_seq *clist,
9503 struct omp_context *ctx)
9505 tree clauses, cond, vinit;
9506 enum tree_code cond_code;
9507 gimple_seq stmts;
9509 cond_code = fd->loop.cond_code;
9510 cond_code = cond_code == LT_EXPR ? GE_EXPR : LE_EXPR;
9512 /* When possible, use a strict equality expression. This can let VRP-style
9513 optimizations deduce the value and remove a copy. */
9514 if (tree_fits_shwi_p (fd->loop.step))
9516 HOST_WIDE_INT step = tree_to_shwi (fd->loop.step);
9517 if (step == 1 || step == -1)
9518 cond_code = EQ_EXPR;
9521 tree n2 = fd->loop.n2;
9522 if (fd->collapse > 1
9523 && TREE_CODE (n2) != INTEGER_CST
9524 && gimple_omp_for_combined_into_p (fd->for_stmt))
9526 struct omp_context *taskreg_ctx = NULL;
9527 if (gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
9529 gomp_for *gfor = as_a <gomp_for *> (ctx->outer->stmt);
9530 if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_FOR
9531 || gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_DISTRIBUTE)
9533 if (gimple_omp_for_combined_into_p (gfor))
9535 gcc_assert (ctx->outer->outer
9536 && is_parallel_ctx (ctx->outer->outer));
9537 taskreg_ctx = ctx->outer->outer;
9539 else
9541 struct omp_for_data outer_fd;
9542 omp_extract_for_data (gfor, &outer_fd, NULL);
9543 n2 = fold_convert (TREE_TYPE (n2), outer_fd.loop.n2);
9546 else if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_TASKLOOP)
9547 taskreg_ctx = ctx->outer->outer;
9549 else if (is_taskreg_ctx (ctx->outer))
9550 taskreg_ctx = ctx->outer;
9551 if (taskreg_ctx)
9553 int i;
9554 tree taskreg_clauses
9555 = gimple_omp_taskreg_clauses (taskreg_ctx->stmt);
9556 tree innerc = omp_find_clause (taskreg_clauses,
9557 OMP_CLAUSE__LOOPTEMP_);
9558 gcc_assert (innerc);
9559 int count = fd->collapse;
9560 if (fd->non_rect
9561 && fd->last_nonrect == fd->first_nonrect + 1)
9562 if (tree v = gimple_omp_for_index (fd->for_stmt, fd->last_nonrect))
9563 if (!TYPE_UNSIGNED (TREE_TYPE (v)))
9564 count += 4;
9565 for (i = 0; i < count; i++)
9567 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
9568 OMP_CLAUSE__LOOPTEMP_);
9569 gcc_assert (innerc);
9571 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
9572 OMP_CLAUSE__LOOPTEMP_);
9573 if (innerc)
9574 n2 = fold_convert (TREE_TYPE (n2),
9575 lookup_decl (OMP_CLAUSE_DECL (innerc),
9576 taskreg_ctx));
9579 cond = build2 (cond_code, boolean_type_node, fd->loop.v, n2);
9581 clauses = gimple_omp_for_clauses (fd->for_stmt);
9582 stmts = NULL;
9583 lower_lastprivate_clauses (clauses, cond, body_p, &stmts, clist, ctx);
9584 if (!gimple_seq_empty_p (stmts))
9586 gimple_seq_add_seq (&stmts, *dlist);
9587 *dlist = stmts;
9589 /* Optimize: v = 0; is usually cheaper than v = some_other_constant. */
9590 vinit = fd->loop.n1;
9591 if (cond_code == EQ_EXPR
9592 && tree_fits_shwi_p (fd->loop.n2)
9593 && ! integer_zerop (fd->loop.n2))
9594 vinit = build_int_cst (TREE_TYPE (fd->loop.v), 0);
9595 else
9596 vinit = unshare_expr (vinit);
9598 /* Initialize the iterator variable, so that threads that don't execute
9599 any iterations don't execute the lastprivate clauses by accident. */
9600 gimplify_assign (fd->loop.v, vinit, body_p);
9604 /* Callback for walk_gimple_seq. Find the first #pragma omp scan statement. */
9606 static tree
9607 omp_find_scan (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
9608 struct walk_stmt_info *wi)
9610 gimple *stmt = gsi_stmt (*gsi_p);
9612 *handled_ops_p = true;
9613 switch (gimple_code (stmt))
9615 WALK_SUBSTMTS;
9617 case GIMPLE_OMP_FOR:
9618 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_SIMD
9619 && gimple_omp_for_combined_into_p (stmt))
9620 *handled_ops_p = false;
9621 break;
9623 case GIMPLE_OMP_SCAN:
9624 *(gimple_stmt_iterator *) (wi->info) = *gsi_p;
9625 return integer_zero_node;
9626 default:
9627 break;
9629 return NULL;
9632 /* Helper function for lower_omp_for; add transformations for a worksharing
9633 loop with scan directives inside of it.
9634 For a worksharing loop not combined with simd, transform:
9635 #pragma omp for reduction(inscan,+:r) private(i)
9636 for (i = 0; i < n; i = i + 1)
9639 update (r);
9641 #pragma omp scan inclusive(r)
9643 use (r);
9647 into two worksharing loops + code to merge results:
9649 num_threads = omp_get_num_threads ();
9650 thread_num = omp_get_thread_num ();
9651 if (thread_num == 0) goto <D.2099>; else goto <D.2100>;
9652 <D.2099>:
9653 var2 = r;
9654 goto <D.2101>;
9655 <D.2100>:
9656 // For UDRs this is UDR init, or if ctors are needed, copy from
9657 // var3 that has been constructed to contain the neutral element.
9658 var2 = 0;
9659 <D.2101>:
9660 ivar = 0;
9661 // The _scantemp_ clauses will arrange for rpriva to be initialized to
9662 // a shared array with num_threads elements and rprivb to a local array
9663 // with a number of elements equal to the number of (contiguous) iterations
9664 // the current thread will perform. The controlb and controlp variables
9665 // are temporaries used to handle deallocation of rprivb at the end of the
9666 // second GOMP_FOR.
9667 #pragma omp for _scantemp_(rpriva) _scantemp_(rprivb) _scantemp_(controlb) \
9668 _scantemp_(controlp) reduction(inscan,+:r) private(i) nowait
9669 for (i = 0; i < n; i = i + 1)
9672 // For UDRs this is UDR init or copy from var3.
9673 r = 0;
9674 // This is the input phase from user code.
9675 update (r);
9678 // For UDRs this is UDR merge.
9679 var2 = var2 + r;
9680 // Rather than handing it over to the user, save it to the local
9681 // thread's array.
9682 rprivb[ivar] = var2;
9683 // For exclusive scan, the above two statements are swapped.
9684 ivar = ivar + 1;
9687 // And remember the final value from this thread in the shared
9688 // rpriva array.
9689 rpriva[(sizetype) thread_num] = var2;
9690 // If there is more than one thread, compute the inclusive parallel scan
9691 // of the rpriva array using a work-efficient prefix sum.
9692 if (num_threads > 1) goto <D.2102>; else goto <D.2103>;
9693 <D.2102>:
9694 GOMP_barrier ();
9695 down = 0;
9696 k = 1;
9697 num_threadsu = (unsigned int) num_threads;
9698 thread_nump1 = (unsigned int) thread_num + 1;
9699 <D.2108>:
9700 twok = k << 1;
9701 if (twok > num_threadsu) goto <D.2110>; else goto <D.2111>;
9702 <D.2110>:
9703 down = 4294967295;
9704 k = k >> 1;
9705 if (k == num_threadsu) goto <D.2112>; else goto <D.2111>;
9706 <D.2112>:
9707 k = k >> 1;
9708 <D.2111>:
9709 twok = k << 1;
9710 cplx = .MUL_OVERFLOW (thread_nump1, twok);
9711 mul = REALPART_EXPR <cplx>;
9712 ovf = IMAGPART_EXPR <cplx>;
9713 if (ovf == 0) goto <D.2116>; else goto <D.2117>;
9714 <D.2116>:
9715 andv = k & down;
9716 andvm1 = andv + 4294967295;
9717 l = mul + andvm1;
9718 if (l < num_threadsu) goto <D.2120>; else goto <D.2117>;
9719 <D.2120>:
9720 // For UDRs this is UDR merge, performed using the var2 variable as a temporary,
9721 // i.e. var2 = rpriva[l - k]; UDR merge (var2, rpriva[l]); rpriva[l] = var2;
9722 rpriva[l] = rpriva[l - k] + rpriva[l];
9723 <D.2117>:
9724 if (down == 0) goto <D.2121>; else goto <D.2122>;
9725 <D.2121>:
9726 k = k << 1;
9727 goto <D.2123>;
9728 <D.2122>:
9729 k = k >> 1;
9730 <D.2123>:
9731 GOMP_barrier ();
9732 if (k != 0) goto <D.2108>; else goto <D.2103>;
9733 <D.2103>:
9734 if (thread_num == 0) goto <D.2124>; else goto <D.2125>;
9735 <D.2124>:
9736 // For UDRs this is UDR init or copy from var3.
9737 var2 = 0;
9738 goto <D.2126>;
9739 <D.2125>:
9740 var2 = rpriva[thread_num - 1];
9741 <D.2126>:
9742 ivar = 0;
9743 #pragma omp for _scantemp_(controlb) _scantemp_(controlp) \
9744 reduction(inscan,+:r) private(i)
9745 for (i = 0; i < n; i = i + 1)
9748 // For UDRs, this is r = var2; UDR merge (r, rprivb[ivar]);
9749 r = var2 + rprivb[ivar];
9752 // This is the scan phase from user code.
9753 use (r);
9754 // Plus a bump of the iterator.
9755 ivar = ivar + 1;
9757 } */
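/* A small worked example of the above (a sketch): with r = 0, n = 4,
   a[] = { 1, 2, 3, 4 }, update (r) being r += a[i], and two threads
   each owning two contiguous iterations, the first loop leaves
   rprivb = { 1, 3 } on thread 0, rprivb = { 3, 7 } on thread 1, and
   rpriva = { 3, 7 }.  The work-efficient prefix scan rewrites rpriva
   to { 3, 10 }, so thread 1 enters the second loop with var2 = 3 and
   the second loop produces the inclusive-scan values r = 1, 3, 6, 10
   across the four iterations.  */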
9759 static void
9760 lower_omp_for_scan (gimple_seq *body_p, gimple_seq *dlist, gomp_for *stmt,
9761 struct omp_for_data *fd, omp_context *ctx)
9763 bool is_for_simd = gimple_omp_for_combined_p (stmt);
9764 gcc_assert (ctx->scan_inclusive || ctx->scan_exclusive);
9766 gimple_seq body = gimple_omp_body (stmt);
9767 gimple_stmt_iterator input1_gsi = gsi_none ();
9768 struct walk_stmt_info wi;
9769 memset (&wi, 0, sizeof (wi));
9770 wi.val_only = true;
9771 wi.info = (void *) &input1_gsi;
9772 walk_gimple_seq_mod (&body, omp_find_scan, NULL, &wi);
9773 gcc_assert (!gsi_end_p (input1_gsi));
9775 gimple *input_stmt1 = gsi_stmt (input1_gsi);
9776 gimple_stmt_iterator gsi = input1_gsi;
9777 gsi_next (&gsi);
9778 gimple_stmt_iterator scan1_gsi = gsi;
9779 gimple *scan_stmt1 = gsi_stmt (gsi);
9780 gcc_assert (scan_stmt1 && gimple_code (scan_stmt1) == GIMPLE_OMP_SCAN);
9782 gimple_seq input_body = gimple_omp_body (input_stmt1);
9783 gimple_seq scan_body = gimple_omp_body (scan_stmt1);
9784 gimple_omp_set_body (input_stmt1, NULL);
9785 gimple_omp_set_body (scan_stmt1, NULL);
9786 gimple_omp_set_body (stmt, NULL);
9788 gomp_for *new_stmt = as_a <gomp_for *> (gimple_copy (stmt));
9789 gimple_seq new_body = copy_gimple_seq_and_replace_locals (body);
9790 gimple_omp_set_body (stmt, body);
9791 gimple_omp_set_body (input_stmt1, input_body);
9793 gimple_stmt_iterator input2_gsi = gsi_none ();
9794 memset (&wi, 0, sizeof (wi));
9795 wi.val_only = true;
9796 wi.info = (void *) &input2_gsi;
9797 walk_gimple_seq_mod (&new_body, omp_find_scan, NULL, &wi);
9798 gcc_assert (!gsi_end_p (input2_gsi));
9800 gimple *input_stmt2 = gsi_stmt (input2_gsi);
9801 gsi = input2_gsi;
9802 gsi_next (&gsi);
9803 gimple_stmt_iterator scan2_gsi = gsi;
9804 gimple *scan_stmt2 = gsi_stmt (gsi);
9805 gcc_assert (scan_stmt2 && gimple_code (scan_stmt2) == GIMPLE_OMP_SCAN);
9806 gimple_omp_set_body (scan_stmt2, scan_body);
9808 gimple_stmt_iterator input3_gsi = gsi_none ();
9809 gimple_stmt_iterator scan3_gsi = gsi_none ();
9810 gimple_stmt_iterator input4_gsi = gsi_none ();
9811 gimple_stmt_iterator scan4_gsi = gsi_none ();
9812 gimple *input_stmt3 = NULL, *scan_stmt3 = NULL;
9813 gimple *input_stmt4 = NULL, *scan_stmt4 = NULL;
9814 omp_context *input_simd_ctx = NULL, *scan_simd_ctx = NULL;
9815 if (is_for_simd)
9817 memset (&wi, 0, sizeof (wi));
9818 wi.val_only = true;
9819 wi.info = (void *) &input3_gsi;
9820 walk_gimple_seq_mod (&input_body, omp_find_scan, NULL, &wi);
9821 gcc_assert (!gsi_end_p (input3_gsi));
9823 input_stmt3 = gsi_stmt (input3_gsi);
9824 gsi = input3_gsi;
9825 gsi_next (&gsi);
9826 scan3_gsi = gsi;
9827 scan_stmt3 = gsi_stmt (gsi);
9828 gcc_assert (scan_stmt3 && gimple_code (scan_stmt3) == GIMPLE_OMP_SCAN);
9830 memset (&wi, 0, sizeof (wi));
9831 wi.val_only = true;
9832 wi.info = (void *) &input4_gsi;
9833 walk_gimple_seq_mod (&scan_body, omp_find_scan, NULL, &wi);
9834 gcc_assert (!gsi_end_p (input4_gsi));
9836 input_stmt4 = gsi_stmt (input4_gsi);
9837 gsi = input4_gsi;
9838 gsi_next (&gsi);
9839 scan4_gsi = gsi;
9840 scan_stmt4 = gsi_stmt (gsi);
9841 gcc_assert (scan_stmt4 && gimple_code (scan_stmt4) == GIMPLE_OMP_SCAN);
9843 input_simd_ctx = maybe_lookup_ctx (input_stmt3)->outer;
9844 scan_simd_ctx = maybe_lookup_ctx (input_stmt4)->outer;
9847 tree num_threads = create_tmp_var (integer_type_node);
9848 tree thread_num = create_tmp_var (integer_type_node);
9849 tree nthreads_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS);
9850 tree threadnum_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
9851 gimple *g = gimple_build_call (nthreads_decl, 0);
9852 gimple_call_set_lhs (g, num_threads);
9853 gimple_seq_add_stmt (body_p, g);
9854 g = gimple_build_call (threadnum_decl, 0);
9855 gimple_call_set_lhs (g, thread_num);
9856 gimple_seq_add_stmt (body_p, g);
9858 tree ivar = create_tmp_var (sizetype);
9859 tree new_clauses1 = NULL_TREE, new_clauses2 = NULL_TREE;
9860 tree *cp1 = &new_clauses1, *cp2 = &new_clauses2;
9861 tree k = create_tmp_var (unsigned_type_node);
9862 tree l = create_tmp_var (unsigned_type_node);
9864 gimple_seq clist = NULL, mdlist = NULL;
9865 gimple_seq thr01_list = NULL, thrn1_list = NULL;
9866 gimple_seq thr02_list = NULL, thrn2_list = NULL;
9867 gimple_seq scan1_list = NULL, input2_list = NULL;
9868 gimple_seq last_list = NULL, reduc_list = NULL;
9869 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
9870 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
9871 && OMP_CLAUSE_REDUCTION_INSCAN (c))
9873 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
9874 tree var = OMP_CLAUSE_DECL (c);
9875 tree new_var = lookup_decl (var, ctx);
9876 tree var3 = NULL_TREE;
9877 tree new_vard = new_var;
9878 if (omp_is_reference (var))
9879 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
9880 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
9882 var3 = maybe_lookup_decl (new_vard, ctx);
9883 if (var3 == new_vard)
9884 var3 = NULL_TREE;
9887 tree ptype = build_pointer_type (TREE_TYPE (new_var));
9888 tree rpriva = create_tmp_var (ptype);
9889 tree nc = build_omp_clause (clause_loc, OMP_CLAUSE__SCANTEMP_);
9890 OMP_CLAUSE_DECL (nc) = rpriva;
9891 *cp1 = nc;
9892 cp1 = &OMP_CLAUSE_CHAIN (nc);
9894 tree rprivb = create_tmp_var (ptype);
9895 nc = build_omp_clause (clause_loc, OMP_CLAUSE__SCANTEMP_);
9896 OMP_CLAUSE_DECL (nc) = rprivb;
9897 OMP_CLAUSE__SCANTEMP__ALLOC (nc) = 1;
9898 *cp1 = nc;
9899 cp1 = &OMP_CLAUSE_CHAIN (nc);
9901 tree var2 = create_tmp_var_raw (TREE_TYPE (new_var));
9902 if (new_vard != new_var)
9903 TREE_ADDRESSABLE (var2) = 1;
9904 gimple_add_tmp_var (var2);
9906 tree x = fold_convert_loc (clause_loc, sizetype, thread_num);
9907 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
9908 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
9909 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
9910 tree rpriva_ref = build_simple_mem_ref_loc (clause_loc, x);
9912 x = fold_build2_loc (clause_loc, PLUS_EXPR, integer_type_node,
9913 thread_num, integer_minus_one_node);
9914 x = fold_convert_loc (clause_loc, sizetype, x);
9915 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
9916 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
9917 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
9918 tree rprivam1_ref = build_simple_mem_ref_loc (clause_loc, x);
9920 x = fold_convert_loc (clause_loc, sizetype, l);
9921 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
9922 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
9923 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
9924 tree rprival_ref = build_simple_mem_ref_loc (clause_loc, x);
9926 x = fold_build2_loc (clause_loc, MINUS_EXPR, unsigned_type_node, l, k);
9927 x = fold_convert_loc (clause_loc, sizetype, x);
9928 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
9929 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
9930 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
9931 tree rprivalmk_ref = build_simple_mem_ref_loc (clause_loc, x);
9933 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, ivar,
9934 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
9935 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rprivb), rprivb, x);
9936 tree rprivb_ref = build_simple_mem_ref_loc (clause_loc, x);
9938 tree var4 = is_for_simd ? new_var : var2;
9939 tree var5 = NULL_TREE, var6 = NULL_TREE;
9940 if (is_for_simd)
9942 var5 = lookup_decl (var, input_simd_ctx);
9943 var6 = lookup_decl (var, scan_simd_ctx);
9944 if (new_vard != new_var)
9946 var5 = build_simple_mem_ref_loc (clause_loc, var5);
9947 var6 = build_simple_mem_ref_loc (clause_loc, var6);
9950 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
9952 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
9953 tree val = var2;
9955 x = lang_hooks.decls.omp_clause_default_ctor
9956 (c, var2, build_outer_var_ref (var, ctx));
9957 if (x)
9958 gimplify_and_add (x, &clist);
9960 x = build_outer_var_ref (var, ctx);
9961 x = lang_hooks.decls.omp_clause_assign_op (c, unshare_expr (var4),
9962 x);
9963 gimplify_and_add (x, &thr01_list);
9965 tree y = (DECL_HAS_VALUE_EXPR_P (new_vard)
9966 ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
9967 if (var3)
9969 x = unshare_expr (var4);
9970 x = lang_hooks.decls.omp_clause_assign_op (c, x, var3);
9971 gimplify_and_add (x, &thrn1_list);
9972 x = unshare_expr (var4);
9973 x = lang_hooks.decls.omp_clause_assign_op (c, x, var3);
9974 gimplify_and_add (x, &thr02_list);
9976 else if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
9978 /* Otherwise, assign to it the identity element. */
9979 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
9980 tseq = copy_gimple_seq_and_replace_locals (tseq);
9981 if (!is_for_simd)
9983 if (new_vard != new_var)
9984 val = build_fold_addr_expr_loc (clause_loc, val);
9985 SET_DECL_VALUE_EXPR (new_vard, val);
9986 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
9988 SET_DECL_VALUE_EXPR (placeholder, error_mark_node);
9989 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
9990 lower_omp (&tseq, ctx);
9991 gimple_seq_add_seq (&thrn1_list, tseq);
9992 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
9993 lower_omp (&tseq, ctx);
9994 gimple_seq_add_seq (&thr02_list, tseq);
9995 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
9996 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
9997 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
9998 if (y)
9999 SET_DECL_VALUE_EXPR (new_vard, y);
10000 else
10002 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
10003 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
10007 x = unshare_expr (var4);
10008 x = lang_hooks.decls.omp_clause_assign_op (c, x, rprivam1_ref);
10009 gimplify_and_add (x, &thrn2_list);
10011 if (is_for_simd)
10013 x = unshare_expr (rprivb_ref);
10014 x = lang_hooks.decls.omp_clause_assign_op (c, x, var5);
10015 gimplify_and_add (x, &scan1_list);
10017 else
10019 if (ctx->scan_exclusive)
10021 x = unshare_expr (rprivb_ref);
10022 x = lang_hooks.decls.omp_clause_assign_op (c, x, var2);
10023 gimplify_and_add (x, &scan1_list);
10026 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
10027 tseq = copy_gimple_seq_and_replace_locals (tseq);
10028 SET_DECL_VALUE_EXPR (placeholder, var2);
10029 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
10030 lower_omp (&tseq, ctx);
10031 gimple_seq_add_seq (&scan1_list, tseq);
10033 if (ctx->scan_inclusive)
10035 x = unshare_expr (rprivb_ref);
10036 x = lang_hooks.decls.omp_clause_assign_op (c, x, var2);
10037 gimplify_and_add (x, &scan1_list);
10041 x = unshare_expr (rpriva_ref);
10042 x = lang_hooks.decls.omp_clause_assign_op (c, x,
10043 unshare_expr (var4));
10044 gimplify_and_add (x, &mdlist);
10046 x = unshare_expr (is_for_simd ? var6 : new_var);
10047 x = lang_hooks.decls.omp_clause_assign_op (c, x, var4);
10048 gimplify_and_add (x, &input2_list);
10050 val = rprivb_ref;
10051 if (new_vard != new_var)
10052 val = build_fold_addr_expr_loc (clause_loc, val);
10054 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
10055 tseq = copy_gimple_seq_and_replace_locals (tseq);
10056 SET_DECL_VALUE_EXPR (new_vard, val);
10057 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
10058 if (is_for_simd)
10060 SET_DECL_VALUE_EXPR (placeholder, var6);
10061 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
10063 else
10064 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
10065 lower_omp (&tseq, ctx);
10066 if (y)
10067 SET_DECL_VALUE_EXPR (new_vard, y);
10068 else
10070 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
10071 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
10073 if (!is_for_simd)
10075 SET_DECL_VALUE_EXPR (placeholder, new_var);
10076 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
10077 lower_omp (&tseq, ctx);
10079 gimple_seq_add_seq (&input2_list, tseq);
10081 x = build_outer_var_ref (var, ctx);
10082 x = lang_hooks.decls.omp_clause_assign_op (c, x, rpriva_ref);
10083 gimplify_and_add (x, &last_list);
10085 x = lang_hooks.decls.omp_clause_assign_op (c, var2, rprivalmk_ref);
10086 gimplify_and_add (x, &reduc_list);
10087 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
10088 tseq = copy_gimple_seq_and_replace_locals (tseq);
10089 val = rprival_ref;
10090 if (new_vard != new_var)
10091 val = build_fold_addr_expr_loc (clause_loc, val);
10092 SET_DECL_VALUE_EXPR (new_vard, val);
10093 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
10094 SET_DECL_VALUE_EXPR (placeholder, var2);
10095 lower_omp (&tseq, ctx);
10096 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
10097 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
10098 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
10099 if (y)
10100 SET_DECL_VALUE_EXPR (new_vard, y);
10101 else
10103 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
10104 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
10106 gimple_seq_add_seq (&reduc_list, tseq);
10107 x = lang_hooks.decls.omp_clause_assign_op (c, rprival_ref, var2);
10108 gimplify_and_add (x, &reduc_list);
10110 x = lang_hooks.decls.omp_clause_dtor (c, var2);
10111 if (x)
10112 gimplify_and_add (x, dlist);
10114 else
10116 x = build_outer_var_ref (var, ctx);
10117 gimplify_assign (unshare_expr (var4), x, &thr01_list);
10119 x = omp_reduction_init (c, TREE_TYPE (new_var));
10120 gimplify_assign (unshare_expr (var4), unshare_expr (x),
10121 &thrn1_list);
10122 gimplify_assign (unshare_expr (var4), x, &thr02_list);
10124 gimplify_assign (unshare_expr (var4), rprivam1_ref, &thrn2_list);
10126 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
10127 if (code == MINUS_EXPR)
10128 code = PLUS_EXPR;
10130 if (is_for_simd)
10131 gimplify_assign (unshare_expr (rprivb_ref), var5, &scan1_list);
10132 else
10134 if (ctx->scan_exclusive)
10135 gimplify_assign (unshare_expr (rprivb_ref), var2,
10136 &scan1_list);
10137 x = build2 (code, TREE_TYPE (new_var), var2, new_var);
10138 gimplify_assign (var2, x, &scan1_list);
10139 if (ctx->scan_inclusive)
10140 gimplify_assign (unshare_expr (rprivb_ref), var2,
10141 &scan1_list);
10144 gimplify_assign (unshare_expr (rpriva_ref), unshare_expr (var4),
10145 &mdlist);
10147 x = build2 (code, TREE_TYPE (new_var), var4, rprivb_ref);
10148 gimplify_assign (is_for_simd ? var6 : new_var, x, &input2_list);
10150 gimplify_assign (build_outer_var_ref (var, ctx), rpriva_ref,
10151 &last_list);
10153 x = build2 (code, TREE_TYPE (new_var), rprivalmk_ref,
10154 unshare_expr (rprival_ref));
10155 gimplify_assign (rprival_ref, x, &reduc_list);
10159 g = gimple_build_assign (ivar, PLUS_EXPR, ivar, size_one_node);
10160 gimple_seq_add_stmt (&scan1_list, g);
10161 g = gimple_build_assign (ivar, PLUS_EXPR, ivar, size_one_node);
10162 gimple_seq_add_stmt (gimple_omp_body_ptr (is_for_simd
10163 ? scan_stmt4 : scan_stmt2), g);
10165 tree controlb = create_tmp_var (boolean_type_node);
10166 tree controlp = create_tmp_var (ptr_type_node);
10167 tree nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
10168 OMP_CLAUSE_DECL (nc) = controlb;
10169 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
10170 *cp1 = nc;
10171 cp1 = &OMP_CLAUSE_CHAIN (nc);
10172 nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
10173 OMP_CLAUSE_DECL (nc) = controlp;
10174 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
10175 *cp1 = nc;
10176 cp1 = &OMP_CLAUSE_CHAIN (nc);
10177 nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
10178 OMP_CLAUSE_DECL (nc) = controlb;
10179 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
10180 *cp2 = nc;
10181 cp2 = &OMP_CLAUSE_CHAIN (nc);
10182 nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
10183 OMP_CLAUSE_DECL (nc) = controlp;
10184 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
10185 *cp2 = nc;
10186 cp2 = &OMP_CLAUSE_CHAIN (nc);
10188 *cp1 = gimple_omp_for_clauses (stmt);
10189 gimple_omp_for_set_clauses (stmt, new_clauses1);
10190 *cp2 = gimple_omp_for_clauses (new_stmt);
10191 gimple_omp_for_set_clauses (new_stmt, new_clauses2);
10193 if (is_for_simd)
10195 gimple_seq_add_seq (gimple_omp_body_ptr (scan_stmt3), scan1_list);
10196 gimple_seq_add_seq (gimple_omp_body_ptr (input_stmt4), input2_list);
10198 gsi_insert_seq_after (&input3_gsi, gimple_omp_body (input_stmt3),
10199 GSI_SAME_STMT);
10200 gsi_remove (&input3_gsi, true);
10201 gsi_insert_seq_after (&scan3_gsi, gimple_omp_body (scan_stmt3),
10202 GSI_SAME_STMT);
10203 gsi_remove (&scan3_gsi, true);
10204 gsi_insert_seq_after (&input4_gsi, gimple_omp_body (input_stmt4),
10205 GSI_SAME_STMT);
10206 gsi_remove (&input4_gsi, true);
10207 gsi_insert_seq_after (&scan4_gsi, gimple_omp_body (scan_stmt4),
10208 GSI_SAME_STMT);
10209 gsi_remove (&scan4_gsi, true);
10211 else
10213 gimple_omp_set_body (scan_stmt1, scan1_list);
10214 gimple_omp_set_body (input_stmt2, input2_list);
10217 gsi_insert_seq_after (&input1_gsi, gimple_omp_body (input_stmt1),
10218 GSI_SAME_STMT);
10219 gsi_remove (&input1_gsi, true);
10220 gsi_insert_seq_after (&scan1_gsi, gimple_omp_body (scan_stmt1),
10221 GSI_SAME_STMT);
10222 gsi_remove (&scan1_gsi, true);
10223 gsi_insert_seq_after (&input2_gsi, gimple_omp_body (input_stmt2),
10224 GSI_SAME_STMT);
10225 gsi_remove (&input2_gsi, true);
10226 gsi_insert_seq_after (&scan2_gsi, gimple_omp_body (scan_stmt2),
10227 GSI_SAME_STMT);
10228 gsi_remove (&scan2_gsi, true);
10230 gimple_seq_add_seq (body_p, clist);
10232 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
10233 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
10234 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
10235 g = gimple_build_cond (EQ_EXPR, thread_num, integer_zero_node, lab1, lab2);
10236 gimple_seq_add_stmt (body_p, g);
10237 g = gimple_build_label (lab1);
10238 gimple_seq_add_stmt (body_p, g);
10239 gimple_seq_add_seq (body_p, thr01_list);
10240 g = gimple_build_goto (lab3);
10241 gimple_seq_add_stmt (body_p, g);
10242 g = gimple_build_label (lab2);
10243 gimple_seq_add_stmt (body_p, g);
10244 gimple_seq_add_seq (body_p, thrn1_list);
10245 g = gimple_build_label (lab3);
10246 gimple_seq_add_stmt (body_p, g);
10248 g = gimple_build_assign (ivar, size_zero_node);
10249 gimple_seq_add_stmt (body_p, g);
10251 gimple_seq_add_stmt (body_p, stmt);
10252 gimple_seq_add_seq (body_p, body);
10253 gimple_seq_add_stmt (body_p, gimple_build_omp_continue (fd->loop.v,
10254 fd->loop.v));
10256 g = gimple_build_omp_return (true);
10257 gimple_seq_add_stmt (body_p, g);
10258 gimple_seq_add_seq (body_p, mdlist);
10260 lab1 = create_artificial_label (UNKNOWN_LOCATION);
10261 lab2 = create_artificial_label (UNKNOWN_LOCATION);
10262 g = gimple_build_cond (GT_EXPR, num_threads, integer_one_node, lab1, lab2);
10263 gimple_seq_add_stmt (body_p, g);
10264 g = gimple_build_label (lab1);
10265 gimple_seq_add_stmt (body_p, g);
10267 g = omp_build_barrier (NULL);
10268 gimple_seq_add_stmt (body_p, g);
10270 tree down = create_tmp_var (unsigned_type_node);
10271 g = gimple_build_assign (down, build_zero_cst (unsigned_type_node));
10272 gimple_seq_add_stmt (body_p, g);
10274 g = gimple_build_assign (k, build_one_cst (unsigned_type_node));
10275 gimple_seq_add_stmt (body_p, g);
10277 tree num_threadsu = create_tmp_var (unsigned_type_node);
10278 g = gimple_build_assign (num_threadsu, NOP_EXPR, num_threads);
10279 gimple_seq_add_stmt (body_p, g);
10281 tree thread_numu = create_tmp_var (unsigned_type_node);
10282 g = gimple_build_assign (thread_numu, NOP_EXPR, thread_num);
10283 gimple_seq_add_stmt (body_p, g);
10285 tree thread_nump1 = create_tmp_var (unsigned_type_node);
10286 g = gimple_build_assign (thread_nump1, PLUS_EXPR, thread_numu,
10287 build_int_cst (unsigned_type_node, 1));
10288 gimple_seq_add_stmt (body_p, g);
10290 lab3 = create_artificial_label (UNKNOWN_LOCATION);
10291 g = gimple_build_label (lab3);
10292 gimple_seq_add_stmt (body_p, g);
10294 tree twok = create_tmp_var (unsigned_type_node);
10295 g = gimple_build_assign (twok, LSHIFT_EXPR, k, integer_one_node);
10296 gimple_seq_add_stmt (body_p, g);
10298 tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
10299 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
10300 tree lab6 = create_artificial_label (UNKNOWN_LOCATION);
10301 g = gimple_build_cond (GT_EXPR, twok, num_threadsu, lab4, lab5);
10302 gimple_seq_add_stmt (body_p, g);
10303 g = gimple_build_label (lab4);
10304 gimple_seq_add_stmt (body_p, g);
10305 g = gimple_build_assign (down, build_all_ones_cst (unsigned_type_node));
10306 gimple_seq_add_stmt (body_p, g);
10307 g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
10308 gimple_seq_add_stmt (body_p, g);
10310 g = gimple_build_cond (EQ_EXPR, k, num_threadsu, lab6, lab5);
10311 gimple_seq_add_stmt (body_p, g);
10312 g = gimple_build_label (lab6);
10313 gimple_seq_add_stmt (body_p, g);
10315 g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
10316 gimple_seq_add_stmt (body_p, g);
10318 g = gimple_build_label (lab5);
10319 gimple_seq_add_stmt (body_p, g);
10321 g = gimple_build_assign (twok, LSHIFT_EXPR, k, integer_one_node);
10322 gimple_seq_add_stmt (body_p, g);
10324 tree cplx = create_tmp_var (build_complex_type (unsigned_type_node, false));
10325 g = gimple_build_call_internal (IFN_MUL_OVERFLOW, 2, thread_nump1, twok);
10326 gimple_call_set_lhs (g, cplx);
10327 gimple_seq_add_stmt (body_p, g);
10328 tree mul = create_tmp_var (unsigned_type_node);
10329 g = gimple_build_assign (mul, REALPART_EXPR,
10330 build1 (REALPART_EXPR, unsigned_type_node, cplx));
10331 gimple_seq_add_stmt (body_p, g);
10332 tree ovf = create_tmp_var (unsigned_type_node);
10333 g = gimple_build_assign (ovf, IMAGPART_EXPR,
10334 build1 (IMAGPART_EXPR, unsigned_type_node, cplx));
10335 gimple_seq_add_stmt (body_p, g);
10337 tree lab7 = create_artificial_label (UNKNOWN_LOCATION);
10338 tree lab8 = create_artificial_label (UNKNOWN_LOCATION);
10339 g = gimple_build_cond (EQ_EXPR, ovf, build_zero_cst (unsigned_type_node),
10340 lab7, lab8);
10341 gimple_seq_add_stmt (body_p, g);
10342 g = gimple_build_label (lab7);
10343 gimple_seq_add_stmt (body_p, g);
10345 tree andv = create_tmp_var (unsigned_type_node);
10346 g = gimple_build_assign (andv, BIT_AND_EXPR, k, down);
10347 gimple_seq_add_stmt (body_p, g);
10348 tree andvm1 = create_tmp_var (unsigned_type_node);
10349 g = gimple_build_assign (andvm1, PLUS_EXPR, andv,
10350 build_minus_one_cst (unsigned_type_node));
10351 gimple_seq_add_stmt (body_p, g);
10353 g = gimple_build_assign (l, PLUS_EXPR, mul, andvm1);
10354 gimple_seq_add_stmt (body_p, g);
10356 tree lab9 = create_artificial_label (UNKNOWN_LOCATION);
10357 g = gimple_build_cond (LT_EXPR, l, num_threadsu, lab9, lab8);
10358 gimple_seq_add_stmt (body_p, g);
10359 g = gimple_build_label (lab9);
10360 gimple_seq_add_stmt (body_p, g);
10361 gimple_seq_add_seq (body_p, reduc_list);
10362 g = gimple_build_label (lab8);
10363 gimple_seq_add_stmt (body_p, g);
10365 tree lab10 = create_artificial_label (UNKNOWN_LOCATION);
10366 tree lab11 = create_artificial_label (UNKNOWN_LOCATION);
10367 tree lab12 = create_artificial_label (UNKNOWN_LOCATION);
10368 g = gimple_build_cond (EQ_EXPR, down, build_zero_cst (unsigned_type_node),
10369 lab10, lab11);
10370 gimple_seq_add_stmt (body_p, g);
10371 g = gimple_build_label (lab10);
10372 gimple_seq_add_stmt (body_p, g);
10373 g = gimple_build_assign (k, LSHIFT_EXPR, k, integer_one_node);
10374 gimple_seq_add_stmt (body_p, g);
10375 g = gimple_build_goto (lab12);
10376 gimple_seq_add_stmt (body_p, g);
10377 g = gimple_build_label (lab11);
10378 gimple_seq_add_stmt (body_p, g);
10379 g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
10380 gimple_seq_add_stmt (body_p, g);
10381 g = gimple_build_label (lab12);
10382 gimple_seq_add_stmt (body_p, g);
10384 g = omp_build_barrier (NULL);
10385 gimple_seq_add_stmt (body_p, g);
10387 g = gimple_build_cond (NE_EXPR, k, build_zero_cst (unsigned_type_node),
10388 lab3, lab2);
10389 gimple_seq_add_stmt (body_p, g);
10391 g = gimple_build_label (lab2);
10392 gimple_seq_add_stmt (body_p, g);
10394 lab1 = create_artificial_label (UNKNOWN_LOCATION);
10395 lab2 = create_artificial_label (UNKNOWN_LOCATION);
10396 lab3 = create_artificial_label (UNKNOWN_LOCATION);
10397 g = gimple_build_cond (EQ_EXPR, thread_num, integer_zero_node, lab1, lab2);
10398 gimple_seq_add_stmt (body_p, g);
10399 g = gimple_build_label (lab1);
10400 gimple_seq_add_stmt (body_p, g);
10401 gimple_seq_add_seq (body_p, thr02_list);
10402 g = gimple_build_goto (lab3);
10403 gimple_seq_add_stmt (body_p, g);
10404 g = gimple_build_label (lab2);
10405 gimple_seq_add_stmt (body_p, g);
10406 gimple_seq_add_seq (body_p, thrn2_list);
10407 g = gimple_build_label (lab3);
10408 gimple_seq_add_stmt (body_p, g);
10410 g = gimple_build_assign (ivar, size_zero_node);
10411 gimple_seq_add_stmt (body_p, g);
10412 gimple_seq_add_stmt (body_p, new_stmt);
10413 gimple_seq_add_seq (body_p, new_body);
10415 gimple_seq new_dlist = NULL;
10416 lab1 = create_artificial_label (UNKNOWN_LOCATION);
10417 lab2 = create_artificial_label (UNKNOWN_LOCATION);
10418 tree num_threadsm1 = create_tmp_var (integer_type_node);
10419 g = gimple_build_assign (num_threadsm1, PLUS_EXPR, num_threads,
10420 integer_minus_one_node);
10421 gimple_seq_add_stmt (&new_dlist, g);
10422 g = gimple_build_cond (EQ_EXPR, thread_num, num_threadsm1, lab1, lab2);
10423 gimple_seq_add_stmt (&new_dlist, g);
10424 g = gimple_build_label (lab1);
10425 gimple_seq_add_stmt (&new_dlist, g);
10426 gimple_seq_add_seq (&new_dlist, last_list);
10427 g = gimple_build_label (lab2);
10428 gimple_seq_add_stmt (&new_dlist, g);
10429 gimple_seq_add_seq (&new_dlist, *dlist);
10430 *dlist = new_dlist;
10433 /* Lower code for an OMP loop directive. */
10435 static void
10436 lower_omp_for (gimple_stmt_iterator *gsi_p, omp_context *ctx)
10438 tree *rhs_p, block;
10439 struct omp_for_data fd, *fdp = NULL;
10440 gomp_for *stmt = as_a <gomp_for *> (gsi_stmt (*gsi_p));
10441 gbind *new_stmt;
10442 gimple_seq omp_for_body, body, dlist, tred_ilist = NULL, tred_dlist = NULL;
10443 gimple_seq cnt_list = NULL, clist = NULL;
10444 gimple_seq oacc_head = NULL, oacc_tail = NULL;
10445 size_t i;
10447 push_gimplify_context ();
10449 lower_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
10451 block = make_node (BLOCK);
10452 new_stmt = gimple_build_bind (NULL, NULL, block);
10453 /* Replace at gsi right away, so that 'stmt' is no longer a member
10454 of a sequence, as we're going to add to a different
10455 one below. */
10456 gsi_replace (gsi_p, new_stmt, true);
10458 /* Move the declarations of temporaries in the loop body out before we
10459 make it go away. */
10460 omp_for_body = gimple_omp_body (stmt);
10461 if (!gimple_seq_empty_p (omp_for_body)
10462 && gimple_code (gimple_seq_first_stmt (omp_for_body)) == GIMPLE_BIND)
10464 gbind *inner_bind
10465 = as_a <gbind *> (gimple_seq_first_stmt (omp_for_body));
10466 tree vars = gimple_bind_vars (inner_bind);
10467 gimple_bind_append_vars (new_stmt, vars);
10468 /* bind_vars/BLOCK_VARS are being moved to new_stmt/block; don't
10469 keep them on the inner_bind and its block. */
10470 gimple_bind_set_vars (inner_bind, NULL_TREE);
10471 if (gimple_bind_block (inner_bind))
10472 BLOCK_VARS (gimple_bind_block (inner_bind)) = NULL_TREE;
10475 if (gimple_omp_for_combined_into_p (stmt))
10477 omp_extract_for_data (stmt, &fd, NULL);
10478 fdp = &fd;
10480 /* We need two temporaries with fd.loop.v type (istart/iend)
10481 and then (fd.collapse - 1) temporaries with the same
10482 type for count2 ... countN-1 vars if not constant. */
10483 size_t count = 2;
10484 tree type = fd.iter_type;
10485 if (fd.collapse > 1
10486 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
10487 count += fd.collapse - 1;
10488 size_t count2 = 0;
10489 tree type2 = NULL_TREE;
10490 bool taskreg_for
10491 = (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR
10492 || gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP);
10493 tree outerc = NULL, *pc = gimple_omp_for_clauses_ptr (stmt);
10494 tree simtc = NULL;
10495 tree clauses = *pc;
10496 if (fd.collapse > 1
10497 && fd.non_rect
10498 && fd.last_nonrect == fd.first_nonrect + 1
10499 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
10500 if (tree v = gimple_omp_for_index (stmt, fd.last_nonrect))
10501 if (!TYPE_UNSIGNED (TREE_TYPE (v)))
10503 v = gimple_omp_for_index (stmt, fd.first_nonrect);
10504 type2 = TREE_TYPE (v);
10505 count++;
10506 count2 = 3;
10508 if (taskreg_for)
10509 outerc
10510 = omp_find_clause (gimple_omp_taskreg_clauses (ctx->outer->stmt),
10511 OMP_CLAUSE__LOOPTEMP_);
10512 if (ctx->simt_stmt)
10513 simtc = omp_find_clause (gimple_omp_for_clauses (ctx->simt_stmt),
10514 OMP_CLAUSE__LOOPTEMP_);
10515 for (i = 0; i < count + count2; i++)
10517 tree temp;
10518 if (taskreg_for)
10520 gcc_assert (outerc);
10521 temp = lookup_decl (OMP_CLAUSE_DECL (outerc), ctx->outer);
10522 outerc = omp_find_clause (OMP_CLAUSE_CHAIN (outerc),
10523 OMP_CLAUSE__LOOPTEMP_);
10525 else
10527 /* If there are 2 adjacent SIMD stmts, one with _simt_
10528 clause, another without, make sure they have the same
10529 decls in _looptemp_ clauses, because the outer stmt
10530 they are combined into will look up just one inner_stmt. */
10531 if (ctx->simt_stmt)
10532 temp = OMP_CLAUSE_DECL (simtc);
10533 else
10534 temp = create_tmp_var (i >= count ? type2 : type);
10535 insert_decl_map (&ctx->outer->cb, temp, temp);
10537 *pc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
10538 OMP_CLAUSE_DECL (*pc) = temp;
10539 pc = &OMP_CLAUSE_CHAIN (*pc);
10540 if (ctx->simt_stmt)
10541 simtc = omp_find_clause (OMP_CLAUSE_CHAIN (simtc),
10542 OMP_CLAUSE__LOOPTEMP_);
10544 *pc = clauses;
10547 /* The pre-body and input clauses go before the lowered GIMPLE_OMP_FOR. */
10548 dlist = NULL;
10549 body = NULL;
10550 tree rclauses
10551 = omp_task_reductions_find_first (gimple_omp_for_clauses (stmt), OMP_FOR,
10552 OMP_CLAUSE_REDUCTION);
10553 tree rtmp = NULL_TREE;
10554 if (rclauses)
10556 tree type = build_pointer_type (pointer_sized_int_node);
10557 tree temp = create_tmp_var (type);
10558 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
10559 OMP_CLAUSE_DECL (c) = temp;
10560 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (stmt);
10561 gimple_omp_for_set_clauses (stmt, c);
10562 lower_omp_task_reductions (ctx, OMP_FOR,
10563 gimple_omp_for_clauses (stmt),
10564 &tred_ilist, &tred_dlist);
10565 rclauses = c;
10566 rtmp = make_ssa_name (type);
10567 gimple_seq_add_stmt (&body, gimple_build_assign (rtmp, temp));
10570 lower_lastprivate_conditional_clauses (gimple_omp_for_clauses_ptr (stmt),
10571 ctx);
10573 lower_rec_input_clauses (gimple_omp_for_clauses (stmt), &body, &dlist, ctx,
10574 fdp);
10575 gimple_seq_add_seq (rclauses ? &tred_ilist : &body,
10576 gimple_omp_for_pre_body (stmt));
10578 lower_omp (gimple_omp_body_ptr (stmt), ctx);
10580 /* Lower the header expressions. At this point, we can assume that
10581 the header is of the form:
10583 #pragma omp for (V = VAL1; V {<|>|<=|>=} VAL2; V = V [+-] VAL3)
10585 We just need to make sure that VAL1, VAL2 and VAL3 are lowered
10586 using the .omp_data_s mapping, if needed. */
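/* E.g., for a non-invariant bound (a sketch; the temporary name D.1234
   is illustrative):

     #pragma omp for
     for (i = 0; i < p->n; i++)
       ...

   the read of p->n is pulled out into D.1234 = p->n, and the
   GIMPLE_OMP_FOR header then only refers to the temporary.  */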
10587 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
10589 rhs_p = gimple_omp_for_initial_ptr (stmt, i);
10590 if (TREE_CODE (*rhs_p) == TREE_VEC)
10592 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 1)))
10593 TREE_VEC_ELT (*rhs_p, 1)
10594 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 1), &cnt_list);
10595 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 2)))
10596 TREE_VEC_ELT (*rhs_p, 2)
10597 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 2), &cnt_list);
10599 else if (!is_gimple_min_invariant (*rhs_p))
10600 *rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
10601 else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
10602 recompute_tree_invariant_for_addr_expr (*rhs_p);
10604 rhs_p = gimple_omp_for_final_ptr (stmt, i);
10605 if (TREE_CODE (*rhs_p) == TREE_VEC)
10607 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 1)))
10608 TREE_VEC_ELT (*rhs_p, 1)
10609 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 1), &cnt_list);
10610 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 2)))
10611 TREE_VEC_ELT (*rhs_p, 2)
10612 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 2), &cnt_list);
10614 else if (!is_gimple_min_invariant (*rhs_p))
10615 *rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
10616 else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
10617 recompute_tree_invariant_for_addr_expr (*rhs_p);
10619 rhs_p = &TREE_OPERAND (gimple_omp_for_incr (stmt, i), 1);
10620 if (!is_gimple_min_invariant (*rhs_p))
10621 *rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
10623 if (rclauses)
10624 gimple_seq_add_seq (&tred_ilist, cnt_list);
10625 else
10626 gimple_seq_add_seq (&body, cnt_list);
10628 /* Once lowered, extract the bounds and clauses. */
10629 omp_extract_for_data (stmt, &fd, NULL);
10631 if (is_gimple_omp_oacc (ctx->stmt)
10632 && !ctx_in_oacc_kernels_region (ctx))
10633 lower_oacc_head_tail (gimple_location (stmt),
10634 gimple_omp_for_clauses (stmt),
10635 &oacc_head, &oacc_tail, ctx);
10637 /* Add OpenACC partitioning and reduction markers just before the loop. */
10638 if (oacc_head)
10639 gimple_seq_add_seq (&body, oacc_head);
10641 lower_omp_for_lastprivate (&fd, &body, &dlist, &clist, ctx);
10643 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
10644 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
10645 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
10646 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
10648 OMP_CLAUSE_DECL (c) = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
10649 if (DECL_P (OMP_CLAUSE_LINEAR_STEP (c)))
10650 OMP_CLAUSE_LINEAR_STEP (c)
10651 = maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_LINEAR_STEP (c),
10652 ctx);
10655 if ((ctx->scan_inclusive || ctx->scan_exclusive)
10656 && gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
10657 lower_omp_for_scan (&body, &dlist, stmt, &fd, ctx);
10658 else
10660 gimple_seq_add_stmt (&body, stmt);
10661 gimple_seq_add_seq (&body, gimple_omp_body (stmt));
10664 gimple_seq_add_stmt (&body, gimple_build_omp_continue (fd.loop.v,
10665 fd.loop.v));
10667 /* After the loop, add exit clauses. */
10668 lower_reduction_clauses (gimple_omp_for_clauses (stmt), &body, &clist, ctx);
10670 if (clist)
10672 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START);
10673 gcall *g = gimple_build_call (fndecl, 0);
10674 gimple_seq_add_stmt (&body, g);
10675 gimple_seq_add_seq (&body, clist);
10676 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END);
10677 g = gimple_build_call (fndecl, 0);
10678 gimple_seq_add_stmt (&body, g);
10681 if (ctx->cancellable)
10682 gimple_seq_add_stmt (&body, gimple_build_label (ctx->cancel_label));
10684 gimple_seq_add_seq (&body, dlist);
10686 if (rclauses)
10688 gimple_seq_add_seq (&tred_ilist, body);
10689 body = tred_ilist;
10692 body = maybe_catch_exception (body);
10694 /* Region exit marker goes at the end of the loop body. */
10695 gimple *g = gimple_build_omp_return (fd.have_nowait);
10696 gimple_seq_add_stmt (&body, g);
10698 gimple_seq_add_seq (&body, tred_dlist);
10700 maybe_add_implicit_barrier_cancel (ctx, g, &body);
10702 if (rclauses)
10703 OMP_CLAUSE_DECL (rclauses) = rtmp;
10705 /* Add OpenACC joining and reduction markers just after the loop. */
10706 if (oacc_tail)
10707 gimple_seq_add_seq (&body, oacc_tail);
10709 pop_gimplify_context (new_stmt);
10711 gimple_bind_append_vars (new_stmt, ctx->block_vars);
10712 maybe_remove_omp_member_access_dummy_vars (new_stmt);
10713 BLOCK_VARS (block) = gimple_bind_vars (new_stmt);
10714 if (BLOCK_VARS (block))
10715 TREE_USED (block) = 1;
10717 gimple_bind_set_body (new_stmt, body);
10718 gimple_omp_set_body (stmt, NULL);
10719 gimple_omp_for_set_pre_body (stmt, NULL);
10722 /* Callback for walk_stmts. Check if the current statement only contains
10723 GIMPLE_OMP_FOR or GIMPLE_OMP_SECTIONS. */
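/* On return, the int pointed to by wi->info is 1 if exactly one such
   statement (and nothing else) was seen, -1 if any other statement was
   seen, and is left at 0 if nothing relevant was seen; GIMPLE_DEBUG
   statements are ignored.  */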
10725 static tree
10726 check_combined_parallel (gimple_stmt_iterator *gsi_p,
10727 bool *handled_ops_p,
10728 struct walk_stmt_info *wi)
10730 int *info = (int *) wi->info;
10731 gimple *stmt = gsi_stmt (*gsi_p);
10733 *handled_ops_p = true;
10734 switch (gimple_code (stmt))
10736 WALK_SUBSTMTS;
10738 case GIMPLE_DEBUG:
10739 break;
10740 case GIMPLE_OMP_FOR:
10741 case GIMPLE_OMP_SECTIONS:
10742 *info = *info == 0 ? 1 : -1;
10743 break;
10744 default:
10745 *info = -1;
10746 break;
10748 return NULL;
10751 struct omp_taskcopy_context
10753 /* This field must be at the beginning, as we do "inheritance": Some
10754 callback functions for tree-inline.c (e.g., omp_copy_decl)
10755 receive a copy_body_data pointer that is up-casted to an
10756 omp_context pointer. */
10757 copy_body_data cb;
10758 omp_context *ctx;
10761 static tree
10762 task_copyfn_copy_decl (tree var, copy_body_data *cb)
10764 struct omp_taskcopy_context *tcctx = (struct omp_taskcopy_context *) cb;
10766 if (splay_tree_lookup (tcctx->ctx->sfield_map, (splay_tree_key) var))
10767 return create_tmp_var (TREE_TYPE (var));
10769 return var;
10772 static tree
10773 task_copyfn_remap_type (struct omp_taskcopy_context *tcctx, tree orig_type)
10775 tree name, new_fields = NULL, type, f;
10777 type = lang_hooks.types.make_type (RECORD_TYPE);
10778 name = DECL_NAME (TYPE_NAME (orig_type));
10779 name = build_decl (gimple_location (tcctx->ctx->stmt),
10780 TYPE_DECL, name, type);
10781 TYPE_NAME (type) = name;
10783 for (f = TYPE_FIELDS (orig_type); f ; f = TREE_CHAIN (f))
10785 tree new_f = copy_node (f);
10786 DECL_CONTEXT (new_f) = type;
10787 TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &tcctx->cb);
10788 TREE_CHAIN (new_f) = new_fields;
10789 walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &tcctx->cb, NULL);
10790 walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r, &tcctx->cb, NULL);
10791 walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
10792 &tcctx->cb, NULL);
10793 new_fields = new_f;
10794 tcctx->cb.decl_map->put (f, new_f);
10796 TYPE_FIELDS (type) = nreverse (new_fields);
10797 layout_type (type);
10798 return type;
10801 /* Create task copyfn. */
10803 static void
10804 create_task_copyfn (gomp_task *task_stmt, omp_context *ctx)
10806 struct function *child_cfun;
10807 tree child_fn, t, c, src, dst, f, sf, arg, sarg, decl;
10808 tree record_type, srecord_type, bind, list;
10809 bool record_needs_remap = false, srecord_needs_remap = false;
10810 splay_tree_node n;
10811 struct omp_taskcopy_context tcctx;
10812 location_t loc = gimple_location (task_stmt);
10813 size_t looptempno = 0;
10815 child_fn = gimple_omp_task_copy_fn (task_stmt);
10816 child_cfun = DECL_STRUCT_FUNCTION (child_fn);
10817 gcc_assert (child_cfun->cfg == NULL);
10818 DECL_SAVED_TREE (child_fn) = alloc_stmt_list ();
10820 /* Reset DECL_CONTEXT on function arguments. */
10821 for (t = DECL_ARGUMENTS (child_fn); t; t = DECL_CHAIN (t))
10822 DECL_CONTEXT (t) = child_fn;
10824 /* Populate the function. */
10825 push_gimplify_context ();
10826 push_cfun (child_cfun);
10828 bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
10829 TREE_SIDE_EFFECTS (bind) = 1;
10830 list = NULL;
10831 DECL_SAVED_TREE (child_fn) = bind;
10832 DECL_SOURCE_LOCATION (child_fn) = gimple_location (task_stmt);
10834 /* Remap src and dst argument types if needed. */
10835 record_type = ctx->record_type;
10836 srecord_type = ctx->srecord_type;
10837 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
10838 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
10840 record_needs_remap = true;
10841 break;
10843 for (f = TYPE_FIELDS (srecord_type); f ; f = DECL_CHAIN (f))
10844 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
10846 srecord_needs_remap = true;
10847 break;
10850 if (record_needs_remap || srecord_needs_remap)
10852 memset (&tcctx, '\0', sizeof (tcctx));
10853 tcctx.cb.src_fn = ctx->cb.src_fn;
10854 tcctx.cb.dst_fn = child_fn;
10855 tcctx.cb.src_node = cgraph_node::get (tcctx.cb.src_fn);
10856 gcc_checking_assert (tcctx.cb.src_node);
10857 tcctx.cb.dst_node = tcctx.cb.src_node;
10858 tcctx.cb.src_cfun = ctx->cb.src_cfun;
10859 tcctx.cb.copy_decl = task_copyfn_copy_decl;
10860 tcctx.cb.eh_lp_nr = 0;
10861 tcctx.cb.transform_call_graph_edges = CB_CGE_MOVE;
10862 tcctx.cb.decl_map = new hash_map<tree, tree>;
10863 tcctx.ctx = ctx;
10865 if (record_needs_remap)
10866 record_type = task_copyfn_remap_type (&tcctx, record_type);
10867 if (srecord_needs_remap)
10868 srecord_type = task_copyfn_remap_type (&tcctx, srecord_type);
10870 else
10871 tcctx.cb.decl_map = NULL;
10873 arg = DECL_ARGUMENTS (child_fn);
10874 TREE_TYPE (arg) = build_pointer_type (record_type);
10875 sarg = DECL_CHAIN (arg);
10876 TREE_TYPE (sarg) = build_pointer_type (srecord_type);
10878 /* First pass: initialize temporaries used in record_type and srecord_type
10879 sizes and field offsets. */
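/* E.g. if a remapped field is a VLA whose DECL_SIZE mentions a size
   variable, the temporary standing in for that variable must receive
   its value from *SARG before any field that uses it can be
   addressed; that is what this extra pass does.  */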
10880 if (tcctx.cb.decl_map)
10881 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
10882 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
10884 tree *p;
10886 decl = OMP_CLAUSE_DECL (c);
10887 p = tcctx.cb.decl_map->get (decl);
10888 if (p == NULL)
10889 continue;
10890 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
10891 sf = (tree) n->value;
10892 sf = *tcctx.cb.decl_map->get (sf);
10893 src = build_simple_mem_ref_loc (loc, sarg);
10894 src = omp_build_component_ref (src, sf);
10895 t = build2 (MODIFY_EXPR, TREE_TYPE (*p), *p, src);
10896 append_to_statement_list (t, &list);
10899 /* Second pass: copy shared var pointers and copy-construct non-VLA
10900 firstprivate vars. */
10901 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
10902 switch (OMP_CLAUSE_CODE (c))
10904 splay_tree_key key;
10905 case OMP_CLAUSE_SHARED:
10906 decl = OMP_CLAUSE_DECL (c);
10907 key = (splay_tree_key) decl;
10908 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
10909 key = (splay_tree_key) &DECL_UID (decl);
10910 n = splay_tree_lookup (ctx->field_map, key);
10911 if (n == NULL)
10912 break;
10913 f = (tree) n->value;
10914 if (tcctx.cb.decl_map)
10915 f = *tcctx.cb.decl_map->get (f);
10916 n = splay_tree_lookup (ctx->sfield_map, key);
10917 sf = (tree) n->value;
10918 if (tcctx.cb.decl_map)
10919 sf = *tcctx.cb.decl_map->get (sf);
10920 src = build_simple_mem_ref_loc (loc, sarg);
10921 src = omp_build_component_ref (src, sf);
10922 dst = build_simple_mem_ref_loc (loc, arg);
10923 dst = omp_build_component_ref (dst, f);
10924 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
10925 append_to_statement_list (t, &list);
10926 break;
10927 case OMP_CLAUSE_REDUCTION:
10928 case OMP_CLAUSE_IN_REDUCTION:
10929 decl = OMP_CLAUSE_DECL (c);
10930 if (TREE_CODE (decl) == MEM_REF)
10932 decl = TREE_OPERAND (decl, 0);
10933 if (TREE_CODE (decl) == POINTER_PLUS_EXPR)
10934 decl = TREE_OPERAND (decl, 0);
10935 if (TREE_CODE (decl) == INDIRECT_REF
10936 || TREE_CODE (decl) == ADDR_EXPR)
10937 decl = TREE_OPERAND (decl, 0);
10939 key = (splay_tree_key) decl;
10940 n = splay_tree_lookup (ctx->field_map, key);
10941 if (n == NULL)
10942 break;
10943 f = (tree) n->value;
10944 if (tcctx.cb.decl_map)
10945 f = *tcctx.cb.decl_map->get (f);
10946 n = splay_tree_lookup (ctx->sfield_map, key);
10947 sf = (tree) n->value;
10948 if (tcctx.cb.decl_map)
10949 sf = *tcctx.cb.decl_map->get (sf);
10950 src = build_simple_mem_ref_loc (loc, sarg);
10951 src = omp_build_component_ref (src, sf);
10952 if (decl != OMP_CLAUSE_DECL (c)
10953 && TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
10954 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
10955 src = build_simple_mem_ref_loc (loc, src);
10956 dst = build_simple_mem_ref_loc (loc, arg);
10957 dst = omp_build_component_ref (dst, f);
10958 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
10959 append_to_statement_list (t, &list);
10960 break;
10961 case OMP_CLAUSE__LOOPTEMP_:
10962 /* Fields for the first two _looptemp_ clauses are initialized by
10963 GOMP_taskloop*; the rest are handled like firstprivate. */
10964 if (looptempno < 2)
10966 looptempno++;
10967 break;
10969 /* FALLTHRU */
10970 case OMP_CLAUSE__REDUCTEMP_:
10971 case OMP_CLAUSE_FIRSTPRIVATE:
10972 decl = OMP_CLAUSE_DECL (c);
10973 if (is_variable_sized (decl))
10974 break;
10975 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
10976 if (n == NULL)
10977 break;
10978 f = (tree) n->value;
10979 if (tcctx.cb.decl_map)
10980 f = *tcctx.cb.decl_map->get (f);
10981 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
10982 if (n != NULL)
10984 sf = (tree) n->value;
10985 if (tcctx.cb.decl_map)
10986 sf = *tcctx.cb.decl_map->get (sf);
10987 src = build_simple_mem_ref_loc (loc, sarg);
10988 src = omp_build_component_ref (src, sf);
10989 if (use_pointer_for_field (decl, NULL) || omp_is_reference (decl))
10990 src = build_simple_mem_ref_loc (loc, src);
10992 else
10993 src = decl;
10994 dst = build_simple_mem_ref_loc (loc, arg);
10995 dst = omp_build_component_ref (dst, f);
10996 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
10997 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
10998 else
10999 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
11000 append_to_statement_list (t, &list);
11001 break;
11002 case OMP_CLAUSE_PRIVATE:
11003 if (! OMP_CLAUSE_PRIVATE_OUTER_REF (c))
11004 break;
11005 decl = OMP_CLAUSE_DECL (c);
11006 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
11007 f = (tree) n->value;
11008 if (tcctx.cb.decl_map)
11009 f = *tcctx.cb.decl_map->get (f);
11010 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
11011 if (n != NULL)
11013 sf = (tree) n->value;
11014 if (tcctx.cb.decl_map)
11015 sf = *tcctx.cb.decl_map->get (sf);
11016 src = build_simple_mem_ref_loc (loc, sarg);
11017 src = omp_build_component_ref (src, sf);
11018 if (use_pointer_for_field (decl, NULL))
11019 src = build_simple_mem_ref_loc (loc, src);
11021 else
11022 src = decl;
11023 dst = build_simple_mem_ref_loc (loc, arg);
11024 dst = omp_build_component_ref (dst, f);
11025 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
11026 append_to_statement_list (t, &list);
11027 break;
11028 default:
11029 break;
11032 /* Last pass: handle VLA firstprivates. */
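/* Sketch: for "char buf[n]" made firstprivate on a task, BUF has a
   DECL_VALUE_EXPR of the form *BUF.PTR (BUF.PTR being a made-up name
   for the pointer replacement decl).  The loop below first
   copy-constructs the incoming bytes into the task's data block and
   then stores the address of that copy into the remapped pointer
   field, re-establishing the value expression inside the copyfn.  */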
11033 if (tcctx.cb.decl_map)
11034 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
11035 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
11037 tree ind, ptr, df;
11039 decl = OMP_CLAUSE_DECL (c);
11040 if (!is_variable_sized (decl))
11041 continue;
11042 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
11043 if (n == NULL)
11044 continue;
11045 f = (tree) n->value;
11046 f = *tcctx.cb.decl_map->get (f);
11047 gcc_assert (DECL_HAS_VALUE_EXPR_P (decl));
11048 ind = DECL_VALUE_EXPR (decl);
11049 gcc_assert (TREE_CODE (ind) == INDIRECT_REF);
11050 gcc_assert (DECL_P (TREE_OPERAND (ind, 0)));
11051 n = splay_tree_lookup (ctx->sfield_map,
11052 (splay_tree_key) TREE_OPERAND (ind, 0));
11053 sf = (tree) n->value;
11054 sf = *tcctx.cb.decl_map->get (sf);
11055 src = build_simple_mem_ref_loc (loc, sarg);
11056 src = omp_build_component_ref (src, sf);
11057 src = build_simple_mem_ref_loc (loc, src);
11058 dst = build_simple_mem_ref_loc (loc, arg);
11059 dst = omp_build_component_ref (dst, f);
11060 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
11061 append_to_statement_list (t, &list);
11062 n = splay_tree_lookup (ctx->field_map,
11063 (splay_tree_key) TREE_OPERAND (ind, 0));
11064 df = (tree) n->value;
11065 df = *tcctx.cb.decl_map->get (df);
11066 ptr = build_simple_mem_ref_loc (loc, arg);
11067 ptr = omp_build_component_ref (ptr, df);
11068 t = build2 (MODIFY_EXPR, TREE_TYPE (ptr), ptr,
11069 build_fold_addr_expr_loc (loc, dst));
11070 append_to_statement_list (t, &list);
11073 t = build1 (RETURN_EXPR, void_type_node, NULL);
11074 append_to_statement_list (t, &list);
11076 if (tcctx.cb.decl_map)
11077 delete tcctx.cb.decl_map;
11078 pop_gimplify_context (NULL);
11079 BIND_EXPR_BODY (bind) = list;
11080 pop_cfun ();
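/* A sketch of the array layout built below (GOMP interface): the
   depend clauses are flattened into an array of pointers that starts
   with a small header of counts followed by the dependence addresses,
   out/inout entries first.  When mutexinoutset or depobj dependences
   occur, a wider 5-slot header is used instead.  E.g., for

     #pragma omp task depend (in: a) depend (out: b)

   the array is roughly { (void *) 2, (void *) 1, &b, &a }: two
   dependences in total, one of them out/inout.  */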
11083 static void
11084 lower_depend_clauses (tree *pclauses, gimple_seq *iseq, gimple_seq *oseq)
11086 tree c, clauses;
11087 gimple *g;
11088 size_t cnt[4] = { 0, 0, 0, 0 }, idx = 2, i;
11090 clauses = omp_find_clause (*pclauses, OMP_CLAUSE_DEPEND);
11091 gcc_assert (clauses);
11092 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
11093 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
11094 switch (OMP_CLAUSE_DEPEND_KIND (c))
11096 case OMP_CLAUSE_DEPEND_LAST:
11097 /* Lowering already done at gimplification. */
11098 return;
11099 case OMP_CLAUSE_DEPEND_IN:
11100 cnt[2]++;
11101 break;
11102 case OMP_CLAUSE_DEPEND_OUT:
11103 case OMP_CLAUSE_DEPEND_INOUT:
11104 cnt[0]++;
11105 break;
11106 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
11107 cnt[1]++;
11108 break;
11109 case OMP_CLAUSE_DEPEND_DEPOBJ:
11110 cnt[3]++;
11111 break;
11112 case OMP_CLAUSE_DEPEND_SOURCE:
11113 case OMP_CLAUSE_DEPEND_SINK:
11114 /* FALLTHRU */
11115 default:
11116 gcc_unreachable ();
11118 if (cnt[1] || cnt[3])
11119 idx = 5;
11120 size_t total = cnt[0] + cnt[1] + cnt[2] + cnt[3];
11121 tree type = build_array_type_nelts (ptr_type_node, total + idx);
11122 tree array = create_tmp_var (type);
11123 TREE_ADDRESSABLE (array) = 1;
11124 tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
11125 NULL_TREE);
11126 if (idx == 5)
11128 g = gimple_build_assign (r, build_int_cst (ptr_type_node, 0));
11129 gimple_seq_add_stmt (iseq, g);
11130 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
11131 NULL_TREE);
11133 g = gimple_build_assign (r, build_int_cst (ptr_type_node, total));
11134 gimple_seq_add_stmt (iseq, g);
11135 for (i = 0; i < (idx == 5 ? 3 : 1); i++)
11137 r = build4 (ARRAY_REF, ptr_type_node, array,
11138 size_int (i + 1 + (idx == 5)), NULL_TREE, NULL_TREE);
11139 g = gimple_build_assign (r, build_int_cst (ptr_type_node, cnt[i]));
11140 gimple_seq_add_stmt (iseq, g);
11142 for (i = 0; i < 4; i++)
11144 if (cnt[i] == 0)
11145 continue;
11146 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
11147 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
11148 continue;
11149 else
11151 switch (OMP_CLAUSE_DEPEND_KIND (c))
11153 case OMP_CLAUSE_DEPEND_IN:
11154 if (i != 2)
11155 continue;
11156 break;
11157 case OMP_CLAUSE_DEPEND_OUT:
11158 case OMP_CLAUSE_DEPEND_INOUT:
11159 if (i != 0)
11160 continue;
11161 break;
11162 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
11163 if (i != 1)
11164 continue;
11165 break;
11166 case OMP_CLAUSE_DEPEND_DEPOBJ:
11167 if (i != 3)
11168 continue;
11169 break;
11170 default:
11171 gcc_unreachable ();
11173 tree t = OMP_CLAUSE_DECL (c);
11174 t = fold_convert (ptr_type_node, t);
11175 gimplify_expr (&t, iseq, NULL, is_gimple_val, fb_rvalue);
11176 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (idx++),
11177 NULL_TREE, NULL_TREE);
11178 g = gimple_build_assign (r, t);
11179 gimple_seq_add_stmt (iseq, g);
11182 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
11183 OMP_CLAUSE_DEPEND_KIND (c) = OMP_CLAUSE_DEPEND_LAST;
11184 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
11185 OMP_CLAUSE_CHAIN (c) = *pclauses;
11186 *pclauses = c;
11187 tree clobber = build_clobber (type);
11188 g = gimple_build_assign (array, clobber);
11189 gimple_seq_add_stmt (oseq, g);
11192 /* Lower the OpenMP parallel or task directive in the current statement
11193 in GSI_P. CTX holds context information for the directive. */
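/* For instance (illustrative only), lowering

     #pragma omp parallel shared (s) firstprivate (x)
       body;

   rewrites BODY so that S and X are reached through the sender /
   receiver pair built here (.omp_data_o stored to by the encountering
   thread, .omp_data_i read inside the region).  */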
11195 static void
11196 lower_omp_taskreg (gimple_stmt_iterator *gsi_p, omp_context *ctx)
11198 tree clauses;
11199 tree child_fn, t;
11200 gimple *stmt = gsi_stmt (*gsi_p);
11201 gbind *par_bind, *bind, *dep_bind = NULL;
11202 gimple_seq par_body;
11203 location_t loc = gimple_location (stmt);
11205 clauses = gimple_omp_taskreg_clauses (stmt);
11206 if (gimple_code (stmt) == GIMPLE_OMP_TASK
11207 && gimple_omp_task_taskwait_p (stmt))
11209 par_bind = NULL;
11210 par_body = NULL;
11212 else
11214 par_bind
11215 = as_a <gbind *> (gimple_seq_first_stmt (gimple_omp_body (stmt)));
11216 par_body = gimple_bind_body (par_bind);
11218 child_fn = ctx->cb.dst_fn;
11219 if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL
11220 && !gimple_omp_parallel_combined_p (stmt))
11222 struct walk_stmt_info wi;
11223 int ws_num = 0;
11225 memset (&wi, 0, sizeof (wi));
11226 wi.info = &ws_num;
11227 wi.val_only = true;
11228 walk_gimple_seq (par_body, check_combined_parallel, NULL, &wi);
11229 if (ws_num == 1)
11230 gimple_omp_parallel_set_combined_p (stmt, true);
11232 gimple_seq dep_ilist = NULL;
11233 gimple_seq dep_olist = NULL;
11234 if (gimple_code (stmt) == GIMPLE_OMP_TASK
11235 && omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
11237 push_gimplify_context ();
11238 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
11239 lower_depend_clauses (gimple_omp_task_clauses_ptr (stmt),
11240 &dep_ilist, &dep_olist);
11243 if (gimple_code (stmt) == GIMPLE_OMP_TASK
11244 && gimple_omp_task_taskwait_p (stmt))
11246 if (dep_bind)
11248 gsi_replace (gsi_p, dep_bind, true);
11249 gimple_bind_add_seq (dep_bind, dep_ilist);
11250 gimple_bind_add_stmt (dep_bind, stmt);
11251 gimple_bind_add_seq (dep_bind, dep_olist);
11252 pop_gimplify_context (dep_bind);
11254 return;
11257 if (ctx->srecord_type)
11258 create_task_copyfn (as_a <gomp_task *> (stmt), ctx);
11260 gimple_seq tskred_ilist = NULL;
11261 gimple_seq tskred_olist = NULL;
11262 if ((is_task_ctx (ctx)
11263 && gimple_omp_task_taskloop_p (ctx->stmt)
11264 && omp_find_clause (gimple_omp_task_clauses (ctx->stmt),
11265 OMP_CLAUSE_REDUCTION))
11266 || (is_parallel_ctx (ctx)
11267 && omp_find_clause (gimple_omp_parallel_clauses (stmt),
11268 OMP_CLAUSE__REDUCTEMP_)))
11270 if (dep_bind == NULL)
11272 push_gimplify_context ();
11273 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
11275 lower_omp_task_reductions (ctx, is_task_ctx (ctx) ? OMP_TASKLOOP
11276 : OMP_PARALLEL,
11277 gimple_omp_taskreg_clauses (ctx->stmt),
11278 &tskred_ilist, &tskred_olist);
11281 push_gimplify_context ();
11283 gimple_seq par_olist = NULL;
11284 gimple_seq par_ilist = NULL;
11285 gimple_seq par_rlist = NULL;
11286 lower_rec_input_clauses (clauses, &par_ilist, &par_olist, ctx, NULL);
11287 lower_omp (&par_body, ctx);
11288 if (gimple_code (stmt) != GIMPLE_OMP_TASK)
11289 lower_reduction_clauses (clauses, &par_rlist, NULL, ctx);
11291 /* Declare all the variables created by mapping and the variables
11292 declared in the scope of the parallel body. */
11293 record_vars_into (ctx->block_vars, child_fn);
11294 maybe_remove_omp_member_access_dummy_vars (par_bind);
11295 record_vars_into (gimple_bind_vars (par_bind), child_fn);
11297 if (ctx->record_type)
11299 ctx->sender_decl
11300 = create_tmp_var (ctx->srecord_type ? ctx->srecord_type
11301 : ctx->record_type, ".omp_data_o");
11302 DECL_NAMELESS (ctx->sender_decl) = 1;
11303 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
11304 gimple_omp_taskreg_set_data_arg (stmt, ctx->sender_decl);
11307 gimple_seq olist = NULL;
11308 gimple_seq ilist = NULL;
11309 lower_send_clauses (clauses, &ilist, &olist, ctx);
11310 lower_send_shared_vars (&ilist, &olist, ctx);
11312 if (ctx->record_type)
11314 tree clobber = build_clobber (TREE_TYPE (ctx->sender_decl));
11315 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
11316 clobber));
11319 /* Once all the expansions are done, sequence all the different
11320 fragments inside gimple_omp_body. */
11322 gimple_seq new_body = NULL;
11324 if (ctx->record_type)
11326 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
11327 /* fixup_child_record_type might have changed receiver_decl's type. */
11328 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
11329 gimple_seq_add_stmt (&new_body,
11330 gimple_build_assign (ctx->receiver_decl, t));
11333 gimple_seq_add_seq (&new_body, par_ilist);
11334 gimple_seq_add_seq (&new_body, par_body);
11335 gimple_seq_add_seq (&new_body, par_rlist);
11336 if (ctx->cancellable)
11337 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
11338 gimple_seq_add_seq (&new_body, par_olist);
11339 new_body = maybe_catch_exception (new_body);
11340 if (gimple_code (stmt) == GIMPLE_OMP_TASK)
11341 gimple_seq_add_stmt (&new_body,
11342 gimple_build_omp_continue (integer_zero_node,
11343 integer_zero_node));
11344 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
11345 gimple_omp_set_body (stmt, new_body);
11347 if (dep_bind && gimple_bind_block (par_bind) == NULL_TREE)
11348 bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
11349 else
11350 bind = gimple_build_bind (NULL, NULL, gimple_bind_block (par_bind));
11351 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
11352 gimple_bind_add_seq (bind, ilist);
11353 gimple_bind_add_stmt (bind, stmt);
11354 gimple_bind_add_seq (bind, olist);
11356 pop_gimplify_context (NULL);
11358 if (dep_bind)
11360 gimple_bind_add_seq (dep_bind, dep_ilist);
11361 gimple_bind_add_seq (dep_bind, tskred_ilist);
11362 gimple_bind_add_stmt (dep_bind, bind);
11363 gimple_bind_add_seq (dep_bind, tskred_olist);
11364 gimple_bind_add_seq (dep_bind, dep_olist);
11365 pop_gimplify_context (dep_bind);
11369 /* Lower the GIMPLE_OMP_TARGET in the current statement
11370 in GSI_P. CTX holds context information for the directive. */
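/* Illustrative example:

     #pragma omp target map (tofrom: a)
       a++;

   The lowering below collects every mapped object into three parallel
   arrays -- .omp_data_arr (host addresses), .omp_data_sizes (byte
   sizes) and .omp_data_kinds (map kind plus encoded alignment) --
   which the runtime walks to set up the device mapping before the
   region runs.  */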
11372 static void
11373 lower_omp_target (gimple_stmt_iterator *gsi_p, omp_context *ctx)
11375 tree clauses;
11376 tree child_fn, t, c;
11377 gomp_target *stmt = as_a <gomp_target *> (gsi_stmt (*gsi_p));
11378 gbind *tgt_bind, *bind, *dep_bind = NULL;
11379 gimple_seq tgt_body, olist, ilist, fplist, new_body;
11380 location_t loc = gimple_location (stmt);
11381 bool offloaded, data_region;
11382 unsigned int map_cnt = 0;
11384 offloaded = is_gimple_omp_offloaded (stmt);
11385 switch (gimple_omp_target_kind (stmt))
11387 case GF_OMP_TARGET_KIND_REGION:
11388 case GF_OMP_TARGET_KIND_UPDATE:
11389 case GF_OMP_TARGET_KIND_ENTER_DATA:
11390 case GF_OMP_TARGET_KIND_EXIT_DATA:
11391 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
11392 case GF_OMP_TARGET_KIND_OACC_KERNELS:
11393 case GF_OMP_TARGET_KIND_OACC_SERIAL:
11394 case GF_OMP_TARGET_KIND_OACC_UPDATE:
11395 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
11396 case GF_OMP_TARGET_KIND_OACC_DECLARE:
11397 data_region = false;
11398 break;
11399 case GF_OMP_TARGET_KIND_DATA:
11400 case GF_OMP_TARGET_KIND_OACC_DATA:
11401 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
11402 data_region = true;
11403 break;
11404 default:
11405 gcc_unreachable ();
11408 clauses = gimple_omp_target_clauses (stmt);
11410 gimple_seq dep_ilist = NULL;
11411 gimple_seq dep_olist = NULL;
11412 if (omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
11414 push_gimplify_context ();
11415 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
11416 lower_depend_clauses (gimple_omp_target_clauses_ptr (stmt),
11417 &dep_ilist, &dep_olist);
11420 tgt_bind = NULL;
11421 tgt_body = NULL;
11422 if (offloaded)
11424 tgt_bind = gimple_seq_first_stmt_as_a_bind (gimple_omp_body (stmt));
11425 tgt_body = gimple_bind_body (tgt_bind);
11427 else if (data_region)
11428 tgt_body = gimple_omp_body (stmt);
11429 child_fn = ctx->cb.dst_fn;
11431 push_gimplify_context ();
11432 fplist = NULL;
11434 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
11435 switch (OMP_CLAUSE_CODE (c))
11437 tree var, x;
11439 default:
11440 break;
11441 case OMP_CLAUSE_MAP:
11442 #if CHECKING_P
11443 /* First check what we're prepared to handle in the following. */
11444 switch (OMP_CLAUSE_MAP_KIND (c))
11446 case GOMP_MAP_ALLOC:
11447 case GOMP_MAP_TO:
11448 case GOMP_MAP_FROM:
11449 case GOMP_MAP_TOFROM:
11450 case GOMP_MAP_POINTER:
11451 case GOMP_MAP_TO_PSET:
11452 case GOMP_MAP_DELETE:
11453 case GOMP_MAP_RELEASE:
11454 case GOMP_MAP_ALWAYS_TO:
11455 case GOMP_MAP_ALWAYS_FROM:
11456 case GOMP_MAP_ALWAYS_TOFROM:
11457 case GOMP_MAP_FIRSTPRIVATE_POINTER:
11458 case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
11459 case GOMP_MAP_STRUCT:
11460 case GOMP_MAP_ALWAYS_POINTER:
11461 break;
11462 case GOMP_MAP_IF_PRESENT:
11463 case GOMP_MAP_FORCE_ALLOC:
11464 case GOMP_MAP_FORCE_TO:
11465 case GOMP_MAP_FORCE_FROM:
11466 case GOMP_MAP_FORCE_TOFROM:
11467 case GOMP_MAP_FORCE_PRESENT:
11468 case GOMP_MAP_FORCE_DEVICEPTR:
11469 case GOMP_MAP_DEVICE_RESIDENT:
11470 case GOMP_MAP_LINK:
11471 case GOMP_MAP_ATTACH:
11472 case GOMP_MAP_DETACH:
11473 case GOMP_MAP_FORCE_DETACH:
11474 gcc_assert (is_gimple_omp_oacc (stmt));
11475 break;
11476 default:
11477 gcc_unreachable ();
11479 #endif
11480 /* FALLTHRU */
11481 case OMP_CLAUSE_TO:
11482 case OMP_CLAUSE_FROM:
11483 oacc_firstprivate:
11484 var = OMP_CLAUSE_DECL (c);
11485 if (!DECL_P (var))
11487 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP
11488 || (!OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
11489 && (OMP_CLAUSE_MAP_KIND (c)
11490 != GOMP_MAP_FIRSTPRIVATE_POINTER)))
11491 map_cnt++;
11492 continue;
11495 if (DECL_SIZE (var)
11496 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
11498 tree var2 = DECL_VALUE_EXPR (var);
11499 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
11500 var2 = TREE_OPERAND (var2, 0);
11501 gcc_assert (DECL_P (var2));
11502 var = var2;
11505 if (offloaded
11506 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
11507 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
11508 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
11510 if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
11512 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx))
11513 && varpool_node::get_create (var)->offloadable)
11514 continue;
11516 tree type = build_pointer_type (TREE_TYPE (var));
11517 tree new_var = lookup_decl (var, ctx);
11518 x = create_tmp_var_raw (type, get_name (new_var));
11519 gimple_add_tmp_var (x);
11520 x = build_simple_mem_ref (x);
11521 SET_DECL_VALUE_EXPR (new_var, x);
11522 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
11524 continue;
11527 if (!maybe_lookup_field (var, ctx))
11528 continue;
11530 /* Don't remap compute constructs' reduction variables, because the
11531 intermediate result must be local to each gang. */
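/* E.g. in "#pragma acc parallel reduction (+:sum)" (SUM being an
   invented name), each gang's partial SUM must stay in gang-local
   storage, so it is not redirected through the receiver record.  */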
11532 if (offloaded && !(OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
11533 && OMP_CLAUSE_MAP_IN_REDUCTION (c)))
11535 x = build_receiver_ref (var, true, ctx);
11536 tree new_var = lookup_decl (var, ctx);
11538 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
11539 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
11540 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
11541 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
11542 x = build_simple_mem_ref (x);
11543 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
11545 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
11546 if (omp_is_reference (new_var)
11547 && (TREE_CODE (TREE_TYPE (new_var)) != POINTER_TYPE
11548 || DECL_BY_REFERENCE (var)))
11550 /* Create a local object to hold the instance
11551 value. */
11552 tree type = TREE_TYPE (TREE_TYPE (new_var));
11553 const char *id = IDENTIFIER_POINTER (DECL_NAME (new_var));
11554 tree inst = create_tmp_var (type, id);
11555 gimplify_assign (inst, fold_indirect_ref (x), &fplist);
11556 x = build_fold_addr_expr (inst);
11558 gimplify_assign (new_var, x, &fplist);
11560 else if (DECL_P (new_var))
11562 SET_DECL_VALUE_EXPR (new_var, x);
11563 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
11565 else
11566 gcc_unreachable ();
11568 map_cnt++;
11569 break;
11571 case OMP_CLAUSE_FIRSTPRIVATE:
11572 if (is_oacc_parallel_or_serial (ctx))
11573 goto oacc_firstprivate;
11574 map_cnt++;
11575 var = OMP_CLAUSE_DECL (c);
11576 if (!omp_is_reference (var)
11577 && !is_gimple_reg_type (TREE_TYPE (var)))
11579 tree new_var = lookup_decl (var, ctx);
11580 if (is_variable_sized (var))
11582 tree pvar = DECL_VALUE_EXPR (var);
11583 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
11584 pvar = TREE_OPERAND (pvar, 0);
11585 gcc_assert (DECL_P (pvar));
11586 tree new_pvar = lookup_decl (pvar, ctx);
11587 x = build_fold_indirect_ref (new_pvar);
11588 TREE_THIS_NOTRAP (x) = 1;
11590 else
11591 x = build_receiver_ref (var, true, ctx);
11592 SET_DECL_VALUE_EXPR (new_var, x);
11593 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
11595 break;
11597 case OMP_CLAUSE_PRIVATE:
11598 if (is_gimple_omp_oacc (ctx->stmt))
11599 break;
11600 var = OMP_CLAUSE_DECL (c);
11601 if (is_variable_sized (var))
11603 tree new_var = lookup_decl (var, ctx);
11604 tree pvar = DECL_VALUE_EXPR (var);
11605 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
11606 pvar = TREE_OPERAND (pvar, 0);
11607 gcc_assert (DECL_P (pvar));
11608 tree new_pvar = lookup_decl (pvar, ctx);
11609 x = build_fold_indirect_ref (new_pvar);
11610 TREE_THIS_NOTRAP (x) = 1;
11611 SET_DECL_VALUE_EXPR (new_var, x);
11612 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
11614 break;
11616 case OMP_CLAUSE_USE_DEVICE_PTR:
11617 case OMP_CLAUSE_USE_DEVICE_ADDR:
11618 case OMP_CLAUSE_IS_DEVICE_PTR:
11619 var = OMP_CLAUSE_DECL (c);
11620 map_cnt++;
11621 if (is_variable_sized (var))
11623 tree new_var = lookup_decl (var, ctx);
11624 tree pvar = DECL_VALUE_EXPR (var);
11625 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
11626 pvar = TREE_OPERAND (pvar, 0);
11627 gcc_assert (DECL_P (pvar));
11628 tree new_pvar = lookup_decl (pvar, ctx);
11629 x = build_fold_indirect_ref (new_pvar);
11630 TREE_THIS_NOTRAP (x) = 1;
11631 SET_DECL_VALUE_EXPR (new_var, x);
11632 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
11634 else if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
11635 && !omp_is_reference (var)
11636 && !omp_is_allocatable_or_ptr (var)
11637 && !lang_hooks.decls.omp_array_data (var, true))
11638 || TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
11640 tree new_var = lookup_decl (var, ctx);
11641 tree type = build_pointer_type (TREE_TYPE (var));
11642 x = create_tmp_var_raw (type, get_name (new_var));
11643 gimple_add_tmp_var (x);
11644 x = build_simple_mem_ref (x);
11645 SET_DECL_VALUE_EXPR (new_var, x);
11646 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
11648 else
11650 tree new_var = lookup_decl (var, ctx);
11651 x = create_tmp_var_raw (TREE_TYPE (new_var), get_name (new_var));
11652 gimple_add_tmp_var (x);
11653 SET_DECL_VALUE_EXPR (new_var, x);
11654 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
11656 break;
11659 if (offloaded)
11661 target_nesting_level++;
11662 lower_omp (&tgt_body, ctx);
11663 target_nesting_level--;
11665 else if (data_region)
11666 lower_omp (&tgt_body, ctx);
11668 if (offloaded)
11670 /* Declare all the variables created by mapping and the variables
11671 declared in the scope of the target body. */
11672 record_vars_into (ctx->block_vars, child_fn);
11673 maybe_remove_omp_member_access_dummy_vars (tgt_bind);
11674 record_vars_into (gimple_bind_vars (tgt_bind), child_fn);
11677 olist = NULL;
11678 ilist = NULL;
11679 if (ctx->record_type)
11681 ctx->sender_decl
11682 = create_tmp_var (ctx->record_type, ".omp_data_arr");
11683 DECL_NAMELESS (ctx->sender_decl) = 1;
11684 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
11685 t = make_tree_vec (3);
11686 TREE_VEC_ELT (t, 0) = ctx->sender_decl;
11687 TREE_VEC_ELT (t, 1)
11688 = create_tmp_var (build_array_type_nelts (size_type_node, map_cnt),
11689 ".omp_data_sizes");
11690 DECL_NAMELESS (TREE_VEC_ELT (t, 1)) = 1;
11691 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 1)) = 1;
11692 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 1;
11693 tree tkind_type = short_unsigned_type_node;
11694 int talign_shift = 8;
11695 TREE_VEC_ELT (t, 2)
11696 = create_tmp_var (build_array_type_nelts (tkind_type, map_cnt),
11697 ".omp_data_kinds");
11698 DECL_NAMELESS (TREE_VEC_ELT (t, 2)) = 1;
11699 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 2)) = 1;
11700 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 1;
11701 gimple_omp_target_set_data_arg (stmt, t);
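/* Each kinds entry built below packs the GOMP_MAP_* value into the
   low TALIGN_SHIFT (8) bits and ceil_log2 of the alignment above
   them; e.g. an 8-byte-aligned tofrom mapping would be encoded as
   (GOMP_MAP_TOFROM | (3 << 8)).  */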
11703 vec<constructor_elt, va_gc> *vsize;
11704 vec<constructor_elt, va_gc> *vkind;
11705 vec_alloc (vsize, map_cnt);
11706 vec_alloc (vkind, map_cnt);
11707 unsigned int map_idx = 0;
11709 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
11710 switch (OMP_CLAUSE_CODE (c))
11712 tree ovar, nc, s, purpose, var, x, type;
11713 unsigned int talign;
11715 default:
11716 break;
11718 case OMP_CLAUSE_MAP:
11719 case OMP_CLAUSE_TO:
11720 case OMP_CLAUSE_FROM:
11721 oacc_firstprivate_map:
11722 nc = c;
11723 ovar = OMP_CLAUSE_DECL (c);
11724 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
11725 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
11726 || (OMP_CLAUSE_MAP_KIND (c)
11727 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
11728 break;
11729 if (!DECL_P (ovar))
11731 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
11732 && OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
11734 gcc_checking_assert (OMP_CLAUSE_DECL (OMP_CLAUSE_CHAIN (c))
11735 == get_base_address (ovar));
11736 nc = OMP_CLAUSE_CHAIN (c);
11737 ovar = OMP_CLAUSE_DECL (nc);
11739 else
11741 tree x = build_sender_ref (ovar, ctx);
11742 tree v
11743 = build_fold_addr_expr_with_type (ovar, ptr_type_node);
11744 gimplify_assign (x, v, &ilist);
11745 nc = NULL_TREE;
11748 else
11750 if (DECL_SIZE (ovar)
11751 && TREE_CODE (DECL_SIZE (ovar)) != INTEGER_CST)
11753 tree ovar2 = DECL_VALUE_EXPR (ovar);
11754 gcc_assert (TREE_CODE (ovar2) == INDIRECT_REF);
11755 ovar2 = TREE_OPERAND (ovar2, 0);
11756 gcc_assert (DECL_P (ovar2));
11757 ovar = ovar2;
11759 if (!maybe_lookup_field (ovar, ctx))
11760 continue;
11763 talign = TYPE_ALIGN_UNIT (TREE_TYPE (ovar));
11764 if (DECL_P (ovar) && DECL_ALIGN_UNIT (ovar) > talign)
11765 talign = DECL_ALIGN_UNIT (ovar);
11766 if (nc)
11768 var = lookup_decl_in_outer_ctx (ovar, ctx);
11769 x = build_sender_ref (ovar, ctx);
11771 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
11772 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
11773 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
11774 && TREE_CODE (TREE_TYPE (ovar)) == ARRAY_TYPE)
11776 gcc_assert (offloaded);
11777 tree avar
11778 = create_tmp_var (TREE_TYPE (TREE_TYPE (x)));
11779 mark_addressable (avar);
11780 gimplify_assign (avar, build_fold_addr_expr (var), &ilist);
11781 talign = DECL_ALIGN_UNIT (avar);
11782 avar = build_fold_addr_expr (avar);
11783 gimplify_assign (x, avar, &ilist);
11785 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
11787 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
11788 if (!omp_is_reference (var))
11790 if (is_gimple_reg (var)
11791 && OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
11792 TREE_NO_WARNING (var) = 1;
11793 var = build_fold_addr_expr (var);
11795 else
11796 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
11797 gimplify_assign (x, var, &ilist);
11799 else if (is_gimple_reg (var))
11801 gcc_assert (offloaded);
11802 tree avar = create_tmp_var (TREE_TYPE (var));
11803 mark_addressable (avar);
11804 enum gomp_map_kind map_kind = OMP_CLAUSE_MAP_KIND (c);
11805 if (GOMP_MAP_COPY_TO_P (map_kind)
11806 || map_kind == GOMP_MAP_POINTER
11807 || map_kind == GOMP_MAP_TO_PSET
11808 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
11810 /* If we need to initialize a temporary
11811 with VAR because it is not addressable, and
11812 the variable hasn't been initialized yet, then
11813 we'll get a warning for the store to avar.
11814 Don't warn in that case; the mapping might
11815 be implicit. */
11816 TREE_NO_WARNING (var) = 1;
11817 gimplify_assign (avar, var, &ilist);
11819 avar = build_fold_addr_expr (avar);
11820 gimplify_assign (x, avar, &ilist);
11821 if ((GOMP_MAP_COPY_FROM_P (map_kind)
11822 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
11823 && !TYPE_READONLY (TREE_TYPE (var)))
11825 x = unshare_expr (x);
11826 x = build_simple_mem_ref (x);
11827 gimplify_assign (var, x, &olist);
11830 else
11832 /* While MAP is handled explicitly by the FE,
11833 for 'target update', only the identified variable is passed. */
11834 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FROM
11835 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TO)
11836 && (omp_is_allocatable_or_ptr (var)
11837 && omp_check_optional_argument (var, false)))
11838 var = build_fold_indirect_ref (var);
11839 else if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FROM
11840 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_TO)
11841 || (!omp_is_allocatable_or_ptr (var)
11842 && !omp_check_optional_argument (var, false)))
11843 var = build_fold_addr_expr (var);
11844 gimplify_assign (x, var, &ilist);
11847 s = NULL_TREE;
11848 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
11850 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
11851 s = TREE_TYPE (ovar);
11852 if (TREE_CODE (s) == REFERENCE_TYPE
11853 || omp_check_optional_argument (ovar, false))
11854 s = TREE_TYPE (s);
11855 s = TYPE_SIZE_UNIT (s);
11857 else
11858 s = OMP_CLAUSE_SIZE (c);
11859 if (s == NULL_TREE)
11860 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
11861 s = fold_convert (size_type_node, s);
11862 purpose = size_int (map_idx++);
11863 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
11864 if (TREE_CODE (s) != INTEGER_CST)
11865 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
11867 unsigned HOST_WIDE_INT tkind, tkind_zero;
11868 switch (OMP_CLAUSE_CODE (c))
11870 case OMP_CLAUSE_MAP:
11871 tkind = OMP_CLAUSE_MAP_KIND (c);
11872 tkind_zero = tkind;
11873 if (OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c))
11874 switch (tkind)
11876 case GOMP_MAP_ALLOC:
11877 case GOMP_MAP_IF_PRESENT:
11878 case GOMP_MAP_TO:
11879 case GOMP_MAP_FROM:
11880 case GOMP_MAP_TOFROM:
11881 case GOMP_MAP_ALWAYS_TO:
11882 case GOMP_MAP_ALWAYS_FROM:
11883 case GOMP_MAP_ALWAYS_TOFROM:
11884 case GOMP_MAP_RELEASE:
11885 case GOMP_MAP_FORCE_TO:
11886 case GOMP_MAP_FORCE_FROM:
11887 case GOMP_MAP_FORCE_TOFROM:
11888 case GOMP_MAP_FORCE_PRESENT:
11889 tkind_zero = GOMP_MAP_ZERO_LEN_ARRAY_SECTION;
11890 break;
11891 case GOMP_MAP_DELETE:
11892 tkind_zero = GOMP_MAP_DELETE_ZERO_LEN_ARRAY_SECTION;
11893 default:
11894 break;
11896 if (tkind_zero != tkind)
11898 if (integer_zerop (s))
11899 tkind = tkind_zero;
11900 else if (integer_nonzerop (s))
11901 tkind_zero = tkind;
11903 break;
11904 case OMP_CLAUSE_FIRSTPRIVATE:
11905 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
11906 tkind = GOMP_MAP_TO;
11907 tkind_zero = tkind;
11908 break;
11909 case OMP_CLAUSE_TO:
11910 tkind = GOMP_MAP_TO;
11911 tkind_zero = tkind;
11912 break;
11913 case OMP_CLAUSE_FROM:
11914 tkind = GOMP_MAP_FROM;
11915 tkind_zero = tkind;
11916 break;
11917 default:
11918 gcc_unreachable ();
11920 gcc_checking_assert (tkind
11921 < (HOST_WIDE_INT_C (1U) << talign_shift));
11922 gcc_checking_assert (tkind_zero
11923 < (HOST_WIDE_INT_C (1U) << talign_shift));
11924 talign = ceil_log2 (talign);
11925 tkind |= talign << talign_shift;
11926 tkind_zero |= talign << talign_shift;
11927 gcc_checking_assert (tkind
11928 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
11929 gcc_checking_assert (tkind_zero
11930 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
11931 if (tkind == tkind_zero)
11932 x = build_int_cstu (tkind_type, tkind);
11933 else
11935 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 0;
11936 x = build3 (COND_EXPR, tkind_type,
11937 fold_build2 (EQ_EXPR, boolean_type_node,
11938 unshare_expr (s), size_zero_node),
11939 build_int_cstu (tkind_type, tkind_zero),
11940 build_int_cstu (tkind_type, tkind));
11942 CONSTRUCTOR_APPEND_ELT (vkind, purpose, x);
11943 if (nc && nc != c)
11944 c = nc;
11945 break;
11947 case OMP_CLAUSE_FIRSTPRIVATE:
11948 if (is_oacc_parallel_or_serial (ctx))
11949 goto oacc_firstprivate_map;
11950 ovar = OMP_CLAUSE_DECL (c);
11951 if (omp_is_reference (ovar))
11952 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
11953 else
11954 talign = DECL_ALIGN_UNIT (ovar);
11955 var = lookup_decl_in_outer_ctx (ovar, ctx);
11956 x = build_sender_ref (ovar, ctx);
11957 tkind = GOMP_MAP_FIRSTPRIVATE;
11958 type = TREE_TYPE (ovar);
11959 if (omp_is_reference (ovar))
11960 type = TREE_TYPE (type);
11961 if ((INTEGRAL_TYPE_P (type)
11962 && TYPE_PRECISION (type) <= POINTER_SIZE)
11963 || TREE_CODE (type) == POINTER_TYPE)
11965 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
11966 tree t = var;
11967 if (omp_is_reference (var))
11968 t = build_simple_mem_ref (var);
11969 else if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
11970 TREE_NO_WARNING (var) = 1;
11971 if (TREE_CODE (type) != POINTER_TYPE)
11972 t = fold_convert (pointer_sized_int_node, t);
11973 t = fold_convert (TREE_TYPE (x), t);
11974 gimplify_assign (x, t, &ilist);
11976 else if (omp_is_reference (var))
11977 gimplify_assign (x, var, &ilist);
11978 else if (is_gimple_reg (var))
11980 tree avar = create_tmp_var (TREE_TYPE (var));
11981 mark_addressable (avar);
11982 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
11983 TREE_NO_WARNING (var) = 1;
11984 gimplify_assign (avar, var, &ilist);
11985 avar = build_fold_addr_expr (avar);
11986 gimplify_assign (x, avar, &ilist);
11988 else
11990 var = build_fold_addr_expr (var);
11991 gimplify_assign (x, var, &ilist);
11993 if (tkind == GOMP_MAP_FIRSTPRIVATE_INT)
11994 s = size_int (0);
11995 else if (omp_is_reference (ovar))
11996 s = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
11997 else
11998 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
11999 s = fold_convert (size_type_node, s);
12000 purpose = size_int (map_idx++);
12001 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
12002 if (TREE_CODE (s) != INTEGER_CST)
12003 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
12005 gcc_checking_assert (tkind
12006 < (HOST_WIDE_INT_C (1U) << talign_shift));
12007 talign = ceil_log2 (talign);
12008 tkind |= talign << talign_shift;
12009 gcc_checking_assert (tkind
12010 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
12011 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
12012 build_int_cstu (tkind_type, tkind));
12013 break;
12015 case OMP_CLAUSE_USE_DEVICE_PTR:
12016 case OMP_CLAUSE_USE_DEVICE_ADDR:
12017 case OMP_CLAUSE_IS_DEVICE_PTR:
12018 ovar = OMP_CLAUSE_DECL (c);
12019 var = lookup_decl_in_outer_ctx (ovar, ctx);
12021 if (lang_hooks.decls.omp_array_data (ovar, true))
12023 tkind = (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR
12024 ? GOMP_MAP_USE_DEVICE_PTR : GOMP_MAP_FIRSTPRIVATE_INT);
12025 x = build_sender_ref ((splay_tree_key) &DECL_NAME (ovar), ctx);
12027 else if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR)
12029 tkind = GOMP_MAP_USE_DEVICE_PTR;
12030 x = build_sender_ref ((splay_tree_key) &DECL_UID (ovar), ctx);
12032 else
12034 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
12035 x = build_sender_ref (ovar, ctx);
12038 if (is_gimple_omp_oacc (ctx->stmt))
12040 gcc_assert (tkind == GOMP_MAP_USE_DEVICE_PTR);
12042 if (OMP_CLAUSE_USE_DEVICE_PTR_IF_PRESENT (c))
12043 tkind = GOMP_MAP_USE_DEVICE_PTR_IF_PRESENT;
12046 type = TREE_TYPE (ovar);
12047 if (lang_hooks.decls.omp_array_data (ovar, true))
12048 var = lang_hooks.decls.omp_array_data (ovar, false);
12049 else if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
12050 && !omp_is_reference (ovar)
12051 && !omp_is_allocatable_or_ptr (ovar))
12052 || TREE_CODE (type) == ARRAY_TYPE)
12053 var = build_fold_addr_expr (var);
12054 else
12056 if (omp_is_reference (ovar)
12057 || omp_check_optional_argument (ovar, false)
12058 || omp_is_allocatable_or_ptr (ovar))
12060 type = TREE_TYPE (type);
12061 if (TREE_CODE (type) != ARRAY_TYPE
12062 && ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_USE_DEVICE_ADDR
12063 && !omp_is_allocatable_or_ptr (ovar))
12064 || (omp_is_reference (ovar)
12065 && omp_is_allocatable_or_ptr (ovar))))
12066 var = build_simple_mem_ref (var);
12067 var = fold_convert (TREE_TYPE (x), var);
12070 tree present;
12071 present = omp_check_optional_argument (ovar, true);
12072 if (present)
12074 tree null_label = create_artificial_label (UNKNOWN_LOCATION);
12075 tree notnull_label = create_artificial_label (UNKNOWN_LOCATION);
12076 tree opt_arg_label = create_artificial_label (UNKNOWN_LOCATION);
12077 tree new_x = unshare_expr (x);
12078 gimplify_expr (&present, &ilist, NULL, is_gimple_val,
12079 fb_rvalue);
12080 gcond *cond = gimple_build_cond_from_tree (present,
12081 notnull_label,
12082 null_label);
12083 gimple_seq_add_stmt (&ilist, cond);
12084 gimple_seq_add_stmt (&ilist, gimple_build_label (null_label));
12085 gimplify_assign (new_x, null_pointer_node, &ilist);
12086 gimple_seq_add_stmt (&ilist, gimple_build_goto (opt_arg_label));
12087 gimple_seq_add_stmt (&ilist,
12088 gimple_build_label (notnull_label));
12089 gimplify_assign (x, var, &ilist);
12090 gimple_seq_add_stmt (&ilist,
12091 gimple_build_label (opt_arg_label));
12093 else
12094 gimplify_assign (x, var, &ilist);
12095 s = size_int (0);
12096 purpose = size_int (map_idx++);
12097 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
12098 gcc_checking_assert (tkind
12099 < (HOST_WIDE_INT_C (1U) << talign_shift));
12100 gcc_checking_assert (tkind
12101 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
12102 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
12103 build_int_cstu (tkind_type, tkind));
12104 break;
12107 gcc_assert (map_idx == map_cnt);
12109 DECL_INITIAL (TREE_VEC_ELT (t, 1))
12110 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 1)), vsize);
12111 DECL_INITIAL (TREE_VEC_ELT (t, 2))
12112 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 2)), vkind);
12113 for (int i = 1; i <= 2; i++)
12114 if (!TREE_STATIC (TREE_VEC_ELT (t, i)))
12116 gimple_seq initlist = NULL;
12117 force_gimple_operand (build1 (DECL_EXPR, void_type_node,
12118 TREE_VEC_ELT (t, i)),
12119 &initlist, true, NULL_TREE);
12120 gimple_seq_add_seq (&ilist, initlist);
12122 tree clobber = build_clobber (TREE_TYPE (TREE_VEC_ELT (t, i)));
12123 gimple_seq_add_stmt (&olist,
12124 gimple_build_assign (TREE_VEC_ELT (t, i),
12125 clobber));
12128 tree clobber = build_clobber (ctx->record_type);
12129 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
12130 clobber));
12133 /* Once all the expansions are done, sequence all the different
12134 fragments inside gimple_omp_body. */
12136 new_body = NULL;
12138 if (offloaded
12139 && ctx->record_type)
12141 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
12142 /* fixup_child_record_type might have changed receiver_decl's type. */
12143 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
12144 gimple_seq_add_stmt (&new_body,
12145 gimple_build_assign (ctx->receiver_decl, t));
12147 gimple_seq_add_seq (&new_body, fplist);
12149 if (offloaded || data_region)
12151 tree prev = NULL_TREE;
12152 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
12153 switch (OMP_CLAUSE_CODE (c))
12155 tree var, x;
12156 default:
12157 break;
12158 case OMP_CLAUSE_FIRSTPRIVATE:
12159 if (is_gimple_omp_oacc (ctx->stmt))
12160 break;
12161 var = OMP_CLAUSE_DECL (c);
12162 if (omp_is_reference (var)
12163 || is_gimple_reg_type (TREE_TYPE (var)))
12165 tree new_var = lookup_decl (var, ctx);
12166 tree type;
12167 type = TREE_TYPE (var);
12168 if (omp_is_reference (var))
12169 type = TREE_TYPE (type);
12170 if ((INTEGRAL_TYPE_P (type)
12171 && TYPE_PRECISION (type) <= POINTER_SIZE)
12172 || TREE_CODE (type) == POINTER_TYPE)
12174 x = build_receiver_ref (var, false, ctx);
12175 if (TREE_CODE (type) != POINTER_TYPE)
12176 x = fold_convert (pointer_sized_int_node, x);
12177 x = fold_convert (type, x);
12178 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
12179 fb_rvalue);
12180 if (omp_is_reference (var))
12182 tree v = create_tmp_var_raw (type, get_name (var));
12183 gimple_add_tmp_var (v);
12184 TREE_ADDRESSABLE (v) = 1;
12185 gimple_seq_add_stmt (&new_body,
12186 gimple_build_assign (v, x));
12187 x = build_fold_addr_expr (v);
12189 gimple_seq_add_stmt (&new_body,
12190 gimple_build_assign (new_var, x));
12192 else
12194 x = build_receiver_ref (var, !omp_is_reference (var), ctx);
12195 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
12196 fb_rvalue);
12197 gimple_seq_add_stmt (&new_body,
12198 gimple_build_assign (new_var, x));
12201 else if (is_variable_sized (var))
12203 tree pvar = DECL_VALUE_EXPR (var);
12204 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
12205 pvar = TREE_OPERAND (pvar, 0);
12206 gcc_assert (DECL_P (pvar));
12207 tree new_var = lookup_decl (pvar, ctx);
12208 x = build_receiver_ref (var, false, ctx);
12209 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
12210 gimple_seq_add_stmt (&new_body,
12211 gimple_build_assign (new_var, x));
12213 break;
12214 case OMP_CLAUSE_PRIVATE:
12215 if (is_gimple_omp_oacc (ctx->stmt))
12216 break;
12217 var = OMP_CLAUSE_DECL (c);
12218 if (omp_is_reference (var))
12220 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
12221 tree new_var = lookup_decl (var, ctx);
12222 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
12223 if (TREE_CONSTANT (x))
12225 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
12226 get_name (var));
12227 gimple_add_tmp_var (x);
12228 TREE_ADDRESSABLE (x) = 1;
12229 x = build_fold_addr_expr_loc (clause_loc, x);
12231 else
12232 break;
12234 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
12235 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
12236 gimple_seq_add_stmt (&new_body,
12237 gimple_build_assign (new_var, x));
12239 break;
12240 case OMP_CLAUSE_USE_DEVICE_PTR:
12241 case OMP_CLAUSE_USE_DEVICE_ADDR:
12242 case OMP_CLAUSE_IS_DEVICE_PTR:
12243 tree new_var;
12244 gimple_seq assign_body;
12245 bool is_array_data;
12246 bool do_optional_check;
12247 assign_body = NULL;
12248 do_optional_check = false;
12249 var = OMP_CLAUSE_DECL (c);
12250 is_array_data = lang_hooks.decls.omp_array_data (var, true) != NULL;
12252 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR)
12253 x = build_sender_ref (is_array_data
12254 ? (splay_tree_key) &DECL_NAME (var)
12255 : (splay_tree_key) &DECL_UID (var), ctx);
12256 else
12257 x = build_receiver_ref (var, false, ctx);
12259 if (is_array_data)
12261 bool is_ref = omp_is_reference (var);
12262 do_optional_check = true;
12263 /* First, we copy the descriptor data from the host; then
12264 we update its data to point to the target address. */
12265 new_var = lookup_decl (var, ctx);
12266 new_var = DECL_VALUE_EXPR (new_var);
12267 tree v = new_var;
12269 if (is_ref)
12271 var = build_fold_indirect_ref (var);
12272 gimplify_expr (&var, &assign_body, NULL, is_gimple_val,
12273 fb_rvalue);
12274 v = create_tmp_var_raw (TREE_TYPE (var), get_name (var));
12275 gimple_add_tmp_var (v);
12276 TREE_ADDRESSABLE (v) = 1;
12277 gimple_seq_add_stmt (&assign_body,
12278 gimple_build_assign (v, var));
12279 tree rhs = build_fold_addr_expr (v);
12280 gimple_seq_add_stmt (&assign_body,
12281 gimple_build_assign (new_var, rhs));
12283 else
12284 gimple_seq_add_stmt (&assign_body,
12285 gimple_build_assign (new_var, var));
12287 tree v2 = lang_hooks.decls.omp_array_data (unshare_expr (v), false);
12288 gcc_assert (v2);
12289 gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
12290 gimple_seq_add_stmt (&assign_body,
12291 gimple_build_assign (v2, x));
12293 else if (is_variable_sized (var))
12295 tree pvar = DECL_VALUE_EXPR (var);
12296 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
12297 pvar = TREE_OPERAND (pvar, 0);
12298 gcc_assert (DECL_P (pvar));
12299 new_var = lookup_decl (pvar, ctx);
12300 gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
12301 gimple_seq_add_stmt (&assign_body,
12302 gimple_build_assign (new_var, x));
12304 else if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
12305 && !omp_is_reference (var)
12306 && !omp_is_allocatable_or_ptr (var))
12307 || TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
12309 new_var = lookup_decl (var, ctx);
12310 new_var = DECL_VALUE_EXPR (new_var);
12311 gcc_assert (TREE_CODE (new_var) == MEM_REF);
12312 new_var = TREE_OPERAND (new_var, 0);
12313 gcc_assert (DECL_P (new_var));
12314 gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
12315 gimple_seq_add_stmt (&assign_body,
12316 gimple_build_assign (new_var, x));
12318 else
12320 tree type = TREE_TYPE (var);
12321 new_var = lookup_decl (var, ctx);
12322 if (omp_is_reference (var))
12324 type = TREE_TYPE (type);
12325 if (TREE_CODE (type) != ARRAY_TYPE
12326 && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_USE_DEVICE_ADDR
12327 || (omp_is_reference (var)
12328 && omp_is_allocatable_or_ptr (var))))
12330 tree v = create_tmp_var_raw (type, get_name (var));
12331 gimple_add_tmp_var (v);
12332 TREE_ADDRESSABLE (v) = 1;
12333 x = fold_convert (type, x);
12334 gimplify_expr (&x, &assign_body, NULL, is_gimple_val,
12335 fb_rvalue);
12336 gimple_seq_add_stmt (&assign_body,
12337 gimple_build_assign (v, x));
12338 x = build_fold_addr_expr (v);
12339 do_optional_check = true;
12342 new_var = DECL_VALUE_EXPR (new_var);
12343 x = fold_convert (TREE_TYPE (new_var), x);
12344 gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
12345 gimple_seq_add_stmt (&assign_body,
12346 gimple_build_assign (new_var, x));
12348 tree present;
12349 present = (do_optional_check
12350 ? omp_check_optional_argument (OMP_CLAUSE_DECL (c), true)
12351 : NULL_TREE);
12352 if (present)
12354 tree null_label = create_artificial_label (UNKNOWN_LOCATION);
12355 tree notnull_label = create_artificial_label (UNKNOWN_LOCATION);
12356 tree opt_arg_label = create_artificial_label (UNKNOWN_LOCATION);
12357 glabel *null_glabel = gimple_build_label (null_label);
12358 glabel *notnull_glabel = gimple_build_label (notnull_label);
12359 ggoto *opt_arg_ggoto = gimple_build_goto (opt_arg_label);
12360 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
12361 fb_rvalue);
12362 gimplify_expr (&present, &new_body, NULL, is_gimple_val,
12363 fb_rvalue);
12364 gcond *cond = gimple_build_cond_from_tree (present,
12365 notnull_label,
12366 null_label);
12367 gimple_seq_add_stmt (&new_body, cond);
12368 gimple_seq_add_stmt (&new_body, null_glabel);
12369 gimplify_assign (new_var, null_pointer_node, &new_body);
12370 gimple_seq_add_stmt (&new_body, opt_arg_ggoto);
12371 gimple_seq_add_stmt (&new_body, notnull_glabel);
12372 gimple_seq_add_seq (&new_body, assign_body);
12373 gimple_seq_add_stmt (&new_body,
12374 gimple_build_label (opt_arg_label));
12376 else
12377 gimple_seq_add_seq (&new_body, assign_body);
12378 break;
12380 /* Handle GOMP_MAP_FIRSTPRIVATE_{POINTER,REFERENCE} in a second pass,
12381 so that the firstprivate vars that hold OMP_CLAUSE_SIZE (where
12382 needed) have already been handled. Likewise OMP_CLAUSE_PRIVATE
12383 for VLAs or references to VLAs. */
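/* E.g. (sketch) for "#pragma omp target map (tofrom: p[0:n])" with a
   pointer P, the FE appends a GOMP_MAP_FIRSTPRIVATE_POINTER clause
   for P itself; the loop below rebuilds P inside the region from the
   receiver field of the preceding clause, subtracting again any bias
   that was folded into OMP_CLAUSE_SIZE.  */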
12384 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
12385 switch (OMP_CLAUSE_CODE (c))
12387 tree var;
12388 default:
12389 break;
12390 case OMP_CLAUSE_MAP:
12391 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
12392 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
12394 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
12395 poly_int64 offset = 0;
12396 gcc_assert (prev);
12397 var = OMP_CLAUSE_DECL (c);
12398 if (DECL_P (var)
12399 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
12400 && is_global_var (maybe_lookup_decl_in_outer_ctx (var,
12401 ctx))
12402 && varpool_node::get_create (var)->offloadable)
12403 break;
12404 if (TREE_CODE (var) == INDIRECT_REF
12405 && TREE_CODE (TREE_OPERAND (var, 0)) == COMPONENT_REF)
12406 var = TREE_OPERAND (var, 0);
12407 if (TREE_CODE (var) == COMPONENT_REF)
12409 var = get_addr_base_and_unit_offset (var, &offset);
12410 gcc_assert (var != NULL_TREE && DECL_P (var));
12412 else if (DECL_SIZE (var)
12413 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
12415 tree var2 = DECL_VALUE_EXPR (var);
12416 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
12417 var2 = TREE_OPERAND (var2, 0);
12418 gcc_assert (DECL_P (var2));
12419 var = var2;
12421 tree new_var = lookup_decl (var, ctx), x;
12422 tree type = TREE_TYPE (new_var);
12423 bool is_ref;
12424 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == INDIRECT_REF
12425 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
12426 == COMPONENT_REF))
12428 type = TREE_TYPE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0));
12429 is_ref = true;
12430 new_var = build2 (MEM_REF, type,
12431 build_fold_addr_expr (new_var),
12432 build_int_cst (build_pointer_type (type),
12433 offset));
12435 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPONENT_REF)
12437 type = TREE_TYPE (OMP_CLAUSE_DECL (c));
12438 is_ref = TREE_CODE (type) == REFERENCE_TYPE;
12439 new_var = build2 (MEM_REF, type,
12440 build_fold_addr_expr (new_var),
12441 build_int_cst (build_pointer_type (type),
12442 offset));
12444 else
12445 is_ref = omp_is_reference (var);
12446 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
12447 is_ref = false;
12448 bool ref_to_array = false;
12449 if (is_ref)
12451 type = TREE_TYPE (type);
12452 if (TREE_CODE (type) == ARRAY_TYPE)
12454 type = build_pointer_type (type);
12455 ref_to_array = true;
12458 else if (TREE_CODE (type) == ARRAY_TYPE)
12460 tree decl2 = DECL_VALUE_EXPR (new_var);
12461 gcc_assert (TREE_CODE (decl2) == MEM_REF);
12462 decl2 = TREE_OPERAND (decl2, 0);
12463 gcc_assert (DECL_P (decl2));
12464 new_var = decl2;
12465 type = TREE_TYPE (new_var);
12467 x = build_receiver_ref (OMP_CLAUSE_DECL (prev), false, ctx);
12468 x = fold_convert_loc (clause_loc, type, x);
12469 if (!integer_zerop (OMP_CLAUSE_SIZE (c)))
12471 tree bias = OMP_CLAUSE_SIZE (c);
12472 if (DECL_P (bias))
12473 bias = lookup_decl (bias, ctx);
12474 bias = fold_convert_loc (clause_loc, sizetype, bias);
12475 bias = fold_build1_loc (clause_loc, NEGATE_EXPR, sizetype,
12476 bias);
12477 x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
12478 TREE_TYPE (x), x, bias);
12480 if (ref_to_array)
12481 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
12482 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
12483 if (is_ref && !ref_to_array)
12485 tree t = create_tmp_var_raw (type, get_name (var));
12486 gimple_add_tmp_var (t);
12487 TREE_ADDRESSABLE (t) = 1;
12488 gimple_seq_add_stmt (&new_body,
12489 gimple_build_assign (t, x));
12490 x = build_fold_addr_expr_loc (clause_loc, t);
12492 gimple_seq_add_stmt (&new_body,
12493 gimple_build_assign (new_var, x));
12494 prev = NULL_TREE;
12496 else if (OMP_CLAUSE_CHAIN (c)
12497 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c))
12498 == OMP_CLAUSE_MAP
12499 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
12500 == GOMP_MAP_FIRSTPRIVATE_POINTER
12501 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
12502 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
12503 prev = c;
12504 break;
12505 case OMP_CLAUSE_PRIVATE:
12506 var = OMP_CLAUSE_DECL (c);
12507 if (is_variable_sized (var))
12509 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
12510 tree new_var = lookup_decl (var, ctx);
12511 tree pvar = DECL_VALUE_EXPR (var);
12512 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
12513 pvar = TREE_OPERAND (pvar, 0);
12514 gcc_assert (DECL_P (pvar));
12515 tree new_pvar = lookup_decl (pvar, ctx);
12516 tree atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
12517 tree al = size_int (DECL_ALIGN (var));
12518 tree x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
12519 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
12520 x = fold_convert_loc (clause_loc, TREE_TYPE (new_pvar), x);
12521 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
12522 gimple_seq_add_stmt (&new_body,
12523 gimple_build_assign (new_pvar, x));
12525 else if (omp_is_reference (var) && !is_gimple_omp_oacc (ctx->stmt))
12527 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
12528 tree new_var = lookup_decl (var, ctx);
12529 tree x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
12530 if (TREE_CONSTANT (x))
12531 break;
12532 else
12534 tree atmp
12535 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
12536 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
12537 tree al = size_int (TYPE_ALIGN (rtype));
12538 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
12541 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
12542 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
12543 gimple_seq_add_stmt (&new_body,
12544 gimple_build_assign (new_var, x));
12546 break;
12549 gimple_seq fork_seq = NULL;
12550 gimple_seq join_seq = NULL;
12552 if (is_oacc_parallel_or_serial (ctx))
12554 /* If there are reductions on the offloaded region itself, treat
12555 them as a dummy GANG loop. */
12556 tree level = build_int_cst (integer_type_node, GOMP_DIM_GANG);
12558 lower_oacc_reductions (gimple_location (ctx->stmt), clauses, level,
12559 false, NULL, NULL, &fork_seq, &join_seq, ctx);
12562 gimple_seq_add_seq (&new_body, fork_seq);
12563 gimple_seq_add_seq (&new_body, tgt_body);
12564 gimple_seq_add_seq (&new_body, join_seq);
12566 if (offloaded)
12567 new_body = maybe_catch_exception (new_body);
12569 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
12570 gimple_omp_set_body (stmt, new_body);
12573 bind = gimple_build_bind (NULL, NULL,
12574 tgt_bind ? gimple_bind_block (tgt_bind)
12575 : NULL_TREE);
12576 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
12577 gimple_bind_add_seq (bind, ilist);
12578 gimple_bind_add_stmt (bind, stmt);
12579 gimple_bind_add_seq (bind, olist);
12581 pop_gimplify_context (NULL);
12583 if (dep_bind)
12585 gimple_bind_add_seq (dep_bind, dep_ilist);
12586 gimple_bind_add_stmt (dep_bind, bind);
12587 gimple_bind_add_seq (dep_bind, dep_olist);
12588 pop_gimplify_context (dep_bind);
12592 /* Lower code for an OpenMP teams directive. */
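/* Illustration (numbers invented): for

     #pragma omp teams num_teams (4) thread_limit (8)
       body;

   the code below emits GOMP_teams (4, 8) ahead of the teams body; an
   omitted clause is passed as 0, leaving the choice to the runtime.  */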
12594 static void
12595 lower_omp_teams (gimple_stmt_iterator *gsi_p, omp_context *ctx)
12597 gomp_teams *teams_stmt = as_a <gomp_teams *> (gsi_stmt (*gsi_p));
12598 push_gimplify_context ();
12600 tree block = make_node (BLOCK);
12601 gbind *bind = gimple_build_bind (NULL, NULL, block);
12602 gsi_replace (gsi_p, bind, true);
12603 gimple_seq bind_body = NULL;
12604 gimple_seq dlist = NULL;
12605 gimple_seq olist = NULL;
12607 tree num_teams = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
12608 OMP_CLAUSE_NUM_TEAMS);
12609 if (num_teams == NULL_TREE)
12610 num_teams = build_int_cst (unsigned_type_node, 0);
12611 else
12613 num_teams = OMP_CLAUSE_NUM_TEAMS_EXPR (num_teams);
12614 num_teams = fold_convert (unsigned_type_node, num_teams);
12615 gimplify_expr (&num_teams, &bind_body, NULL, is_gimple_val, fb_rvalue);
12617 tree thread_limit = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
12618 OMP_CLAUSE_THREAD_LIMIT);
12619 if (thread_limit == NULL_TREE)
12620 thread_limit = build_int_cst (unsigned_type_node, 0);
12621 else
12623 thread_limit = OMP_CLAUSE_THREAD_LIMIT_EXPR (thread_limit);
12624 thread_limit = fold_convert (unsigned_type_node, thread_limit);
12625 gimplify_expr (&thread_limit, &bind_body, NULL, is_gimple_val,
12626 fb_rvalue);
12629 lower_rec_input_clauses (gimple_omp_teams_clauses (teams_stmt),
12630 &bind_body, &dlist, ctx, NULL);
12631 lower_omp (gimple_omp_body_ptr (teams_stmt), ctx);
12632 lower_reduction_clauses (gimple_omp_teams_clauses (teams_stmt), &olist,
12633 NULL, ctx);
12634 gimple_seq_add_stmt (&bind_body, teams_stmt);
12636 location_t loc = gimple_location (teams_stmt);
12637 tree decl = builtin_decl_explicit (BUILT_IN_GOMP_TEAMS);
12638 gimple *call = gimple_build_call (decl, 2, num_teams, thread_limit);
12639 gimple_set_location (call, loc);
12640 gimple_seq_add_stmt (&bind_body, call);
12642 gimple_seq_add_seq (&bind_body, gimple_omp_body (teams_stmt));
12643 gimple_omp_set_body (teams_stmt, NULL);
12644 gimple_seq_add_seq (&bind_body, olist);
12645 gimple_seq_add_seq (&bind_body, dlist);
12646 gimple_seq_add_stmt (&bind_body, gimple_build_omp_return (true));
12647 gimple_bind_set_body (bind, bind_body);
12649 pop_gimplify_context (bind);
12651 gimple_bind_append_vars (bind, ctx->block_vars);
12652 BLOCK_VARS (block) = ctx->block_vars;
12653 if (BLOCK_VARS (block))
12654 TREE_USED (block) = 1;
12657 /* Callback for lower_omp_1. Return non-NULL if *tp needs to be
12658 regimplified. If DATA is non-NULL, lower_omp_1 is being invoked
12659 outside of an OMP context, but with task_shared_vars set. */
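/* For instance (a hypothetical case), a privatized VLA "a" may have had
   DECL_VALUE_EXPR set to "*a.1" by the lowering above; any statement
   still mentioning "a" must then be regimplified so that the
   indirection becomes explicit in the IL.  */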
12661 static tree
12662 lower_omp_regimplify_p (tree *tp, int *walk_subtrees,
12663 void *data)
12665 tree t = *tp;
12667 /* Any variable with DECL_VALUE_EXPR needs to be regimplified. */
12668 if (VAR_P (t) && data == NULL && DECL_HAS_VALUE_EXPR_P (t))
12669 return t;
12671 if (task_shared_vars
12672 && DECL_P (t)
12673 && bitmap_bit_p (task_shared_vars, DECL_UID (t)))
12674 return t;
12676 /* If a global variable has been privatized, TREE_CONSTANT on
12677 ADDR_EXPR might be wrong. */
12678 if (data == NULL && TREE_CODE (t) == ADDR_EXPR)
12679 recompute_tree_invariant_for_addr_expr (t);
12681 *walk_subtrees = !IS_TYPE_OR_DECL_P (t);
12682 return NULL_TREE;
12685 /* Data to be communicated between lower_omp_regimplify_operands and
12686 lower_omp_regimplify_operands_p. */
12688 struct lower_omp_regimplify_operands_data
12690 omp_context *ctx;
12691 vec<tree> *decls;
12694 /* Helper function for lower_omp_regimplify_operands. Find
12695 omp_member_access_dummy_var VAR_DECLs and temporarily adjust their
12696 DECL_VALUE_EXPRs if needed. */
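/* E.g. (a hypothetical C++ member function), a dummy VAR_DECL for a
   non-static data member "x" carries a DECL_VALUE_EXPR like "this->x";
   inside the construct the base object may have been remapped to a
   local copy, so the value expression is temporarily rewritten in
   terms of that copy and restored by the caller afterwards.  */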
12698 static tree
12699 lower_omp_regimplify_operands_p (tree *tp, int *walk_subtrees,
12700 void *data)
12702 tree t = omp_member_access_dummy_var (*tp);
12703 if (t)
12705 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
12706 lower_omp_regimplify_operands_data *ldata
12707 = (lower_omp_regimplify_operands_data *) wi->info;
12708 tree o = maybe_lookup_decl (t, ldata->ctx);
12709 if (o != t)
12711 ldata->decls->safe_push (DECL_VALUE_EXPR (*tp));
12712 ldata->decls->safe_push (*tp);
12713 tree v = unshare_and_remap (DECL_VALUE_EXPR (*tp), t, o);
12714 SET_DECL_VALUE_EXPR (*tp, v);
12717 *walk_subtrees = !IS_TYPE_OR_DECL_P (*tp);
12718 return NULL_TREE;
12721 /* Wrapper around gimple_regimplify_operands that adjusts DECL_VALUE_EXPRs
12722 of omp_member_access_dummy_var vars during regimplification. */
12724 static void
12725 lower_omp_regimplify_operands (omp_context *ctx, gimple *stmt,
12726 gimple_stmt_iterator *gsi_p)
12728 auto_vec<tree, 10> decls;
12729 if (ctx)
12731 struct walk_stmt_info wi;
12732 memset (&wi, '\0', sizeof (wi));
12733 struct lower_omp_regimplify_operands_data data;
12734 data.ctx = ctx;
12735 data.decls = &decls;
12736 wi.info = &data;
12737 walk_gimple_op (stmt, lower_omp_regimplify_operands_p, &wi);
12739 gimple_regimplify_operands (stmt, gsi_p);
12740 while (!decls.is_empty ())
12742 tree t = decls.pop ();
12743 tree v = decls.pop ();
12744 SET_DECL_VALUE_EXPR (t, v);
12748 static void
12749 lower_omp_1 (gimple_stmt_iterator *gsi_p, omp_context *ctx)
12751 gimple *stmt = gsi_stmt (*gsi_p);
12752 struct walk_stmt_info wi;
12753 gcall *call_stmt;
12755 if (gimple_has_location (stmt))
12756 input_location = gimple_location (stmt);
12758 if (task_shared_vars)
12759 memset (&wi, '\0', sizeof (wi));
12761 /* If we have issued syntax errors, avoid doing any heavy lifting.
12762 Just replace the OMP directives with a NOP to avoid
12763 confusing RTL expansion. */
12764 if (seen_error () && is_gimple_omp (stmt))
12766 gsi_replace (gsi_p, gimple_build_nop (), true);
12767 return;
12770 switch (gimple_code (stmt))
12772 case GIMPLE_COND:
12774 gcond *cond_stmt = as_a <gcond *> (stmt);
12775 if ((ctx || task_shared_vars)
12776 && (walk_tree (gimple_cond_lhs_ptr (cond_stmt),
12777 lower_omp_regimplify_p,
12778 ctx ? NULL : &wi, NULL)
12779 || walk_tree (gimple_cond_rhs_ptr (cond_stmt),
12780 lower_omp_regimplify_p,
12781 ctx ? NULL : &wi, NULL)))
12782 lower_omp_regimplify_operands (ctx, cond_stmt, gsi_p);
12784 break;
12785 case GIMPLE_CATCH:
12786 lower_omp (gimple_catch_handler_ptr (as_a <gcatch *> (stmt)), ctx);
12787 break;
12788 case GIMPLE_EH_FILTER:
12789 lower_omp (gimple_eh_filter_failure_ptr (stmt), ctx);
12790 break;
12791 case GIMPLE_TRY:
12792 lower_omp (gimple_try_eval_ptr (stmt), ctx);
12793 lower_omp (gimple_try_cleanup_ptr (stmt), ctx);
12794 break;
12795 case GIMPLE_TRANSACTION:
12796 lower_omp (gimple_transaction_body_ptr (as_a <gtransaction *> (stmt)),
12797 ctx);
12798 break;
12799 case GIMPLE_BIND:
12800 lower_omp (gimple_bind_body_ptr (as_a <gbind *> (stmt)), ctx);
12801 maybe_remove_omp_member_access_dummy_vars (as_a <gbind *> (stmt));
12802 break;
12803 case GIMPLE_OMP_PARALLEL:
12804 case GIMPLE_OMP_TASK:
12805 ctx = maybe_lookup_ctx (stmt);
12806 gcc_assert (ctx);
12807 if (ctx->cancellable)
12808 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
12809 lower_omp_taskreg (gsi_p, ctx);
12810 break;
12811 case GIMPLE_OMP_FOR:
12812 ctx = maybe_lookup_ctx (stmt);
12813 gcc_assert (ctx);
12814 if (ctx->cancellable)
12815 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
12816 lower_omp_for (gsi_p, ctx);
12817 break;
12818 case GIMPLE_OMP_SECTIONS:
12819 ctx = maybe_lookup_ctx (stmt);
12820 gcc_assert (ctx);
12821 if (ctx->cancellable)
12822 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
12823 lower_omp_sections (gsi_p, ctx);
12824 break;
12825 case GIMPLE_OMP_SINGLE:
12826 ctx = maybe_lookup_ctx (stmt);
12827 gcc_assert (ctx);
12828 lower_omp_single (gsi_p, ctx);
12829 break;
12830 case GIMPLE_OMP_MASTER:
12831 ctx = maybe_lookup_ctx (stmt);
12832 gcc_assert (ctx);
12833 lower_omp_master (gsi_p, ctx);
12834 break;
12835 case GIMPLE_OMP_TASKGROUP:
12836 ctx = maybe_lookup_ctx (stmt);
12837 gcc_assert (ctx);
12838 lower_omp_taskgroup (gsi_p, ctx);
12839 break;
12840 case GIMPLE_OMP_ORDERED:
12841 ctx = maybe_lookup_ctx (stmt);
12842 gcc_assert (ctx);
12843 lower_omp_ordered (gsi_p, ctx);
12844 break;
12845 case GIMPLE_OMP_SCAN:
12846 ctx = maybe_lookup_ctx (stmt);
12847 gcc_assert (ctx);
12848 lower_omp_scan (gsi_p, ctx);
12849 break;
12850 case GIMPLE_OMP_CRITICAL:
12851 ctx = maybe_lookup_ctx (stmt);
12852 gcc_assert (ctx);
12853 lower_omp_critical (gsi_p, ctx);
12854 break;
12855 case GIMPLE_OMP_ATOMIC_LOAD:
12856 if ((ctx || task_shared_vars)
12857 && walk_tree (gimple_omp_atomic_load_rhs_ptr (
12858 as_a <gomp_atomic_load *> (stmt)),
12859 lower_omp_regimplify_p, ctx ? NULL : &wi, NULL))
12860 lower_omp_regimplify_operands (ctx, stmt, gsi_p);
12861 break;
12862 case GIMPLE_OMP_TARGET:
12863 ctx = maybe_lookup_ctx (stmt);
12864 gcc_assert (ctx);
12865 lower_omp_target (gsi_p, ctx);
12866 break;
12867 case GIMPLE_OMP_TEAMS:
12868 ctx = maybe_lookup_ctx (stmt);
12869 gcc_assert (ctx);
12870 if (gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
12871 lower_omp_taskreg (gsi_p, ctx);
12872 else
12873 lower_omp_teams (gsi_p, ctx);
12874 break;
12875 case GIMPLE_CALL:
12876 tree fndecl;
12877 call_stmt = as_a <gcall *> (stmt);
12878 fndecl = gimple_call_fndecl (call_stmt);
12879 if (fndecl
12880 && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
12881 switch (DECL_FUNCTION_CODE (fndecl))
12883 case BUILT_IN_GOMP_BARRIER:
12884 if (ctx == NULL)
12885 break;
12886 /* FALLTHRU */
12887 case BUILT_IN_GOMP_CANCEL:
12888 case BUILT_IN_GOMP_CANCELLATION_POINT:
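	  /* Illustrative sketch of the rewrite done below: within a
	     cancellable construct, GOMP_barrier () is turned into
	     GOMP_barrier_cancel (), and (as for GOMP_cancel and
	     GOMP_cancellation_point) its boolean result is tested:

	       tmp = GOMP_barrier_cancel ();
	       if (tmp != 0) goto <cancel_label>; else goto <fallthru_label>;

	     so cancellation unwinds to the construct's cancel label.  */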
12889 omp_context *cctx;
12890 cctx = ctx;
12891 if (gimple_code (cctx->stmt) == GIMPLE_OMP_SECTION)
12892 cctx = cctx->outer;
12893 gcc_assert (gimple_call_lhs (call_stmt) == NULL_TREE);
12894 if (!cctx->cancellable)
12896 if (DECL_FUNCTION_CODE (fndecl)
12897 == BUILT_IN_GOMP_CANCELLATION_POINT)
12899 stmt = gimple_build_nop ();
12900 gsi_replace (gsi_p, stmt, false);
12902 break;
12904 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_GOMP_BARRIER)
12906 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_BARRIER_CANCEL);
12907 gimple_call_set_fndecl (call_stmt, fndecl);
12908 gimple_call_set_fntype (call_stmt, TREE_TYPE (fndecl));
12910 tree lhs;
12911 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (fndecl)));
12912 gimple_call_set_lhs (call_stmt, lhs);
12913 tree fallthru_label;
12914 fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
12915 gimple *g;
12916 g = gimple_build_label (fallthru_label);
12917 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
12918 g = gimple_build_cond (NE_EXPR, lhs,
12919 fold_convert (TREE_TYPE (lhs),
12920 boolean_false_node),
12921 cctx->cancel_label, fallthru_label);
12922 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
12923 break;
12924 default:
12925 break;
12927 goto regimplify;
12929 case GIMPLE_ASSIGN:
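      /* For assignments, check whether the store target is (or is based
	 on) a variable that some enclosing construct lists as
	 lastprivate (conditional:).  If so, record the current iteration
	 number in the associated _condtemp_ temporary right after the
	 store, so the construct can later determine which logical
	 iteration wrote last.  */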
12930 for (omp_context *up = ctx; up; up = up->outer)
12932 if (gimple_code (up->stmt) == GIMPLE_OMP_ORDERED
12933 || gimple_code (up->stmt) == GIMPLE_OMP_CRITICAL
12934 || gimple_code (up->stmt) == GIMPLE_OMP_TASKGROUP
12935 || gimple_code (up->stmt) == GIMPLE_OMP_SECTION
12936 || gimple_code (up->stmt) == GIMPLE_OMP_SCAN
12937 || (gimple_code (up->stmt) == GIMPLE_OMP_TARGET
12938 && (gimple_omp_target_kind (up->stmt)
12939 == GF_OMP_TARGET_KIND_DATA)))
12940 continue;
12941 else if (!up->lastprivate_conditional_map)
12942 break;
12943 tree lhs = get_base_address (gimple_assign_lhs (stmt));
12944 if (TREE_CODE (lhs) == MEM_REF
12945 && DECL_P (TREE_OPERAND (lhs, 0))
12946 && TREE_CODE (TREE_TYPE (TREE_OPERAND (lhs,
12947 0))) == REFERENCE_TYPE)
12948 lhs = TREE_OPERAND (lhs, 0);
12949 if (DECL_P (lhs))
12950 if (tree *v = up->lastprivate_conditional_map->get (lhs))
12952 tree clauses;
12953 if (up->combined_into_simd_safelen1)
12955 up = up->outer;
12956 if (gimple_code (up->stmt) == GIMPLE_OMP_SCAN)
12957 up = up->outer;
12959 if (gimple_code (up->stmt) == GIMPLE_OMP_FOR)
12960 clauses = gimple_omp_for_clauses (up->stmt);
12961 else
12962 clauses = gimple_omp_sections_clauses (up->stmt);
12963 tree c = omp_find_clause (clauses, OMP_CLAUSE__CONDTEMP_);
12964 if (!OMP_CLAUSE__CONDTEMP__ITER (c))
12965 c = omp_find_clause (OMP_CLAUSE_CHAIN (c),
12966 OMP_CLAUSE__CONDTEMP_);
12967 gcc_assert (OMP_CLAUSE__CONDTEMP__ITER (c));
12968 gimple *g = gimple_build_assign (*v, OMP_CLAUSE_DECL (c));
12969 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
12972 /* FALLTHRU */
12974 default:
12975 regimplify:
12976 if ((ctx || task_shared_vars)
12977 && walk_gimple_op (stmt, lower_omp_regimplify_p,
12978 ctx ? NULL : &wi))
12980 /* Just remove clobbers; this should happen only if we have
12981 "privatized" local addressable variables in SIMD regions.
12982 The clobber isn't needed in that case, and gimplifying the
12983 address of the ARRAY_REF into a pointer and creating a
12984 MEM_REF-based clobber would create worse code than we get
12985 with the clobber dropped. */
12986 if (gimple_clobber_p (stmt))
12988 gsi_replace (gsi_p, gimple_build_nop (), true);
12989 break;
12991 lower_omp_regimplify_operands (ctx, stmt, gsi_p);
12993 break;
12997 static void
12998 lower_omp (gimple_seq *body, omp_context *ctx)
13000 location_t saved_location = input_location;
13001 gimple_stmt_iterator gsi;
13002 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
13003 lower_omp_1 (&gsi, ctx);
13004 /* During gimplification, we haven't folded statements inside offloading
13005 or taskreg regions (gimplify.c:maybe_fold_stmt); do that now. */
13006 if (target_nesting_level || taskreg_nesting_level)
13007 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
13008 fold_stmt (&gsi);
13009 input_location = saved_location;
13012 /* Main entry point. */
13014 static unsigned int
13015 execute_lower_omp (void)
13017 gimple_seq body;
13018 int i;
13019 omp_context *ctx;
13021 /* This pass always runs, to provide PROP_gimple_lomp.
13022 But often, there is nothing to do. */
13023 if (flag_openacc == 0 && flag_openmp == 0
13024 && flag_openmp_simd == 0)
13025 return 0;
13027 all_contexts = splay_tree_new (splay_tree_compare_pointers, 0,
13028 delete_omp_context);
13030 body = gimple_body (current_function_decl);
13032 scan_omp (&body, NULL);
13033 gcc_assert (taskreg_nesting_level == 0);
13034 FOR_EACH_VEC_ELT (taskreg_contexts, i, ctx)
13035 finish_taskreg_scan (ctx);
13036 taskreg_contexts.release ();
13038 if (all_contexts->root)
13040 if (task_shared_vars)
13041 push_gimplify_context ();
13042 lower_omp (&body, NULL);
13043 if (task_shared_vars)
13044 pop_gimplify_context (NULL);
13047 if (all_contexts)
13049 splay_tree_delete (all_contexts);
13050 all_contexts = NULL;
13052 BITMAP_FREE (task_shared_vars);
13053 BITMAP_FREE (global_nonaddressable_vars);
13055 /* If the current function is a method, remove the artificial dummy
13056 VAR_DECLs created for non-static data member privatization; they
13057 aren't needed for debuginfo or anything else, have already been
13058 replaced everywhere in the IL, and cause problems with LTO. */
13059 if (DECL_ARGUMENTS (current_function_decl)
13060 && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
13061 && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
13062 == POINTER_TYPE))
13063 remove_member_access_dummy_vars (DECL_INITIAL (current_function_decl));
13064 return 0;
13067 namespace {
13069 const pass_data pass_data_lower_omp =
13071 GIMPLE_PASS, /* type */
13072 "omplower", /* name */
13073 OPTGROUP_OMP, /* optinfo_flags */
13074 TV_NONE, /* tv_id */
13075 PROP_gimple_any, /* properties_required */
13076 PROP_gimple_lomp | PROP_gimple_lomp_dev, /* properties_provided */
13077 0, /* properties_destroyed */
13078 0, /* todo_flags_start */
13079 0, /* todo_flags_finish */
13082 class pass_lower_omp : public gimple_opt_pass
13084 public:
13085 pass_lower_omp (gcc::context *ctxt)
13086 : gimple_opt_pass (pass_data_lower_omp, ctxt)
13089 /* opt_pass methods: */
13090 virtual unsigned int execute (function *) { return execute_lower_omp (); }
13092 }; // class pass_lower_omp
13094 } // anon namespace
13096 gimple_opt_pass *
13097 make_pass_lower_omp (gcc::context *ctxt)
13099 return new pass_lower_omp (ctxt);
13102 /* The following is a utility to diagnose structured block violations.
13103 It is not part of the "omplower" pass, as that's invoked too late. It
13104 should be invoked by the respective front ends after gimplification. */
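/* For example (hypothetical user code compiled with -fopenmp), the jump
   in

     #pragma omp parallel
     {
       goto out;
     }
   out:;

   is diagnosed here as "invalid branch to/from OpenMP structured
   block", and the offending statement is replaced by a nop.  */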
13106 static splay_tree all_labels;
13108 /* Check for mismatched contexts and generate an error if needed. Return
13109 true if an error is detected. */
13111 static bool
13112 diagnose_sb_0 (gimple_stmt_iterator *gsi_p,
13113 gimple *branch_ctx, gimple *label_ctx)
13115 gcc_checking_assert (!branch_ctx || is_gimple_omp (branch_ctx));
13116 gcc_checking_assert (!label_ctx || is_gimple_omp (label_ctx));
13118 if (label_ctx == branch_ctx)
13119 return false;
13121 const char* kind = NULL;
13123 if (flag_openacc)
13125 if ((branch_ctx && is_gimple_omp_oacc (branch_ctx))
13126 || (label_ctx && is_gimple_omp_oacc (label_ctx)))
13128 gcc_checking_assert (kind == NULL);
13129 kind = "OpenACC";
13132 if (kind == NULL)
13134 gcc_checking_assert (flag_openmp || flag_openmp_simd);
13135 kind = "OpenMP";
13138 /* Previously we kept track of the label's entire context in diagnose_sb_[12]
13139 so we could traverse it and issue a correct "exit" or "enter" error
13140 message upon a structured block violation.
13142 We built the context by tree_cons'ing up a list, but there is no easy
13143 counterpart in gimple tuples. It seems like far too much work for
13144 issuing exit/enter error messages. If someone really misses the
13145 distinct error message... patches welcome. */
13147 #if 0
13148 /* Try to avoid confusing the user by producing an error message
13149 with correct "exit" or "enter" verbiage. We prefer "exit"
13150 unless we can show that LABEL_CTX is nested within BRANCH_CTX. */
13151 if (branch_ctx == NULL)
13152 exit_p = false;
13153 else
13155 while (label_ctx)
13157 if (TREE_VALUE (label_ctx) == branch_ctx)
13159 exit_p = false;
13160 break;
13162 label_ctx = TREE_CHAIN (label_ctx);
13166 if (exit_p)
13167 error ("invalid exit from %s structured block", kind);
13168 else
13169 error ("invalid entry to %s structured block", kind);
13170 #endif
13172 /* If it's obvious we have an invalid entry, be specific about the error. */
13173 if (branch_ctx == NULL)
13174 error ("invalid entry to %s structured block", kind);
13175 else
13177 /* Otherwise, be vague and lazy, but efficient. */
13178 error ("invalid branch to/from %s structured block", kind);
13181 gsi_replace (gsi_p, gimple_build_nop (), false);
13182 return true;
13185 /* Pass 1: Create a minimal tree of structured blocks, and record
13186 where each label is found. */
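/* E.g. (a hypothetical input), after walking

     #pragma omp parallel
     { L1:; }
   L2:;

   the all_labels splay tree maps L1 to the GIMPLE_OMP_PARALLEL
   statement and L2 to the NULL outermost context.  */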
13188 static tree
13189 diagnose_sb_1 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
13190 struct walk_stmt_info *wi)
13192 gimple *context = (gimple *) wi->info;
13193 gimple *inner_context;
13194 gimple *stmt = gsi_stmt (*gsi_p);
13196 *handled_ops_p = true;
13198 switch (gimple_code (stmt))
13200 WALK_SUBSTMTS;
13202 case GIMPLE_OMP_PARALLEL:
13203 case GIMPLE_OMP_TASK:
13204 case GIMPLE_OMP_SECTIONS:
13205 case GIMPLE_OMP_SINGLE:
13206 case GIMPLE_OMP_SECTION:
13207 case GIMPLE_OMP_MASTER:
13208 case GIMPLE_OMP_ORDERED:
13209 case GIMPLE_OMP_SCAN:
13210 case GIMPLE_OMP_CRITICAL:
13211 case GIMPLE_OMP_TARGET:
13212 case GIMPLE_OMP_TEAMS:
13213 case GIMPLE_OMP_TASKGROUP:
13214 /* The minimal context here is just the current OMP construct. */
13215 inner_context = stmt;
13216 wi->info = inner_context;
13217 walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
13218 wi->info = context;
13219 break;
13221 case GIMPLE_OMP_FOR:
13222 inner_context = stmt;
13223 wi->info = inner_context;
13224 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
13225 walk them. */
13226 walk_gimple_seq (gimple_omp_for_pre_body (stmt),
13227 diagnose_sb_1, NULL, wi);
13228 walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
13229 wi->info = context;
13230 break;
13232 case GIMPLE_LABEL:
13233 splay_tree_insert (all_labels,
13234 (splay_tree_key) gimple_label_label (
13235 as_a <glabel *> (stmt)),
13236 (splay_tree_value) context);
13237 break;
13239 default:
13240 break;
13243 return NULL_TREE;
13246 /* Pass 2: Check each branch and see if its context differs from that of
13247 the destination label's context. */
13249 static tree
13250 diagnose_sb_2 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
13251 struct walk_stmt_info *wi)
13253 gimple *context = (gimple *) wi->info;
13254 splay_tree_node n;
13255 gimple *stmt = gsi_stmt (*gsi_p);
13257 *handled_ops_p = true;
13259 switch (gimple_code (stmt))
13261 WALK_SUBSTMTS;
13263 case GIMPLE_OMP_PARALLEL:
13264 case GIMPLE_OMP_TASK:
13265 case GIMPLE_OMP_SECTIONS:
13266 case GIMPLE_OMP_SINGLE:
13267 case GIMPLE_OMP_SECTION:
13268 case GIMPLE_OMP_MASTER:
13269 case GIMPLE_OMP_ORDERED:
13270 case GIMPLE_OMP_SCAN:
13271 case GIMPLE_OMP_CRITICAL:
13272 case GIMPLE_OMP_TARGET:
13273 case GIMPLE_OMP_TEAMS:
13274 case GIMPLE_OMP_TASKGROUP:
13275 wi->info = stmt;
13276 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
13277 wi->info = context;
13278 break;
13280 case GIMPLE_OMP_FOR:
13281 wi->info = stmt;
13282 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
13283 walk them. */
13284 walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt),
13285 diagnose_sb_2, NULL, wi);
13286 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
13287 wi->info = context;
13288 break;
13290 case GIMPLE_COND:
13292 gcond *cond_stmt = as_a <gcond *> (stmt);
13293 tree lab = gimple_cond_true_label (cond_stmt);
13294 if (lab)
13296 n = splay_tree_lookup (all_labels,
13297 (splay_tree_key) lab);
13298 diagnose_sb_0 (gsi_p, context,
13299 n ? (gimple *) n->value : NULL);
13301 lab = gimple_cond_false_label (cond_stmt);
13302 if (lab)
13304 n = splay_tree_lookup (all_labels,
13305 (splay_tree_key) lab);
13306 diagnose_sb_0 (gsi_p, context,
13307 n ? (gimple *) n->value : NULL);
13310 break;
13312 case GIMPLE_GOTO:
13314 tree lab = gimple_goto_dest (stmt);
13315 if (TREE_CODE (lab) != LABEL_DECL)
13316 break;
13318 n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
13319 diagnose_sb_0 (gsi_p, context, n ? (gimple *) n->value : NULL);
13321 break;
13323 case GIMPLE_SWITCH:
13325 gswitch *switch_stmt = as_a <gswitch *> (stmt);
13326 unsigned int i;
13327 for (i = 0; i < gimple_switch_num_labels (switch_stmt); ++i)
13329 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
13330 n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
13331 if (n && diagnose_sb_0 (gsi_p, context, (gimple *) n->value))
13332 break;
13335 break;
13337 case GIMPLE_RETURN:
13338 diagnose_sb_0 (gsi_p, context, NULL);
13339 break;
13341 default:
13342 break;
13345 return NULL_TREE;
13348 static unsigned int
13349 diagnose_omp_structured_block_errors (void)
13351 struct walk_stmt_info wi;
13352 gimple_seq body = gimple_body (current_function_decl);
13354 all_labels = splay_tree_new (splay_tree_compare_pointers, 0, 0);
13356 memset (&wi, 0, sizeof (wi));
13357 walk_gimple_seq (body, diagnose_sb_1, NULL, &wi);
13359 memset (&wi, 0, sizeof (wi));
13360 wi.want_locations = true;
13361 walk_gimple_seq_mod (&body, diagnose_sb_2, NULL, &wi);
13363 gimple_set_body (current_function_decl, body);
13365 splay_tree_delete (all_labels);
13366 all_labels = NULL;
13368 return 0;
13371 namespace {
13373 const pass_data pass_data_diagnose_omp_blocks =
13375 GIMPLE_PASS, /* type */
13376 "*diagnose_omp_blocks", /* name */
13377 OPTGROUP_OMP, /* optinfo_flags */
13378 TV_NONE, /* tv_id */
13379 PROP_gimple_any, /* properties_required */
13380 0, /* properties_provided */
13381 0, /* properties_destroyed */
13382 0, /* todo_flags_start */
13383 0, /* todo_flags_finish */
13386 class pass_diagnose_omp_blocks : public gimple_opt_pass
13388 public:
13389 pass_diagnose_omp_blocks (gcc::context *ctxt)
13390 : gimple_opt_pass (pass_data_diagnose_omp_blocks, ctxt)
13393 /* opt_pass methods: */
13394 virtual bool gate (function *)
13396 return flag_openacc || flag_openmp || flag_openmp_simd;
13398 virtual unsigned int execute (function *)
13400 return diagnose_omp_structured_block_errors ();
13403 }; // class pass_diagnose_omp_blocks
13405 } // anon namespace
13407 gimple_opt_pass *
13408 make_pass_diagnose_omp_blocks (gcc::context *ctxt)
13410 return new pass_diagnose_omp_blocks (ctxt);
13414 #include "gt-omp-low.h"