PR middle-end/80853
[official-gcc.git] / gcc / omp-low.c
blob26e6586a0706d362bca19825534e9cf598bae14b
1 /* Lowering pass for OMP directives. Converts OMP directives into explicit
2 calls to the runtime library (libgomp), data marshalling to implement data
3 sharing and copying clauses, offloading to accelerators, and more.
5 Contributed by Diego Novillo <dnovillo@redhat.com>
7 Copyright (C) 2005-2017 Free Software Foundation, Inc.
9 This file is part of GCC.
11 GCC is free software; you can redistribute it and/or modify it under
12 the terms of the GNU General Public License as published by the Free
13 Software Foundation; either version 3, or (at your option) any later
14 version.
16 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
17 WARRANTY; without even the implied warranty of MERCHANTABILITY or
18 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19 for more details.
21 You should have received a copy of the GNU General Public License
22 along with GCC; see the file COPYING3. If not see
23 <http://www.gnu.org/licenses/>. */
25 #include "config.h"
26 #include "system.h"
27 #include "coretypes.h"
28 #include "backend.h"
29 #include "target.h"
30 #include "tree.h"
31 #include "gimple.h"
32 #include "tree-pass.h"
33 #include "ssa.h"
34 #include "cgraph.h"
35 #include "pretty-print.h"
36 #include "diagnostic-core.h"
37 #include "fold-const.h"
38 #include "stor-layout.h"
39 #include "internal-fn.h"
40 #include "gimple-fold.h"
41 #include "gimplify.h"
42 #include "gimple-iterator.h"
43 #include "gimplify-me.h"
44 #include "gimple-walk.h"
45 #include "tree-iterator.h"
46 #include "tree-inline.h"
47 #include "langhooks.h"
48 #include "tree-dfa.h"
49 #include "tree-ssa.h"
50 #include "splay-tree.h"
51 #include "omp-general.h"
52 #include "omp-low.h"
53 #include "omp-grid.h"
54 #include "gimple-low.h"
55 #include "symbol-summary.h"
56 #include "tree-nested.h"
57 #include "context.h"
58 #include "gomp-constants.h"
59 #include "gimple-pretty-print.h"
60 #include "hsa-common.h"
62 /* Lowering of OMP parallel and workshare constructs proceeds in two
63 phases. The first phase scans the function looking for OMP statements
64 and then for variables that must be replaced to satisfy data sharing
65 clauses. The second phase expands code for the constructs, as well as
66 re-gimplifying things when variables have been replaced with complex
67 expressions.
69 Final code generation is done by pass_expand_omp. The flowgraph is
70 scanned for regions which are then moved to a new
71 function, to be invoked by the thread library, or offloaded. */
73 /* Context structure. Used to store information about each parallel
74 directive in the code. */
76 struct omp_context
78 /* This field must be at the beginning, as we do "inheritance": Some
79 callback functions for tree-inline.c (e.g., omp_copy_decl)
80 receive a copy_body_data pointer that is up-casted to an
81 omp_context pointer. */
82 copy_body_data cb;
84 /* The tree of contexts corresponding to the encountered constructs. */
85 struct omp_context *outer;
86 gimple *stmt;
88 /* Map variables to fields in a structure that allows communication
89 between sending and receiving threads. */
90 splay_tree field_map;
91 tree record_type;
92 tree sender_decl;
93 tree receiver_decl;
95 /* These are used just by task contexts, if task firstprivate fn is
96 needed. srecord_type is used to communicate from the thread
97 that encountered the task construct to task firstprivate fn,
98 record_type is allocated by GOMP_task, initialized by task firstprivate
99 fn and passed to the task body fn. */
100 splay_tree sfield_map;
101 tree srecord_type;
103 /* A chain of variables to add to the top-level block surrounding the
104 construct. In the case of a parallel, this is in the child function. */
105 tree block_vars;
107 /* Label to which GOMP_cancel{,llation_point} and explicit and implicit
108 barriers should jump to during omplower pass. */
109 tree cancel_label;
111 /* The sibling GIMPLE_OMP_FOR simd with _simt_ clause or NULL
112 otherwise. */
113 gimple *simt_stmt;
115 /* What to do with variables with implicitly determined sharing
116 attributes. */
117 enum omp_clause_default_kind default_kind;
119 /* Nesting depth of this context. Used to beautify error messages re
120 invalid gotos. The outermost ctx is depth 1, with depth 0 being
121 reserved for the main body of the function. */
122 int depth;
124 /* True if this parallel directive is nested within another. */
125 bool is_nested;
127 /* True if this construct can be cancelled. */
128 bool cancellable;
131 static splay_tree all_contexts;
132 static int taskreg_nesting_level;
133 static int target_nesting_level;
134 static bitmap task_shared_vars;
135 static vec<omp_context *> taskreg_contexts;
137 static void scan_omp (gimple_seq *, omp_context *);
138 static tree scan_omp_1_op (tree *, int *, void *);
/* Convenience macro for use inside walk_gimple_* callback switches:
   for these wrapper statements, descend into their sub-statements
   rather than treating them as handled.  */
#define WALK_SUBSTMTS \
  case GIMPLE_BIND: \
  case GIMPLE_TRY: \
  case GIMPLE_CATCH: \
  case GIMPLE_EH_FILTER: \
  case GIMPLE_TRANSACTION: \
    /* The sub-statements for these should be walked.  */ \
    *handled_ops_p = false; \
    break;
150 /* Return true if CTX corresponds to an oacc parallel region. */
152 static bool
153 is_oacc_parallel (omp_context *ctx)
155 enum gimple_code outer_type = gimple_code (ctx->stmt);
156 return ((outer_type == GIMPLE_OMP_TARGET)
157 && (gimple_omp_target_kind (ctx->stmt)
158 == GF_OMP_TARGET_KIND_OACC_PARALLEL));
161 /* Return true if CTX corresponds to an oacc kernels region. */
163 static bool
164 is_oacc_kernels (omp_context *ctx)
166 enum gimple_code outer_type = gimple_code (ctx->stmt);
167 return ((outer_type == GIMPLE_OMP_TARGET)
168 && (gimple_omp_target_kind (ctx->stmt)
169 == GF_OMP_TARGET_KIND_OACC_KERNELS));
172 /* If DECL is the artificial dummy VAR_DECL created for non-static
173 data member privatization, return the underlying "this" parameter,
174 otherwise return NULL. */
176 tree
177 omp_member_access_dummy_var (tree decl)
179 if (!VAR_P (decl)
180 || !DECL_ARTIFICIAL (decl)
181 || !DECL_IGNORED_P (decl)
182 || !DECL_HAS_VALUE_EXPR_P (decl)
183 || !lang_hooks.decls.omp_disregard_value_expr (decl, false))
184 return NULL_TREE;
186 tree v = DECL_VALUE_EXPR (decl);
187 if (TREE_CODE (v) != COMPONENT_REF)
188 return NULL_TREE;
190 while (1)
191 switch (TREE_CODE (v))
193 case COMPONENT_REF:
194 case MEM_REF:
195 case INDIRECT_REF:
196 CASE_CONVERT:
197 case POINTER_PLUS_EXPR:
198 v = TREE_OPERAND (v, 0);
199 continue;
200 case PARM_DECL:
201 if (DECL_CONTEXT (v) == current_function_decl
202 && DECL_ARTIFICIAL (v)
203 && TREE_CODE (TREE_TYPE (v)) == POINTER_TYPE)
204 return v;
205 return NULL_TREE;
206 default:
207 return NULL_TREE;
211 /* Helper for unshare_and_remap, called through walk_tree. */
213 static tree
214 unshare_and_remap_1 (tree *tp, int *walk_subtrees, void *data)
216 tree *pair = (tree *) data;
217 if (*tp == pair[0])
219 *tp = unshare_expr (pair[1]);
220 *walk_subtrees = 0;
222 else if (IS_TYPE_OR_DECL_P (*tp))
223 *walk_subtrees = 0;
224 return NULL_TREE;
227 /* Return unshare_expr (X) with all occurrences of FROM
228 replaced with TO. */
230 static tree
231 unshare_and_remap (tree x, tree from, tree to)
233 tree pair[2] = { from, to };
234 x = unshare_expr (x);
235 walk_tree (&x, unshare_and_remap_1, pair, NULL);
236 return x;
239 /* Convenience function for calling scan_omp_1_op on tree operands. */
241 static inline tree
242 scan_omp_op (tree *tp, omp_context *ctx)
244 struct walk_stmt_info wi;
246 memset (&wi, 0, sizeof (wi));
247 wi.info = ctx;
248 wi.want_locations = true;
250 return walk_tree (tp, scan_omp_1_op, &wi, NULL);
253 static void lower_omp (gimple_seq *, omp_context *);
254 static tree lookup_decl_in_outer_ctx (tree, omp_context *);
255 static tree maybe_lookup_decl_in_outer_ctx (tree, omp_context *);
257 /* Return true if CTX is for an omp parallel. */
259 static inline bool
260 is_parallel_ctx (omp_context *ctx)
262 return gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL;
266 /* Return true if CTX is for an omp task. */
268 static inline bool
269 is_task_ctx (omp_context *ctx)
271 return gimple_code (ctx->stmt) == GIMPLE_OMP_TASK;
275 /* Return true if CTX is for an omp taskloop. */
277 static inline bool
278 is_taskloop_ctx (omp_context *ctx)
280 return gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
281 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP;
285 /* Return true if CTX is for an omp parallel or omp task. */
287 static inline bool
288 is_taskreg_ctx (omp_context *ctx)
290 return is_parallel_ctx (ctx) || is_task_ctx (ctx);
293 /* Return true if EXPR is variable sized. */
295 static inline bool
296 is_variable_sized (const_tree expr)
298 return !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
301 /* Lookup variables. The "maybe" form
302 allows for the variable form to not have been entered, otherwise we
303 assert that the variable must have been entered. */
305 static inline tree
306 lookup_decl (tree var, omp_context *ctx)
308 tree *n = ctx->cb.decl_map->get (var);
309 return *n;
312 static inline tree
313 maybe_lookup_decl (const_tree var, omp_context *ctx)
315 tree *n = ctx->cb.decl_map->get (const_cast<tree> (var));
316 return n ? *n : NULL_TREE;
319 static inline tree
320 lookup_field (tree var, omp_context *ctx)
322 splay_tree_node n;
323 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) var);
324 return (tree) n->value;
327 static inline tree
328 lookup_sfield (splay_tree_key key, omp_context *ctx)
330 splay_tree_node n;
331 n = splay_tree_lookup (ctx->sfield_map
332 ? ctx->sfield_map : ctx->field_map, key);
333 return (tree) n->value;
336 static inline tree
337 lookup_sfield (tree var, omp_context *ctx)
339 return lookup_sfield ((splay_tree_key) var, ctx);
342 static inline tree
343 maybe_lookup_field (splay_tree_key key, omp_context *ctx)
345 splay_tree_node n;
346 n = splay_tree_lookup (ctx->field_map, key);
347 return n ? (tree) n->value : NULL_TREE;
350 static inline tree
351 maybe_lookup_field (tree var, omp_context *ctx)
353 return maybe_lookup_field ((splay_tree_key) var, ctx);
356 /* Return true if DECL should be copied by pointer. SHARED_CTX is
357 the parallel context if DECL is to be shared. */
359 static bool
360 use_pointer_for_field (tree decl, omp_context *shared_ctx)
362 if (AGGREGATE_TYPE_P (TREE_TYPE (decl))
363 || TYPE_ATOMIC (TREE_TYPE (decl)))
364 return true;
366 /* We can only use copy-in/copy-out semantics for shared variables
367 when we know the value is not accessible from an outer scope. */
368 if (shared_ctx)
370 gcc_assert (!is_gimple_omp_oacc (shared_ctx->stmt));
372 /* ??? Trivially accessible from anywhere. But why would we even
373 be passing an address in this case? Should we simply assert
374 this to be false, or should we have a cleanup pass that removes
375 these from the list of mappings? */
376 if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
377 return true;
379 /* For variables with DECL_HAS_VALUE_EXPR_P set, we cannot tell
380 without analyzing the expression whether or not its location
381 is accessible to anyone else. In the case of nested parallel
382 regions it certainly may be. */
383 if (TREE_CODE (decl) != RESULT_DECL && DECL_HAS_VALUE_EXPR_P (decl))
384 return true;
386 /* Do not use copy-in/copy-out for variables that have their
387 address taken. */
388 if (TREE_ADDRESSABLE (decl))
389 return true;
391 /* lower_send_shared_vars only uses copy-in, but not copy-out
392 for these. */
393 if (TREE_READONLY (decl)
394 || ((TREE_CODE (decl) == RESULT_DECL
395 || TREE_CODE (decl) == PARM_DECL)
396 && DECL_BY_REFERENCE (decl)))
397 return false;
399 /* Disallow copy-in/out in nested parallel if
400 decl is shared in outer parallel, otherwise
401 each thread could store the shared variable
402 in its own copy-in location, making the
403 variable no longer really shared. */
404 if (shared_ctx->is_nested)
406 omp_context *up;
408 for (up = shared_ctx->outer; up; up = up->outer)
409 if (is_taskreg_ctx (up) && maybe_lookup_decl (decl, up))
410 break;
412 if (up)
414 tree c;
416 for (c = gimple_omp_taskreg_clauses (up->stmt);
417 c; c = OMP_CLAUSE_CHAIN (c))
418 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
419 && OMP_CLAUSE_DECL (c) == decl)
420 break;
422 if (c)
423 goto maybe_mark_addressable_and_ret;
427 /* For tasks avoid using copy-in/out. As tasks can be
428 deferred or executed in different thread, when GOMP_task
429 returns, the task hasn't necessarily terminated. */
430 if (is_task_ctx (shared_ctx))
432 tree outer;
433 maybe_mark_addressable_and_ret:
434 outer = maybe_lookup_decl_in_outer_ctx (decl, shared_ctx);
435 if (is_gimple_reg (outer) && !omp_member_access_dummy_var (outer))
437 /* Taking address of OUTER in lower_send_shared_vars
438 might need regimplification of everything that uses the
439 variable. */
440 if (!task_shared_vars)
441 task_shared_vars = BITMAP_ALLOC (NULL);
442 bitmap_set_bit (task_shared_vars, DECL_UID (outer));
443 TREE_ADDRESSABLE (outer) = 1;
445 return true;
449 return false;
452 /* Construct a new automatic decl similar to VAR. */
454 static tree
455 omp_copy_decl_2 (tree var, tree name, tree type, omp_context *ctx)
457 tree copy = copy_var_decl (var, name, type);
459 DECL_CONTEXT (copy) = current_function_decl;
460 DECL_CHAIN (copy) = ctx->block_vars;
461 /* If VAR is listed in task_shared_vars, it means it wasn't
462 originally addressable and is just because task needs to take
463 it's address. But we don't need to take address of privatizations
464 from that var. */
465 if (TREE_ADDRESSABLE (var)
466 && task_shared_vars
467 && bitmap_bit_p (task_shared_vars, DECL_UID (var)))
468 TREE_ADDRESSABLE (copy) = 0;
469 ctx->block_vars = copy;
471 return copy;
474 static tree
475 omp_copy_decl_1 (tree var, omp_context *ctx)
477 return omp_copy_decl_2 (var, DECL_NAME (var), TREE_TYPE (var), ctx);
480 /* Build COMPONENT_REF and set TREE_THIS_VOLATILE and TREE_READONLY on it
481 as appropriate. */
482 static tree
483 omp_build_component_ref (tree obj, tree field)
485 tree ret = build3 (COMPONENT_REF, TREE_TYPE (field), obj, field, NULL);
486 if (TREE_THIS_VOLATILE (field))
487 TREE_THIS_VOLATILE (ret) |= 1;
488 if (TREE_READONLY (field))
489 TREE_READONLY (ret) |= 1;
490 return ret;
493 /* Build tree nodes to access the field for VAR on the receiver side. */
495 static tree
496 build_receiver_ref (tree var, bool by_ref, omp_context *ctx)
498 tree x, field = lookup_field (var, ctx);
500 /* If the receiver record type was remapped in the child function,
501 remap the field into the new record type. */
502 x = maybe_lookup_field (field, ctx);
503 if (x != NULL)
504 field = x;
506 x = build_simple_mem_ref (ctx->receiver_decl);
507 TREE_THIS_NOTRAP (x) = 1;
508 x = omp_build_component_ref (x, field);
509 if (by_ref)
511 x = build_simple_mem_ref (x);
512 TREE_THIS_NOTRAP (x) = 1;
515 return x;
518 /* Build tree nodes to access VAR in the scope outer to CTX. In the case
519 of a parallel, this is a component reference; for workshare constructs
520 this is some variable. */
522 static tree
523 build_outer_var_ref (tree var, omp_context *ctx,
524 enum omp_clause_code code = OMP_CLAUSE_ERROR)
526 tree x;
528 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
529 x = var;
530 else if (is_variable_sized (var))
532 x = TREE_OPERAND (DECL_VALUE_EXPR (var), 0);
533 x = build_outer_var_ref (x, ctx, code);
534 x = build_simple_mem_ref (x);
536 else if (is_taskreg_ctx (ctx))
538 bool by_ref = use_pointer_for_field (var, NULL);
539 x = build_receiver_ref (var, by_ref, ctx);
541 else if ((gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
542 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
543 || (code == OMP_CLAUSE_PRIVATE
544 && (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
545 || gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS
546 || gimple_code (ctx->stmt) == GIMPLE_OMP_SINGLE)))
548 /* #pragma omp simd isn't a worksharing construct, and can reference
549 even private vars in its linear etc. clauses.
550 Similarly for OMP_CLAUSE_PRIVATE with outer ref, that can refer
551 to private vars in all worksharing constructs. */
552 x = NULL_TREE;
553 if (ctx->outer && is_taskreg_ctx (ctx))
554 x = lookup_decl (var, ctx->outer);
555 else if (ctx->outer)
556 x = maybe_lookup_decl_in_outer_ctx (var, ctx);
557 if (x == NULL_TREE)
558 x = var;
560 else if (code == OMP_CLAUSE_LASTPRIVATE && is_taskloop_ctx (ctx))
562 gcc_assert (ctx->outer);
563 splay_tree_node n
564 = splay_tree_lookup (ctx->outer->field_map,
565 (splay_tree_key) &DECL_UID (var));
566 if (n == NULL)
568 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx->outer)))
569 x = var;
570 else
571 x = lookup_decl (var, ctx->outer);
573 else
575 tree field = (tree) n->value;
576 /* If the receiver record type was remapped in the child function,
577 remap the field into the new record type. */
578 x = maybe_lookup_field (field, ctx->outer);
579 if (x != NULL)
580 field = x;
582 x = build_simple_mem_ref (ctx->outer->receiver_decl);
583 x = omp_build_component_ref (x, field);
584 if (use_pointer_for_field (var, ctx->outer))
585 x = build_simple_mem_ref (x);
588 else if (ctx->outer)
590 omp_context *outer = ctx->outer;
591 if (gimple_code (outer->stmt) == GIMPLE_OMP_GRID_BODY)
593 outer = outer->outer;
594 gcc_assert (outer
595 && gimple_code (outer->stmt) != GIMPLE_OMP_GRID_BODY);
597 x = lookup_decl (var, outer);
599 else if (omp_is_reference (var))
600 /* This can happen with orphaned constructs. If var is reference, it is
601 possible it is shared and as such valid. */
602 x = var;
603 else if (omp_member_access_dummy_var (var))
604 x = var;
605 else
606 gcc_unreachable ();
608 if (x == var)
610 tree t = omp_member_access_dummy_var (var);
611 if (t)
613 x = DECL_VALUE_EXPR (var);
614 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
615 if (o != t)
616 x = unshare_and_remap (x, t, o);
617 else
618 x = unshare_expr (x);
622 if (omp_is_reference (var))
623 x = build_simple_mem_ref (x);
625 return x;
628 /* Build tree nodes to access the field for VAR on the sender side. */
630 static tree
631 build_sender_ref (splay_tree_key key, omp_context *ctx)
633 tree field = lookup_sfield (key, ctx);
634 return omp_build_component_ref (ctx->sender_decl, field);
637 static tree
638 build_sender_ref (tree var, omp_context *ctx)
640 return build_sender_ref ((splay_tree_key) var, ctx);
643 /* Add a new field for VAR inside the structure CTX->SENDER_DECL. If
644 BASE_POINTERS_RESTRICT, declare the field with restrict. */
646 static void
647 install_var_field (tree var, bool by_ref, int mask, omp_context *ctx,
648 bool base_pointers_restrict = false)
650 tree field, type, sfield = NULL_TREE;
651 splay_tree_key key = (splay_tree_key) var;
653 if ((mask & 8) != 0)
655 key = (splay_tree_key) &DECL_UID (var);
656 gcc_checking_assert (key != (splay_tree_key) var);
658 gcc_assert ((mask & 1) == 0
659 || !splay_tree_lookup (ctx->field_map, key));
660 gcc_assert ((mask & 2) == 0 || !ctx->sfield_map
661 || !splay_tree_lookup (ctx->sfield_map, key));
662 gcc_assert ((mask & 3) == 3
663 || !is_gimple_omp_oacc (ctx->stmt));
665 type = TREE_TYPE (var);
666 /* Prevent redeclaring the var in the split-off function with a restrict
667 pointer type. Note that we only clear type itself, restrict qualifiers in
668 the pointed-to type will be ignored by points-to analysis. */
669 if (POINTER_TYPE_P (type)
670 && TYPE_RESTRICT (type))
671 type = build_qualified_type (type, TYPE_QUALS (type) & ~TYPE_QUAL_RESTRICT);
673 if (mask & 4)
675 gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
676 type = build_pointer_type (build_pointer_type (type));
678 else if (by_ref)
680 type = build_pointer_type (type);
681 if (base_pointers_restrict)
682 type = build_qualified_type (type, TYPE_QUAL_RESTRICT);
684 else if ((mask & 3) == 1 && omp_is_reference (var))
685 type = TREE_TYPE (type);
687 field = build_decl (DECL_SOURCE_LOCATION (var),
688 FIELD_DECL, DECL_NAME (var), type);
690 /* Remember what variable this field was created for. This does have a
691 side effect of making dwarf2out ignore this member, so for helpful
692 debugging we clear it later in delete_omp_context. */
693 DECL_ABSTRACT_ORIGIN (field) = var;
694 if (type == TREE_TYPE (var))
696 SET_DECL_ALIGN (field, DECL_ALIGN (var));
697 DECL_USER_ALIGN (field) = DECL_USER_ALIGN (var);
698 TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (var);
700 else
701 SET_DECL_ALIGN (field, TYPE_ALIGN (type));
703 if ((mask & 3) == 3)
705 insert_field_into_struct (ctx->record_type, field);
706 if (ctx->srecord_type)
708 sfield = build_decl (DECL_SOURCE_LOCATION (var),
709 FIELD_DECL, DECL_NAME (var), type);
710 DECL_ABSTRACT_ORIGIN (sfield) = var;
711 SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
712 DECL_USER_ALIGN (sfield) = DECL_USER_ALIGN (field);
713 TREE_THIS_VOLATILE (sfield) = TREE_THIS_VOLATILE (field);
714 insert_field_into_struct (ctx->srecord_type, sfield);
717 else
719 if (ctx->srecord_type == NULL_TREE)
721 tree t;
723 ctx->srecord_type = lang_hooks.types.make_type (RECORD_TYPE);
724 ctx->sfield_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
725 for (t = TYPE_FIELDS (ctx->record_type); t ; t = TREE_CHAIN (t))
727 sfield = build_decl (DECL_SOURCE_LOCATION (t),
728 FIELD_DECL, DECL_NAME (t), TREE_TYPE (t));
729 DECL_ABSTRACT_ORIGIN (sfield) = DECL_ABSTRACT_ORIGIN (t);
730 insert_field_into_struct (ctx->srecord_type, sfield);
731 splay_tree_insert (ctx->sfield_map,
732 (splay_tree_key) DECL_ABSTRACT_ORIGIN (t),
733 (splay_tree_value) sfield);
736 sfield = field;
737 insert_field_into_struct ((mask & 1) ? ctx->record_type
738 : ctx->srecord_type, field);
741 if (mask & 1)
742 splay_tree_insert (ctx->field_map, key, (splay_tree_value) field);
743 if ((mask & 2) && ctx->sfield_map)
744 splay_tree_insert (ctx->sfield_map, key, (splay_tree_value) sfield);
747 static tree
748 install_var_local (tree var, omp_context *ctx)
750 tree new_var = omp_copy_decl_1 (var, ctx);
751 insert_decl_map (&ctx->cb, var, new_var);
752 return new_var;
755 /* Adjust the replacement for DECL in CTX for the new context. This means
756 copying the DECL_VALUE_EXPR, and fixing up the type. */
758 static void
759 fixup_remapped_decl (tree decl, omp_context *ctx, bool private_debug)
761 tree new_decl, size;
763 new_decl = lookup_decl (decl, ctx);
765 TREE_TYPE (new_decl) = remap_type (TREE_TYPE (decl), &ctx->cb);
767 if ((!TREE_CONSTANT (DECL_SIZE (new_decl)) || private_debug)
768 && DECL_HAS_VALUE_EXPR_P (decl))
770 tree ve = DECL_VALUE_EXPR (decl);
771 walk_tree (&ve, copy_tree_body_r, &ctx->cb, NULL);
772 SET_DECL_VALUE_EXPR (new_decl, ve);
773 DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
776 if (!TREE_CONSTANT (DECL_SIZE (new_decl)))
778 size = remap_decl (DECL_SIZE (decl), &ctx->cb);
779 if (size == error_mark_node)
780 size = TYPE_SIZE (TREE_TYPE (new_decl));
781 DECL_SIZE (new_decl) = size;
783 size = remap_decl (DECL_SIZE_UNIT (decl), &ctx->cb);
784 if (size == error_mark_node)
785 size = TYPE_SIZE_UNIT (TREE_TYPE (new_decl));
786 DECL_SIZE_UNIT (new_decl) = size;
790 /* The callback for remap_decl. Search all containing contexts for a
791 mapping of the variable; this avoids having to duplicate the splay
792 tree ahead of time. We know a mapping doesn't already exist in the
793 given context. Create new mappings to implement default semantics. */
795 static tree
796 omp_copy_decl (tree var, copy_body_data *cb)
798 omp_context *ctx = (omp_context *) cb;
799 tree new_var;
801 if (TREE_CODE (var) == LABEL_DECL)
803 new_var = create_artificial_label (DECL_SOURCE_LOCATION (var));
804 DECL_CONTEXT (new_var) = current_function_decl;
805 insert_decl_map (&ctx->cb, var, new_var);
806 return new_var;
809 while (!is_taskreg_ctx (ctx))
811 ctx = ctx->outer;
812 if (ctx == NULL)
813 return var;
814 new_var = maybe_lookup_decl (var, ctx);
815 if (new_var)
816 return new_var;
819 if (is_global_var (var) || decl_function_context (var) != ctx->cb.src_fn)
820 return var;
822 return error_mark_node;
825 /* Create a new context, with OUTER_CTX being the surrounding context. */
827 static omp_context *
828 new_omp_context (gimple *stmt, omp_context *outer_ctx)
830 omp_context *ctx = XCNEW (omp_context);
832 splay_tree_insert (all_contexts, (splay_tree_key) stmt,
833 (splay_tree_value) ctx);
834 ctx->stmt = stmt;
836 if (outer_ctx)
838 ctx->outer = outer_ctx;
839 ctx->cb = outer_ctx->cb;
840 ctx->cb.block = NULL;
841 ctx->depth = outer_ctx->depth + 1;
843 else
845 ctx->cb.src_fn = current_function_decl;
846 ctx->cb.dst_fn = current_function_decl;
847 ctx->cb.src_node = cgraph_node::get (current_function_decl);
848 gcc_checking_assert (ctx->cb.src_node);
849 ctx->cb.dst_node = ctx->cb.src_node;
850 ctx->cb.src_cfun = cfun;
851 ctx->cb.copy_decl = omp_copy_decl;
852 ctx->cb.eh_lp_nr = 0;
853 ctx->cb.transform_call_graph_edges = CB_CGE_MOVE;
854 ctx->depth = 1;
857 ctx->cb.decl_map = new hash_map<tree, tree>;
859 return ctx;
862 static gimple_seq maybe_catch_exception (gimple_seq);
864 /* Finalize task copyfn. */
866 static void
867 finalize_task_copyfn (gomp_task *task_stmt)
869 struct function *child_cfun;
870 tree child_fn;
871 gimple_seq seq = NULL, new_seq;
872 gbind *bind;
874 child_fn = gimple_omp_task_copy_fn (task_stmt);
875 if (child_fn == NULL_TREE)
876 return;
878 child_cfun = DECL_STRUCT_FUNCTION (child_fn);
879 DECL_STRUCT_FUNCTION (child_fn)->curr_properties = cfun->curr_properties;
881 push_cfun (child_cfun);
882 bind = gimplify_body (child_fn, false);
883 gimple_seq_add_stmt (&seq, bind);
884 new_seq = maybe_catch_exception (seq);
885 if (new_seq != seq)
887 bind = gimple_build_bind (NULL, new_seq, NULL);
888 seq = NULL;
889 gimple_seq_add_stmt (&seq, bind);
891 gimple_set_body (child_fn, seq);
892 pop_cfun ();
894 /* Inform the callgraph about the new function. */
895 cgraph_node *node = cgraph_node::get_create (child_fn);
896 node->parallelized_function = 1;
897 cgraph_node::add_new_function (child_fn, false);
900 /* Destroy a omp_context data structures. Called through the splay tree
901 value delete callback. */
903 static void
904 delete_omp_context (splay_tree_value value)
906 omp_context *ctx = (omp_context *) value;
908 delete ctx->cb.decl_map;
910 if (ctx->field_map)
911 splay_tree_delete (ctx->field_map);
912 if (ctx->sfield_map)
913 splay_tree_delete (ctx->sfield_map);
915 /* We hijacked DECL_ABSTRACT_ORIGIN earlier. We need to clear it before
916 it produces corrupt debug information. */
917 if (ctx->record_type)
919 tree t;
920 for (t = TYPE_FIELDS (ctx->record_type); t ; t = DECL_CHAIN (t))
921 DECL_ABSTRACT_ORIGIN (t) = NULL;
923 if (ctx->srecord_type)
925 tree t;
926 for (t = TYPE_FIELDS (ctx->srecord_type); t ; t = DECL_CHAIN (t))
927 DECL_ABSTRACT_ORIGIN (t) = NULL;
930 if (is_task_ctx (ctx))
931 finalize_task_copyfn (as_a <gomp_task *> (ctx->stmt));
933 XDELETE (ctx);
936 /* Fix up RECEIVER_DECL with a type that has been remapped to the child
937 context. */
939 static void
940 fixup_child_record_type (omp_context *ctx)
942 tree f, type = ctx->record_type;
944 if (!ctx->receiver_decl)
945 return;
946 /* ??? It isn't sufficient to just call remap_type here, because
947 variably_modified_type_p doesn't work the way we expect for
948 record types. Testing each field for whether it needs remapping
949 and creating a new record by hand works, however. */
950 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
951 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
952 break;
953 if (f)
955 tree name, new_fields = NULL;
957 type = lang_hooks.types.make_type (RECORD_TYPE);
958 name = DECL_NAME (TYPE_NAME (ctx->record_type));
959 name = build_decl (DECL_SOURCE_LOCATION (ctx->receiver_decl),
960 TYPE_DECL, name, type);
961 TYPE_NAME (type) = name;
963 for (f = TYPE_FIELDS (ctx->record_type); f ; f = DECL_CHAIN (f))
965 tree new_f = copy_node (f);
966 DECL_CONTEXT (new_f) = type;
967 TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &ctx->cb);
968 DECL_CHAIN (new_f) = new_fields;
969 walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &ctx->cb, NULL);
970 walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r,
971 &ctx->cb, NULL);
972 walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
973 &ctx->cb, NULL);
974 new_fields = new_f;
976 /* Arrange to be able to look up the receiver field
977 given the sender field. */
978 splay_tree_insert (ctx->field_map, (splay_tree_key) f,
979 (splay_tree_value) new_f);
981 TYPE_FIELDS (type) = nreverse (new_fields);
982 layout_type (type);
985 /* In a target region we never modify any of the pointers in *.omp_data_i,
986 so attempt to help the optimizers. */
987 if (is_gimple_omp_offloaded (ctx->stmt))
988 type = build_qualified_type (type, TYPE_QUAL_CONST);
990 TREE_TYPE (ctx->receiver_decl)
991 = build_qualified_type (build_reference_type (type), TYPE_QUAL_RESTRICT);
994 /* Instantiate decls as necessary in CTX to satisfy the data sharing
995 specified by CLAUSES. If BASE_POINTERS_RESTRICT, install var field with
996 restrict. */
998 static void
999 scan_sharing_clauses (tree clauses, omp_context *ctx,
1000 bool base_pointers_restrict = false)
1002 tree c, decl;
1003 bool scan_array_reductions = false;
1005 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1007 bool by_ref;
1009 switch (OMP_CLAUSE_CODE (c))
1011 case OMP_CLAUSE_PRIVATE:
1012 decl = OMP_CLAUSE_DECL (c);
1013 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
1014 goto do_private;
1015 else if (!is_variable_sized (decl))
1016 install_var_local (decl, ctx);
1017 break;
1019 case OMP_CLAUSE_SHARED:
1020 decl = OMP_CLAUSE_DECL (c);
1021 /* Ignore shared directives in teams construct. */
1022 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
1024 /* Global variables don't need to be copied,
1025 the receiver side will use them directly. */
1026 tree odecl = maybe_lookup_decl_in_outer_ctx (decl, ctx);
1027 if (is_global_var (odecl))
1028 break;
1029 insert_decl_map (&ctx->cb, decl, odecl);
1030 break;
1032 gcc_assert (is_taskreg_ctx (ctx));
1033 gcc_assert (!COMPLETE_TYPE_P (TREE_TYPE (decl))
1034 || !is_variable_sized (decl));
1035 /* Global variables don't need to be copied,
1036 the receiver side will use them directly. */
1037 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1038 break;
1039 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
1041 use_pointer_for_field (decl, ctx);
1042 break;
1044 by_ref = use_pointer_for_field (decl, NULL);
1045 if ((! TREE_READONLY (decl) && !OMP_CLAUSE_SHARED_READONLY (c))
1046 || TREE_ADDRESSABLE (decl)
1047 || by_ref
1048 || omp_is_reference (decl))
1050 by_ref = use_pointer_for_field (decl, ctx);
1051 install_var_field (decl, by_ref, 3, ctx);
1052 install_var_local (decl, ctx);
1053 break;
1055 /* We don't need to copy const scalar vars back. */
1056 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_FIRSTPRIVATE);
1057 goto do_private;
1059 case OMP_CLAUSE_REDUCTION:
1060 decl = OMP_CLAUSE_DECL (c);
1061 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
1062 && TREE_CODE (decl) == MEM_REF)
1064 tree t = TREE_OPERAND (decl, 0);
1065 if (TREE_CODE (t) == POINTER_PLUS_EXPR)
1066 t = TREE_OPERAND (t, 0);
1067 if (TREE_CODE (t) == INDIRECT_REF
1068 || TREE_CODE (t) == ADDR_EXPR)
1069 t = TREE_OPERAND (t, 0);
1070 install_var_local (t, ctx);
1071 if (is_taskreg_ctx (ctx)
1072 && !is_global_var (maybe_lookup_decl_in_outer_ctx (t, ctx))
1073 && !is_variable_sized (t))
1075 by_ref = use_pointer_for_field (t, ctx);
1076 install_var_field (t, by_ref, 3, ctx);
1078 break;
1080 goto do_private;
1082 case OMP_CLAUSE_LASTPRIVATE:
1083 /* Let the corresponding firstprivate clause create
1084 the variable. */
1085 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
1086 break;
1087 /* FALLTHRU */
1089 case OMP_CLAUSE_FIRSTPRIVATE:
1090 case OMP_CLAUSE_LINEAR:
1091 decl = OMP_CLAUSE_DECL (c);
1092 do_private:
1093 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
1094 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
1095 && is_gimple_omp_offloaded (ctx->stmt))
1097 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
1098 install_var_field (decl, !omp_is_reference (decl), 3, ctx);
1099 else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1100 install_var_field (decl, true, 3, ctx);
1101 else
1102 install_var_field (decl, false, 3, ctx);
1104 if (is_variable_sized (decl))
1106 if (is_task_ctx (ctx))
1107 install_var_field (decl, false, 1, ctx);
1108 break;
1110 else if (is_taskreg_ctx (ctx))
1112 bool global
1113 = is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx));
1114 by_ref = use_pointer_for_field (decl, NULL);
1116 if (is_task_ctx (ctx)
1117 && (global || by_ref || omp_is_reference (decl)))
1119 install_var_field (decl, false, 1, ctx);
1120 if (!global)
1121 install_var_field (decl, by_ref, 2, ctx);
1123 else if (!global)
1124 install_var_field (decl, by_ref, 3, ctx);
1126 install_var_local (decl, ctx);
1127 break;
1129 case OMP_CLAUSE_USE_DEVICE_PTR:
1130 decl = OMP_CLAUSE_DECL (c);
1131 if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1132 install_var_field (decl, true, 3, ctx);
1133 else
1134 install_var_field (decl, false, 3, ctx);
1135 if (DECL_SIZE (decl)
1136 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1138 tree decl2 = DECL_VALUE_EXPR (decl);
1139 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1140 decl2 = TREE_OPERAND (decl2, 0);
1141 gcc_assert (DECL_P (decl2));
1142 install_var_local (decl2, ctx);
1144 install_var_local (decl, ctx);
1145 break;
1147 case OMP_CLAUSE_IS_DEVICE_PTR:
1148 decl = OMP_CLAUSE_DECL (c);
1149 goto do_private;
1151 case OMP_CLAUSE__LOOPTEMP_:
1152 gcc_assert (is_taskreg_ctx (ctx));
1153 decl = OMP_CLAUSE_DECL (c);
1154 install_var_field (decl, false, 3, ctx);
1155 install_var_local (decl, ctx);
1156 break;
1158 case OMP_CLAUSE_COPYPRIVATE:
1159 case OMP_CLAUSE_COPYIN:
1160 decl = OMP_CLAUSE_DECL (c);
1161 by_ref = use_pointer_for_field (decl, NULL);
1162 install_var_field (decl, by_ref, 3, ctx);
1163 break;
1165 case OMP_CLAUSE_DEFAULT:
1166 ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
1167 break;
1169 case OMP_CLAUSE_FINAL:
1170 case OMP_CLAUSE_IF:
1171 case OMP_CLAUSE_NUM_THREADS:
1172 case OMP_CLAUSE_NUM_TEAMS:
1173 case OMP_CLAUSE_THREAD_LIMIT:
1174 case OMP_CLAUSE_DEVICE:
1175 case OMP_CLAUSE_SCHEDULE:
1176 case OMP_CLAUSE_DIST_SCHEDULE:
1177 case OMP_CLAUSE_DEPEND:
1178 case OMP_CLAUSE_PRIORITY:
1179 case OMP_CLAUSE_GRAINSIZE:
1180 case OMP_CLAUSE_NUM_TASKS:
1181 case OMP_CLAUSE__CILK_FOR_COUNT_:
1182 case OMP_CLAUSE_NUM_GANGS:
1183 case OMP_CLAUSE_NUM_WORKERS:
1184 case OMP_CLAUSE_VECTOR_LENGTH:
1185 if (ctx->outer)
1186 scan_omp_op (&OMP_CLAUSE_OPERAND (c, 0), ctx->outer);
1187 break;
1189 case OMP_CLAUSE_TO:
1190 case OMP_CLAUSE_FROM:
1191 case OMP_CLAUSE_MAP:
1192 if (ctx->outer)
1193 scan_omp_op (&OMP_CLAUSE_SIZE (c), ctx->outer);
1194 decl = OMP_CLAUSE_DECL (c);
1195 /* Global variables with "omp declare target" attribute
1196 don't need to be copied, the receiver side will use them
1197 directly. However, global variables with "omp declare target link"
1198 attribute need to be copied. */
1199 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1200 && DECL_P (decl)
1201 && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
1202 && (OMP_CLAUSE_MAP_KIND (c)
1203 != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
1204 || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1205 && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
1206 && varpool_node::get_create (decl)->offloadable
1207 && !lookup_attribute ("omp declare target link",
1208 DECL_ATTRIBUTES (decl)))
1209 break;
1210 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1211 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER)
1213 /* Ignore GOMP_MAP_POINTER kind for arrays in regions that are
1214 not offloaded; there is nothing to map for those. */
1215 if (!is_gimple_omp_offloaded (ctx->stmt)
1216 && !POINTER_TYPE_P (TREE_TYPE (decl))
1217 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
1218 break;
1220 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1221 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
1222 || (OMP_CLAUSE_MAP_KIND (c)
1223 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
1225 if (TREE_CODE (decl) == COMPONENT_REF
1226 || (TREE_CODE (decl) == INDIRECT_REF
1227 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
1228 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
1229 == REFERENCE_TYPE)))
1230 break;
1231 if (DECL_SIZE (decl)
1232 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1234 tree decl2 = DECL_VALUE_EXPR (decl);
1235 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1236 decl2 = TREE_OPERAND (decl2, 0);
1237 gcc_assert (DECL_P (decl2));
1238 install_var_local (decl2, ctx);
1240 install_var_local (decl, ctx);
1241 break;
1243 if (DECL_P (decl))
1245 if (DECL_SIZE (decl)
1246 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1248 tree decl2 = DECL_VALUE_EXPR (decl);
1249 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1250 decl2 = TREE_OPERAND (decl2, 0);
1251 gcc_assert (DECL_P (decl2));
1252 install_var_field (decl2, true, 3, ctx);
1253 install_var_local (decl2, ctx);
1254 install_var_local (decl, ctx);
1256 else
1258 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1259 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
1260 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
1261 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1262 install_var_field (decl, true, 7, ctx);
1263 else
1264 install_var_field (decl, true, 3, ctx,
1265 base_pointers_restrict);
1266 if (is_gimple_omp_offloaded (ctx->stmt)
1267 && !OMP_CLAUSE_MAP_IN_REDUCTION (c))
1268 install_var_local (decl, ctx);
1271 else
1273 tree base = get_base_address (decl);
1274 tree nc = OMP_CLAUSE_CHAIN (c);
1275 if (DECL_P (base)
1276 && nc != NULL_TREE
1277 && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
1278 && OMP_CLAUSE_DECL (nc) == base
1279 && OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_POINTER
1280 && integer_zerop (OMP_CLAUSE_SIZE (nc)))
1282 OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c) = 1;
1283 OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (nc) = 1;
1285 else
1287 if (ctx->outer)
1289 scan_omp_op (&OMP_CLAUSE_DECL (c), ctx->outer);
1290 decl = OMP_CLAUSE_DECL (c);
1292 gcc_assert (!splay_tree_lookup (ctx->field_map,
1293 (splay_tree_key) decl));
1294 tree field
1295 = build_decl (OMP_CLAUSE_LOCATION (c),
1296 FIELD_DECL, NULL_TREE, ptr_type_node);
1297 SET_DECL_ALIGN (field, TYPE_ALIGN (ptr_type_node));
1298 insert_field_into_struct (ctx->record_type, field);
1299 splay_tree_insert (ctx->field_map, (splay_tree_key) decl,
1300 (splay_tree_value) field);
1303 break;
1305 case OMP_CLAUSE__GRIDDIM_:
1306 if (ctx->outer)
1308 scan_omp_op (&OMP_CLAUSE__GRIDDIM__SIZE (c), ctx->outer);
1309 scan_omp_op (&OMP_CLAUSE__GRIDDIM__GROUP (c), ctx->outer);
1311 break;
1313 case OMP_CLAUSE_NOWAIT:
1314 case OMP_CLAUSE_ORDERED:
1315 case OMP_CLAUSE_COLLAPSE:
1316 case OMP_CLAUSE_UNTIED:
1317 case OMP_CLAUSE_MERGEABLE:
1318 case OMP_CLAUSE_PROC_BIND:
1319 case OMP_CLAUSE_SAFELEN:
1320 case OMP_CLAUSE_SIMDLEN:
1321 case OMP_CLAUSE_THREADS:
1322 case OMP_CLAUSE_SIMD:
1323 case OMP_CLAUSE_NOGROUP:
1324 case OMP_CLAUSE_DEFAULTMAP:
1325 case OMP_CLAUSE_ASYNC:
1326 case OMP_CLAUSE_WAIT:
1327 case OMP_CLAUSE_GANG:
1328 case OMP_CLAUSE_WORKER:
1329 case OMP_CLAUSE_VECTOR:
1330 case OMP_CLAUSE_INDEPENDENT:
1331 case OMP_CLAUSE_AUTO:
1332 case OMP_CLAUSE_SEQ:
1333 case OMP_CLAUSE_TILE:
1334 case OMP_CLAUSE__SIMT_:
1335 break;
1337 case OMP_CLAUSE_ALIGNED:
1338 decl = OMP_CLAUSE_DECL (c);
1339 if (is_global_var (decl)
1340 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1341 install_var_local (decl, ctx);
1342 break;
1344 case OMP_CLAUSE__CACHE_:
1345 default:
1346 gcc_unreachable ();
1350 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1352 switch (OMP_CLAUSE_CODE (c))
1354 case OMP_CLAUSE_LASTPRIVATE:
1355 /* Let the corresponding firstprivate clause create
1356 the variable. */
1357 if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
1358 scan_array_reductions = true;
1359 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
1360 break;
1361 /* FALLTHRU */
1363 case OMP_CLAUSE_FIRSTPRIVATE:
1364 case OMP_CLAUSE_PRIVATE:
1365 case OMP_CLAUSE_LINEAR:
1366 case OMP_CLAUSE_IS_DEVICE_PTR:
1367 decl = OMP_CLAUSE_DECL (c);
1368 if (is_variable_sized (decl))
1370 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
1371 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
1372 && is_gimple_omp_offloaded (ctx->stmt))
1374 tree decl2 = DECL_VALUE_EXPR (decl);
1375 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1376 decl2 = TREE_OPERAND (decl2, 0);
1377 gcc_assert (DECL_P (decl2));
1378 install_var_local (decl2, ctx);
1379 fixup_remapped_decl (decl2, ctx, false);
1381 install_var_local (decl, ctx);
1383 fixup_remapped_decl (decl, ctx,
1384 OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
1385 && OMP_CLAUSE_PRIVATE_DEBUG (c));
1386 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
1387 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
1388 scan_array_reductions = true;
1389 break;
1391 case OMP_CLAUSE_REDUCTION:
1392 decl = OMP_CLAUSE_DECL (c);
1393 if (TREE_CODE (decl) != MEM_REF)
1395 if (is_variable_sized (decl))
1396 install_var_local (decl, ctx);
1397 fixup_remapped_decl (decl, ctx, false);
1399 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
1400 scan_array_reductions = true;
1401 break;
1403 case OMP_CLAUSE_SHARED:
1404 /* Ignore shared directives in teams construct. */
1405 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
1406 break;
1407 decl = OMP_CLAUSE_DECL (c);
1408 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1409 break;
1410 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
1412 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl,
1413 ctx->outer)))
1414 break;
1415 bool by_ref = use_pointer_for_field (decl, ctx);
1416 install_var_field (decl, by_ref, 11, ctx);
1417 break;
1419 fixup_remapped_decl (decl, ctx, false);
1420 break;
1422 case OMP_CLAUSE_MAP:
1423 if (!is_gimple_omp_offloaded (ctx->stmt))
1424 break;
1425 decl = OMP_CLAUSE_DECL (c);
1426 if (DECL_P (decl)
1427 && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
1428 && (OMP_CLAUSE_MAP_KIND (c)
1429 != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
1430 || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1431 && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
1432 && varpool_node::get_create (decl)->offloadable)
1433 break;
1434 if (DECL_P (decl))
1436 if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
1437 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
1438 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
1439 && !COMPLETE_TYPE_P (TREE_TYPE (decl)))
1441 tree new_decl = lookup_decl (decl, ctx);
1442 TREE_TYPE (new_decl)
1443 = remap_type (TREE_TYPE (decl), &ctx->cb);
1445 else if (DECL_SIZE (decl)
1446 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1448 tree decl2 = DECL_VALUE_EXPR (decl);
1449 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1450 decl2 = TREE_OPERAND (decl2, 0);
1451 gcc_assert (DECL_P (decl2));
1452 fixup_remapped_decl (decl2, ctx, false);
1453 fixup_remapped_decl (decl, ctx, true);
1455 else
1456 fixup_remapped_decl (decl, ctx, false);
1458 break;
1460 case OMP_CLAUSE_COPYPRIVATE:
1461 case OMP_CLAUSE_COPYIN:
1462 case OMP_CLAUSE_DEFAULT:
1463 case OMP_CLAUSE_IF:
1464 case OMP_CLAUSE_NUM_THREADS:
1465 case OMP_CLAUSE_NUM_TEAMS:
1466 case OMP_CLAUSE_THREAD_LIMIT:
1467 case OMP_CLAUSE_DEVICE:
1468 case OMP_CLAUSE_SCHEDULE:
1469 case OMP_CLAUSE_DIST_SCHEDULE:
1470 case OMP_CLAUSE_NOWAIT:
1471 case OMP_CLAUSE_ORDERED:
1472 case OMP_CLAUSE_COLLAPSE:
1473 case OMP_CLAUSE_UNTIED:
1474 case OMP_CLAUSE_FINAL:
1475 case OMP_CLAUSE_MERGEABLE:
1476 case OMP_CLAUSE_PROC_BIND:
1477 case OMP_CLAUSE_SAFELEN:
1478 case OMP_CLAUSE_SIMDLEN:
1479 case OMP_CLAUSE_ALIGNED:
1480 case OMP_CLAUSE_DEPEND:
1481 case OMP_CLAUSE__LOOPTEMP_:
1482 case OMP_CLAUSE_TO:
1483 case OMP_CLAUSE_FROM:
1484 case OMP_CLAUSE_PRIORITY:
1485 case OMP_CLAUSE_GRAINSIZE:
1486 case OMP_CLAUSE_NUM_TASKS:
1487 case OMP_CLAUSE_THREADS:
1488 case OMP_CLAUSE_SIMD:
1489 case OMP_CLAUSE_NOGROUP:
1490 case OMP_CLAUSE_DEFAULTMAP:
1491 case OMP_CLAUSE_USE_DEVICE_PTR:
1492 case OMP_CLAUSE__CILK_FOR_COUNT_:
1493 case OMP_CLAUSE_ASYNC:
1494 case OMP_CLAUSE_WAIT:
1495 case OMP_CLAUSE_NUM_GANGS:
1496 case OMP_CLAUSE_NUM_WORKERS:
1497 case OMP_CLAUSE_VECTOR_LENGTH:
1498 case OMP_CLAUSE_GANG:
1499 case OMP_CLAUSE_WORKER:
1500 case OMP_CLAUSE_VECTOR:
1501 case OMP_CLAUSE_INDEPENDENT:
1502 case OMP_CLAUSE_AUTO:
1503 case OMP_CLAUSE_SEQ:
1504 case OMP_CLAUSE_TILE:
1505 case OMP_CLAUSE__GRIDDIM_:
1506 case OMP_CLAUSE__SIMT_:
1507 break;
1509 case OMP_CLAUSE__CACHE_:
1510 default:
1511 gcc_unreachable ();
1515 gcc_checking_assert (!scan_array_reductions
1516 || !is_gimple_omp_oacc (ctx->stmt));
1517 if (scan_array_reductions)
1519 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1520 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
1521 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
1523 scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c), ctx);
1524 scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
1526 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
1527 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
1528 scan_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
1529 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
1530 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
1531 scan_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
1535 /* Create a new name for omp child function. Returns an identifier. If
1536 IS_CILK_FOR is true then the suffix for the child function is
1537 "_cilk_for_fn." */
1539 static tree
1540 create_omp_child_function_name (bool task_copy, bool is_cilk_for)
1542 if (is_cilk_for)
1543 return clone_function_name (current_function_decl, "_cilk_for_fn");
1544 return clone_function_name (current_function_decl,
1545 task_copy ? "_omp_cpyfn" : "_omp_fn");
1548 /* Returns the type of the induction variable for the child function for
1549 _Cilk_for and the types for _high and _low variables based on TYPE. */
1551 static tree
1552 cilk_for_check_loop_diff_type (tree type)
1554 if (TYPE_PRECISION (type) <= TYPE_PRECISION (uint32_type_node))
1556 if (TYPE_UNSIGNED (type))
1557 return uint32_type_node;
1558 else
1559 return integer_type_node;
1561 else
1563 if (TYPE_UNSIGNED (type))
1564 return uint64_type_node;
1565 else
1566 return long_long_integer_type_node;
1570 /* Return true if CTX may belong to offloaded code: either if current function
1571 is offloaded, or any enclosing context corresponds to a target region. */
1573 static bool
1574 omp_maybe_offloaded_ctx (omp_context *ctx)
1576 if (cgraph_node::get (current_function_decl)->offloadable)
1577 return true;
1578 for (; ctx; ctx = ctx->outer)
1579 if (is_gimple_omp_offloaded (ctx->stmt))
1580 return true;
1581 return false;
/* Build a decl for the omp child function.  It'll not contain a body
   yet, just the bare decl.  CTX describes the construct being outlined;
   TASK_COPY selects the task copy-constructor variant, which takes two
   pointer arguments instead of one.  */

static void
create_omp_child_function (omp_context *ctx, bool task_copy)
{
  tree decl, type, name, t;

  /* For a combined parallel _Cilk_for, the _CILK_FOR_COUNT_ clause is
     present and forces the two extra __low/__high parameters below.  */
  tree cilk_for_count
    = (flag_cilkplus && gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL)
      ? omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
			 OMP_CLAUSE__CILK_FOR_COUNT_) : NULL_TREE;
  tree cilk_var_type = NULL_TREE;

  name = create_omp_child_function_name (task_copy,
					 cilk_for_count != NULL_TREE);
  if (task_copy)
    /* Task copy function: void (void *dst, void *src).  */
    type = build_function_type_list (void_type_node, ptr_type_node,
				    ptr_type_node, NULL_TREE);
  else if (cilk_for_count)
    {
      /* _Cilk_for child: void (void *data, T low, T high) where T is the
	 induction-difference type derived from the count clause.  */
      type = TREE_TYPE (OMP_CLAUSE_OPERAND (cilk_for_count, 0));
      cilk_var_type = cilk_for_check_loop_diff_type (type);
      type = build_function_type_list (void_type_node, ptr_type_node,
				       cilk_var_type, cilk_var_type, NULL_TREE);
    }
  else
    /* Ordinary outlined body: void (void *data).  */
    type = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);

  decl = build_decl (gimple_location (ctx->stmt), FUNCTION_DECL, name, type);

  /* OpenACC constructs never have a task copy function.  */
  gcc_checking_assert (!is_gimple_omp_oacc (ctx->stmt)
		       || !task_copy);
  if (!task_copy)
    ctx->cb.dst_fn = decl;
  else
    gimple_omp_task_set_copy_fn (ctx->stmt, decl);

  TREE_STATIC (decl) = 1;
  TREE_USED (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  DECL_IGNORED_P (decl) = 0;
  TREE_PUBLIC (decl) = 0;
  /* The outlined body must not be inlined back into its caller.  */
  DECL_UNINLINABLE (decl) = 1;
  DECL_EXTERNAL (decl) = 0;
  DECL_CONTEXT (decl) = NULL_TREE;
  DECL_INITIAL (decl) = make_node (BLOCK);
  BLOCK_SUPERCONTEXT (DECL_INITIAL (decl)) = decl;
  if (omp_maybe_offloaded_ctx (ctx))
    {
      /* Mark the child as an offload candidate and remember that this
	 translation unit needs offload processing.  */
      cgraph_node::get_create (decl)->offloadable = 1;
      if (ENABLE_OFFLOADING)
	g->have_offload = true;
    }

  if (cgraph_node::get_create (decl)->offloadable
      && !lookup_attribute ("omp declare target",
			    DECL_ATTRIBUTES (current_function_decl)))
    {
      /* Entry points of target regions get "omp target entrypoint";
	 other offloadable helpers get plain "omp declare target".  */
      const char *target_attr = (is_gimple_omp_offloaded (ctx->stmt)
				 ? "omp target entrypoint"
				 : "omp declare target");
      DECL_ATTRIBUTES (decl)
	= tree_cons (get_identifier (target_attr),
		     NULL_TREE, DECL_ATTRIBUTES (decl));
    }

  t = build_decl (DECL_SOURCE_LOCATION (decl),
		  RESULT_DECL, NULL_TREE, void_type_node);
  DECL_ARTIFICIAL (t) = 1;
  DECL_IGNORED_P (t) = 1;
  DECL_CONTEXT (t) = decl;
  DECL_RESULT (decl) = t;

  /* _Cilk_for's child function requires two extra parameters called
     __low and __high that are set the by Cilk runtime when it calls this
     function.  Parameters are chained front-to-back, so __high is pushed
     before __low so that __low ends up first after .omp_data_i.  */
  if (cilk_for_count)
    {
      t = build_decl (DECL_SOURCE_LOCATION (decl),
		      PARM_DECL, get_identifier ("__high"), cilk_var_type);
      DECL_ARTIFICIAL (t) = 1;
      DECL_NAMELESS (t) = 1;
      DECL_ARG_TYPE (t) = ptr_type_node;
      DECL_CONTEXT (t) = current_function_decl;
      TREE_USED (t) = 1;
      DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
      DECL_ARGUMENTS (decl) = t;

      t = build_decl (DECL_SOURCE_LOCATION (decl),
		      PARM_DECL, get_identifier ("__low"), cilk_var_type);
      DECL_ARTIFICIAL (t) = 1;
      DECL_NAMELESS (t) = 1;
      DECL_ARG_TYPE (t) = ptr_type_node;
      DECL_CONTEXT (t) = current_function_decl;
      TREE_USED (t) = 1;
      DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
      DECL_ARGUMENTS (decl) = t;
    }

  /* The .omp_data_i parameter: pointer to the shared data record the
     parent marshals for the child.  */
  tree data_name = get_identifier (".omp_data_i");
  t = build_decl (DECL_SOURCE_LOCATION (decl), PARM_DECL, data_name,
		  ptr_type_node);
  DECL_ARTIFICIAL (t) = 1;
  DECL_NAMELESS (t) = 1;
  DECL_ARG_TYPE (t) = ptr_type_node;
  DECL_CONTEXT (t) = current_function_decl;
  TREE_USED (t) = 1;
  TREE_READONLY (t) = 1;
  if (cilk_for_count)
    DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
  DECL_ARGUMENTS (decl) = t;
  if (!task_copy)
    ctx->receiver_decl = t;
  else
    {
      /* The copy function additionally receives .omp_data_o, the
	 destination record being constructed.  */
      t = build_decl (DECL_SOURCE_LOCATION (decl),
		      PARM_DECL, get_identifier (".omp_data_o"),
		      ptr_type_node);
      DECL_ARTIFICIAL (t) = 1;
      DECL_NAMELESS (t) = 1;
      DECL_ARG_TYPE (t) = ptr_type_node;
      DECL_CONTEXT (t) = current_function_decl;
      TREE_USED (t) = 1;
      TREE_ADDRESSABLE (t) = 1;
      DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
      DECL_ARGUMENTS (decl) = t;
    }

  /* Allocate memory for the function structure.  The call to
     allocate_struct_function clobbers CFUN, so we need to restore
     it afterward.  */
  push_struct_function (decl);
  cfun->function_end_locus = gimple_location (ctx->stmt);
  init_tree_ssa (cfun);
  pop_cfun ();
}
/* Callback for walk_gimple_seq.  Check if combined parallel
   contains gimple_omp_for_combined_into_p OMP_FOR.  WI->info carries
   the gf_mask loop kind to match on entry; on a match it is replaced
   with the found statement and the walk stops (non-NULL return).  */

tree
omp_find_combined_for (gimple_stmt_iterator *gsi_p,
		       bool *handled_ops_p,
		       struct walk_stmt_info *wi)
{
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;
  switch (gimple_code (stmt))
    {
    /* WALK_SUBSTMTS expands to case labels for the statement codes whose
       bodies should be walked into.  */
    WALK_SUBSTMTS;

    case GIMPLE_OMP_FOR:
      if (gimple_omp_for_combined_into_p (stmt)
	  && gimple_omp_for_kind (stmt)
	     == *(const enum gf_mask *) (wi->info))
	{
	  /* Smuggle the matching statement back through WI->info and
	     return non-NULL to terminate the walk.  */
	  wi->info = stmt;
	  return integer_zero_node;
	}
      break;
    default:
      break;
    }
  return NULL;
}
/* Add _LOOPTEMP_ clauses on OpenMP parallel or task.  MSK selects which
   inner loop kind (worksharing for vs. taskloop) to look for inside
   STMT's body; OUTER_CTX is the context enclosing STMT.  */

static void
add_taskreg_looptemp_clauses (enum gf_mask msk, gimple *stmt,
			      omp_context *outer_ctx)
{
  struct walk_stmt_info wi;

  memset (&wi, 0, sizeof (wi));
  wi.val_only = true;
  wi.info = (void *) &msk;
  walk_gimple_seq (gimple_omp_body (stmt), omp_find_combined_for, NULL, &wi);
  /* omp_find_combined_for overwrites wi.info with the found GIMPLE_OMP_FOR;
     if it is unchanged, no combined loop was found and nothing is added.  */
  if (wi.info != (void *) &msk)
    {
      gomp_for *for_stmt = as_a <gomp_for *> ((gimple *) wi.info);
      struct omp_for_data fd;
      omp_extract_for_data (for_stmt, &fd, NULL);
      /* We need two temporaries with fd.loop.v type (istart/iend)
	 and then (fd.collapse - 1) temporaries with the same
	 type for count2 ... countN-1 vars if not constant.  */
      size_t count = 2, i;
      tree type = fd.iter_type;
      if (fd.collapse > 1
	  && TREE_CODE (fd.loop.n2) != INTEGER_CST)
	{
	  count += fd.collapse - 1;
	  /* If there are lastprivate clauses on the inner
	     GIMPLE_OMP_FOR, add one more temporaries for the total number
	     of iterations (product of count1 ... countN-1).  */
	  if (omp_find_clause (gimple_omp_for_clauses (for_stmt),
			       OMP_CLAUSE_LASTPRIVATE))
	    count++;
	  else if (msk == GF_OMP_FOR_KIND_FOR
		   && omp_find_clause (gimple_omp_parallel_clauses (stmt),
				       OMP_CLAUSE_LASTPRIVATE))
	    count++;
	}
      for (i = 0; i < count; i++)
	{
	  tree temp = create_tmp_var (type);
	  tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
	  /* Map the temporary to itself so remapping in the outer context
	     leaves it alone.  */
	  insert_decl_map (&outer_ctx->cb, temp, temp);
	  OMP_CLAUSE_DECL (c) = temp;
	  /* Prepend to STMT's clause chain.  */
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
	  gimple_omp_taskreg_set_clauses (stmt, c);
	}
    }
}
/* Scan an OpenMP parallel directive.  Creates the omp_context, the
   .omp_data_s record type and the outlined child function decl, then
   scans clauses and body within the new context.  */

static void
scan_omp_parallel (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
{
  omp_context *ctx;
  tree name;
  gomp_parallel *stmt = as_a <gomp_parallel *> (gsi_stmt (*gsi));

  /* Ignore parallel directives with empty bodies, unless there
     are copyin clauses.  */
  if (optimize > 0
      && empty_body_p (gimple_omp_body (stmt))
      && omp_find_clause (gimple_omp_parallel_clauses (stmt),
			  OMP_CLAUSE_COPYIN) == NULL)
    {
      gsi_replace (gsi, gimple_build_nop (), false);
      return;
    }

  /* A combined parallel-for needs _looptemp_ clauses added before the
     clauses are scanned below.  */
  if (gimple_omp_parallel_combined_p (stmt))
    add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_FOR, stmt, outer_ctx);

  ctx = new_omp_context (stmt, outer_ctx);
  /* Record for finish_taskreg_scan, which lays out the record type once
     all scanning (and possible addressability changes) is done.  */
  taskreg_contexts.safe_push (ctx);
  if (taskreg_nesting_level > 1)
    ctx->is_nested = true;
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  ctx->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  name = create_tmp_var_name (".omp_data_s");
  name = build_decl (gimple_location (stmt),
		     TYPE_DECL, name, ctx->record_type);
  DECL_ARTIFICIAL (name) = 1;
  DECL_NAMELESS (name) = 1;
  TYPE_NAME (ctx->record_type) = name;
  TYPE_ARTIFICIAL (ctx->record_type) = 1;
  /* Gridified (phony) parallels have no separately outlined body.  */
  if (!gimple_omp_parallel_grid_phony (stmt))
    {
      create_omp_child_function (ctx, false);
      gimple_omp_parallel_set_child_fn (stmt, ctx->cb.dst_fn);
    }

  scan_sharing_clauses (gimple_omp_parallel_clauses (stmt), ctx);
  scan_omp (gimple_omp_body_ptr (stmt), ctx);

  /* If nothing ended up in the data-sharing record, drop it entirely.  */
  if (TYPE_FIELDS (ctx->record_type) == NULL)
    ctx->record_type = ctx->receiver_decl = NULL;
}
/* Scan an OpenMP task directive.  Like scan_omp_parallel, but also
   handles the sender-side record (.omp_data_a) and the task copy
   function when one is needed.  */

static void
scan_omp_task (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
{
  omp_context *ctx;
  tree name, t;
  gomp_task *stmt = as_a <gomp_task *> (gsi_stmt (*gsi));

  /* Ignore task directives with empty bodies, unless they have depend
     clause.  */
  if (optimize > 0
      && empty_body_p (gimple_omp_body (stmt))
      && !omp_find_clause (gimple_omp_task_clauses (stmt), OMP_CLAUSE_DEPEND))
    {
      gsi_replace (gsi, gimple_build_nop (), false);
      return;
    }

  /* A taskloop needs _looptemp_ clauses for GOMP_taskloop.  */
  if (gimple_omp_task_taskloop_p (stmt))
    add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_TASKLOOP, stmt, outer_ctx);

  ctx = new_omp_context (stmt, outer_ctx);
  taskreg_contexts.safe_push (ctx);
  if (taskreg_nesting_level > 1)
    ctx->is_nested = true;
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  ctx->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  name = create_tmp_var_name (".omp_data_s");
  name = build_decl (gimple_location (stmt),
		     TYPE_DECL, name, ctx->record_type);
  DECL_ARTIFICIAL (name) = 1;
  DECL_NAMELESS (name) = 1;
  TYPE_NAME (ctx->record_type) = name;
  TYPE_ARTIFICIAL (ctx->record_type) = 1;
  create_omp_child_function (ctx, false);
  gimple_omp_task_set_child_fn (stmt, ctx->cb.dst_fn);

  scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);

  /* srecord_type is created by scan_sharing_clauses when a sender-side
     copy of the data record is required; it then also needs the task
     copy function.  */
  if (ctx->srecord_type)
    {
      name = create_tmp_var_name (".omp_data_a");
      name = build_decl (gimple_location (stmt),
			 TYPE_DECL, name, ctx->srecord_type);
      DECL_ARTIFICIAL (name) = 1;
      DECL_NAMELESS (name) = 1;
      TYPE_NAME (ctx->srecord_type) = name;
      TYPE_ARTIFICIAL (ctx->srecord_type) = 1;
      create_omp_child_function (ctx, true);
    }

  scan_omp (gimple_omp_body_ptr (stmt), ctx);

  /* With no fields to marshal, drop the record and tell the runtime the
     argument block is empty (size 0, alignment 1).  */
  if (TYPE_FIELDS (ctx->record_type) == NULL)
    {
      ctx->record_type = ctx->receiver_decl = NULL;
      t = build_int_cst (long_integer_type_node, 0);
      gimple_omp_task_set_arg_size (stmt, t);
      t = build_int_cst (long_integer_type_node, 1);
      gimple_omp_task_set_arg_align (stmt, t);
    }
}
/* If any decls have been made addressable during scan_omp,
   adjust their fields if needed, and layout record types
   of parallel/task constructs.  */

static void
finish_taskreg_scan (omp_context *ctx)
{
  if (ctx->record_type == NULL_TREE)
    return;

  /* If any task_shared_vars were needed, verify all
     OMP_CLAUSE_SHARED clauses on GIMPLE_OMP_{PARALLEL,TASK}
     statements if use_pointer_for_field hasn't changed
     because of that.  If it did, update field types now.  */
  if (task_shared_vars)
    {
      tree c;

      for (c = gimple_omp_taskreg_clauses (ctx->stmt);
	   c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
	    && !OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	  {
	    tree decl = OMP_CLAUSE_DECL (c);

	    /* Global variables don't need to be copied,
	       the receiver side will use them directly.  */
	    if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
	      continue;
	    if (!bitmap_bit_p (task_shared_vars, DECL_UID (decl))
		|| !use_pointer_for_field (decl, ctx))
	      continue;
	    tree field = lookup_field (decl, ctx);
	    /* Already a pointer-to-DECL's-type field: nothing to fix.  */
	    if (TREE_CODE (TREE_TYPE (field)) == POINTER_TYPE
		&& TREE_TYPE (TREE_TYPE (field)) == TREE_TYPE (decl))
	      continue;
	    /* The field was created by-value but the var is now passed
	       by reference; retype it and reset value-specific flags.  */
	    TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
	    TREE_THIS_VOLATILE (field) = 0;
	    DECL_USER_ALIGN (field) = 0;
	    SET_DECL_ALIGN (field, TYPE_ALIGN (TREE_TYPE (field)));
	    if (TYPE_ALIGN (ctx->record_type) < DECL_ALIGN (field))
	      SET_TYPE_ALIGN (ctx->record_type, DECL_ALIGN (field));
	    if (ctx->srecord_type)
	      {
		/* Keep the sender-side field in sync.  */
		tree sfield = lookup_sfield (decl, ctx);
		TREE_TYPE (sfield) = TREE_TYPE (field);
		TREE_THIS_VOLATILE (sfield) = 0;
		DECL_USER_ALIGN (sfield) = 0;
		SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
		if (TYPE_ALIGN (ctx->srecord_type) < DECL_ALIGN (sfield))
		  SET_TYPE_ALIGN (ctx->srecord_type, DECL_ALIGN (sfield));
	      }
	  }
    }

  if (gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL)
    {
      layout_type (ctx->record_type);
      fixup_child_record_type (ctx);
    }
  else
    {
      location_t loc = gimple_location (ctx->stmt);
      tree *p, vla_fields = NULL_TREE, *q = &vla_fields;
      /* Move VLA fields to the end.  Fields with non-constant size are
	 unlinked from the chain via P and appended onto VLA_FIELDS via Q,
	 then the whole list is spliced back at the tail.  */
      p = &TYPE_FIELDS (ctx->record_type);
      while (*p)
	if (!TYPE_SIZE_UNIT (TREE_TYPE (*p))
	    || ! TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (*p))))
	  {
	    *q = *p;
	    *p = TREE_CHAIN (*p);
	    TREE_CHAIN (*q) = NULL_TREE;
	    q = &TREE_CHAIN (*q);
	  }
	else
	  p = &DECL_CHAIN (*p);
      *p = vla_fields;
      if (gimple_omp_task_taskloop_p (ctx->stmt))
	{
	  /* Move fields corresponding to first and second _looptemp_
	     clause first.  There are filled by GOMP_taskloop
	     and thus need to be in specific positions.  */
	  tree c1 = gimple_omp_task_clauses (ctx->stmt);
	  c1 = omp_find_clause (c1, OMP_CLAUSE__LOOPTEMP_);
	  tree c2 = omp_find_clause (OMP_CLAUSE_CHAIN (c1),
				     OMP_CLAUSE__LOOPTEMP_);
	  tree f1 = lookup_field (OMP_CLAUSE_DECL (c1), ctx);
	  tree f2 = lookup_field (OMP_CLAUSE_DECL (c2), ctx);
	  /* Unlink F1 and F2 wherever they are, then relink them at the
	     head of the field list, in order.  */
	  p = &TYPE_FIELDS (ctx->record_type);
	  while (*p)
	    if (*p == f1 || *p == f2)
	      *p = DECL_CHAIN (*p);
	    else
	      p = &DECL_CHAIN (*p);
	  DECL_CHAIN (f1) = f2;
	  DECL_CHAIN (f2) = TYPE_FIELDS (ctx->record_type);
	  TYPE_FIELDS (ctx->record_type) = f1;
	  if (ctx->srecord_type)
	    {
	      /* Same reordering for the sender-side record.  */
	      f1 = lookup_sfield (OMP_CLAUSE_DECL (c1), ctx);
	      f2 = lookup_sfield (OMP_CLAUSE_DECL (c2), ctx);
	      p = &TYPE_FIELDS (ctx->srecord_type);
	      while (*p)
		if (*p == f1 || *p == f2)
		  *p = DECL_CHAIN (*p);
		else
		  p = &DECL_CHAIN (*p);
	      DECL_CHAIN (f1) = f2;
	      DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
	      TYPE_FIELDS (ctx->srecord_type) = f1;
	    }
	}
      layout_type (ctx->record_type);
      fixup_child_record_type (ctx);
      if (ctx->srecord_type)
	layout_type (ctx->srecord_type);
      /* Tell the runtime the argument block's size and alignment.  */
      tree t = fold_convert_loc (loc, long_integer_type_node,
				 TYPE_SIZE_UNIT (ctx->record_type));
      gimple_omp_task_set_arg_size (ctx->stmt, t);
      t = build_int_cst (long_integer_type_node,
			 TYPE_ALIGN_UNIT (ctx->record_type));
      gimple_omp_task_set_arg_align (ctx->stmt, t);
    }
}
2043 /* Find the enclosing offload context. */
2045 static omp_context *
2046 enclosing_target_ctx (omp_context *ctx)
2048 for (; ctx; ctx = ctx->outer)
2049 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET)
2050 break;
2052 return ctx;
2055 /* Return true if ctx is part of an oacc kernels region. */
2057 static bool
2058 ctx_in_oacc_kernels_region (omp_context *ctx)
2060 for (;ctx != NULL; ctx = ctx->outer)
2062 gimple *stmt = ctx->stmt;
2063 if (gimple_code (stmt) == GIMPLE_OMP_TARGET
2064 && gimple_omp_target_kind (stmt) == GF_OMP_TARGET_KIND_OACC_KERNELS)
2065 return true;
2068 return false;
/* Check the parallelism clauses inside a kernels regions.
   Until kernels handling moves to use the same loop indirection
   scheme as parallel, we need to do this checking early.
   Returns the union of the gang/worker/vector mask of STMT and all
   enclosing loops; diagnostics are only emitted at the outermost
   (STMT != NULL) invocation.  */

static unsigned
check_oacc_kernel_gwv (gomp_for *stmt, omp_context *ctx)
{
  bool checking = true;
  unsigned outer_mask = 0;
  unsigned this_mask = 0;
  bool has_seq = false, has_auto = false;

  /* Recurse outward first to accumulate the parallelism already used by
     enclosing loops.  Inner (recursive) calls pass STMT == NULL so they
     collect masks without diagnosing.  */
  if (ctx->outer)
    outer_mask = check_oacc_kernel_gwv (NULL, ctx->outer);
  if (!stmt)
    {
      checking = false;
      if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR)
	return outer_mask;
      stmt = as_a <gomp_for *> (ctx->stmt);
    }

  for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
    {
      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_GANG:
	  this_mask |= GOMP_DIM_MASK (GOMP_DIM_GANG);
	  break;
	case OMP_CLAUSE_WORKER:
	  this_mask |= GOMP_DIM_MASK (GOMP_DIM_WORKER);
	  break;
	case OMP_CLAUSE_VECTOR:
	  this_mask |= GOMP_DIM_MASK (GOMP_DIM_VECTOR);
	  break;
	case OMP_CLAUSE_SEQ:
	  has_seq = true;
	  break;
	case OMP_CLAUSE_AUTO:
	  has_auto = true;
	  break;
	default:
	  break;
	}
    }

  if (checking)
    {
      /* seq/auto/explicit parallelism are mutually exclusive, and a loop
	 may not reuse a parallelism level claimed by a containing loop.  */
      if (has_seq && (this_mask || has_auto))
	error_at (gimple_location (stmt), "%<seq%> overrides other"
		  " OpenACC loop specifiers");
      else if (has_auto && this_mask)
	error_at (gimple_location (stmt), "%<auto%> conflicts with other"
		  " OpenACC loop specifiers");

      if (this_mask & outer_mask)
	error_at (gimple_location (stmt), "inner loop uses same"
		  " OpenACC parallelism as containing loop");
    }

  return outer_mask | this_mask;
}
/* Scan a GIMPLE_OMP_FOR.  Creates a new context, performs the OpenACC
   clause checking/stripping, then scans clauses, pre-body, loop controls
   and body within that context.  Returns the new context.  */

static omp_context *
scan_omp_for (gomp_for *stmt, omp_context *outer_ctx)
{
  omp_context *ctx;
  size_t i;
  tree clauses = gimple_omp_for_clauses (stmt);

  ctx = new_omp_context (stmt, outer_ctx);

  if (is_gimple_omp_oacc (stmt))
    {
      omp_context *tgt = enclosing_target_ctx (outer_ctx);

      /* In (or effectively in) an OpenACC parallel region, gang/worker/
	 vector clauses may not carry arguments.  */
      if (!tgt || is_oacc_parallel (tgt))
	for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
	  {
	    char const *check = NULL;

	    switch (OMP_CLAUSE_CODE (c))
	      {
	      case OMP_CLAUSE_GANG:
		check = "gang";
		break;

	      case OMP_CLAUSE_WORKER:
		check = "worker";
		break;

	      case OMP_CLAUSE_VECTOR:
		check = "vector";
		break;

	      default:
		break;
	      }

	    if (check && OMP_CLAUSE_OPERAND (c, 0))
	      error_at (gimple_location (stmt),
			"argument not permitted on %qs clause in"
			" OpenACC %<parallel%>", check);
	  }

      if (tgt && is_oacc_kernels (tgt))
	{
	  /* Strip out reductions, as they are not handled yet.  */
	  tree *prev_ptr = &clauses;

	  while (tree probe = *prev_ptr)
	    {
	      tree *next_ptr = &OMP_CLAUSE_CHAIN (probe);

	      /* Unlink reduction clauses in place.  */
	      if (OMP_CLAUSE_CODE (probe) == OMP_CLAUSE_REDUCTION)
		*prev_ptr = *next_ptr;
	      else
		prev_ptr = next_ptr;
	    }

	  gimple_omp_for_set_clauses (stmt, clauses);
	  check_oacc_kernel_gwv (stmt, ctx);
	}
    }

  scan_sharing_clauses (clauses, ctx);

  scan_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
  for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
    {
      /* Remap index, bounds and increment of each collapsed dimension.  */
      scan_omp_op (gimple_omp_for_index_ptr (stmt, i), ctx);
      scan_omp_op (gimple_omp_for_initial_ptr (stmt, i), ctx);
      scan_omp_op (gimple_omp_for_final_ptr (stmt, i), ctx);
      scan_omp_op (gimple_omp_for_incr_ptr (stmt, i), ctx);
    }
  scan_omp (gimple_omp_body_ptr (stmt), ctx);
  return ctx;
}
/* Duplicate #pragma omp simd, one for SIMT, another one for SIMD.  */

static void
scan_omp_simd (gimple_stmt_iterator *gsi, gomp_for *stmt,
               omp_context *outer_ctx)
{
  /* Replace STMT with a bind holding:
       cond = IFN_GOMP_USE_SIMT ();
       if (cond) goto lab1; else goto lab2;
       lab1: <copy of STMT with _simt_ clause>; goto lab3;
       lab2: <original STMT>;
       lab3:
     so the target-dependent choice is resolved later.  */
  gbind *bind = gimple_build_bind (NULL, NULL, NULL);
  gsi_replace (gsi, bind, false);
  gimple_seq seq = NULL;
  gimple *g = gimple_build_call_internal (IFN_GOMP_USE_SIMT, 0);
  tree cond = create_tmp_var_raw (integer_type_node);
  DECL_CONTEXT (cond) = current_function_decl;
  DECL_SEEN_IN_BIND_EXPR_P (cond) = 1;
  gimple_bind_set_vars (bind, cond);
  gimple_call_set_lhs (g, cond);
  gimple_seq_add_stmt (&seq, g);
  tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
  tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
  tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
  g = gimple_build_cond (NE_EXPR, cond, integer_zero_node, lab1, lab2);
  gimple_seq_add_stmt (&seq, g);
  g = gimple_build_label (lab1);
  gimple_seq_add_stmt (&seq, g);
  /* The SIMT copy of the loop, with fresh local decls and an extra
     _simt_ clause prepended to its clause list.  */
  gimple_seq new_seq = copy_gimple_seq_and_replace_locals (stmt);
  gomp_for *new_stmt = as_a <gomp_for *> (new_seq);
  tree clause = build_omp_clause (gimple_location (stmt), OMP_CLAUSE__SIMT_);
  OMP_CLAUSE_CHAIN (clause) = gimple_omp_for_clauses (new_stmt);
  gimple_omp_for_set_clauses (new_stmt, clause);
  gimple_seq_add_stmt (&seq, new_stmt);
  g = gimple_build_goto (lab3);
  gimple_seq_add_stmt (&seq, g);
  g = gimple_build_label (lab2);
  gimple_seq_add_stmt (&seq, g);
  /* The original loop is the SIMD (non-SIMT) alternative.  */
  gimple_seq_add_stmt (&seq, stmt);
  g = gimple_build_label (lab3);
  gimple_seq_add_stmt (&seq, g);
  gimple_bind_set_body (bind, seq);
  update_stmt (bind);
  /* Scan both copies; link the SIMD context to its SIMT twin.  */
  scan_omp_for (new_stmt, outer_ctx);
  scan_omp_for (stmt, outer_ctx)->simt_stmt = new_stmt;
}
2254 /* Scan an OpenMP sections directive. */
2256 static void
2257 scan_omp_sections (gomp_sections *stmt, omp_context *outer_ctx)
2259 omp_context *ctx;
2261 ctx = new_omp_context (stmt, outer_ctx);
2262 scan_sharing_clauses (gimple_omp_sections_clauses (stmt), ctx);
2263 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2266 /* Scan an OpenMP single directive. */
2268 static void
2269 scan_omp_single (gomp_single *stmt, omp_context *outer_ctx)
2271 omp_context *ctx;
2272 tree name;
2274 ctx = new_omp_context (stmt, outer_ctx);
2275 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2276 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2277 name = create_tmp_var_name (".omp_copy_s");
2278 name = build_decl (gimple_location (stmt),
2279 TYPE_DECL, name, ctx->record_type);
2280 TYPE_NAME (ctx->record_type) = name;
2282 scan_sharing_clauses (gimple_omp_single_clauses (stmt), ctx);
2283 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2285 if (TYPE_FIELDS (ctx->record_type) == NULL)
2286 ctx->record_type = NULL;
2287 else
2288 layout_type (ctx->record_type);
/* Return true if the CLAUSES of an omp target guarantee that the base pointers
   used in the corresponding offloaded function are restrict.  */

static bool
omp_target_base_pointers_restrict_p (tree clauses)
{
  /* The analysis relies on the GOMP_MAP_FORCE_* mapping kinds, which are only
     used by OpenACC.  */
  if (flag_openacc == 0)
    return false;

  /* I.  Basic example:

       void foo (void)
       {
	 unsigned int a[2], b[2];

	 #pragma acc kernels \
	   copyout (a) \
	   copyout (b)
	 {
	   a[0] = 0;
	   b[0] = 1;
	 }
       }

     After gimplification, we have:

       #pragma omp target oacc_kernels \
	 map(force_from:a [len: 8]) \
	 map(force_from:b [len: 8])
       {
	 a[0] = 0;
	 b[0] = 1;
       }

     Because both mappings have the force prefix, we know that they will be
     allocated when calling the corresponding offloaded function, which means we
     can mark the base pointers for a and b in the offloaded function as
     restrict.  */

  /* Every clause must be a map with one of the FORCE_* kinds; any other
     clause or map kind defeats the analysis.  */
  tree c;
  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP)
	return false;

      switch (OMP_CLAUSE_MAP_KIND (c))
	{
	case GOMP_MAP_FORCE_ALLOC:
	case GOMP_MAP_FORCE_TO:
	case GOMP_MAP_FORCE_FROM:
	case GOMP_MAP_FORCE_TOFROM:
	  break;
	default:
	  return false;
	}
    }

  return true;
}
/* Scan a GIMPLE_OMP_TARGET STMT nested in OUTER_CTX: create its context
   and the .omp_data_t record describing the data-mapping block, create
   the child function for offloaded regions, then scan clauses and body
   and finalize the record's layout.  */

static void
scan_omp_target (gomp_target *stmt, omp_context *outer_ctx)
{
  omp_context *ctx;
  tree name;
  bool offloaded = is_gimple_omp_offloaded (stmt);
  tree clauses = gimple_omp_target_clauses (stmt);

  ctx = new_omp_context (stmt, outer_ctx);
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  ctx->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  name = create_tmp_var_name (".omp_data_t");
  name = build_decl (gimple_location (stmt),
		     TYPE_DECL, name, ctx->record_type);
  DECL_ARTIFICIAL (name) = 1;
  DECL_NAMELESS (name) = 1;
  TYPE_NAME (ctx->record_type) = name;
  TYPE_ARTIFICIAL (ctx->record_type) = 1;

  bool base_pointers_restrict = false;
  if (offloaded)
    {
      create_omp_child_function (ctx, false);
      gimple_omp_target_set_child_fn (stmt, ctx->cb.dst_fn);

      /* When every mapping forces allocation, base pointers in the child
	 function cannot alias and may be marked restrict.  */
      base_pointers_restrict = omp_target_base_pointers_restrict_p (clauses);
      if (base_pointers_restrict
	  && dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file,
		 "Base pointers in offloaded function are restrict\n");
    }

  scan_sharing_clauses (clauses, ctx, base_pointers_restrict);
  scan_omp (gimple_omp_body_ptr (stmt), ctx);

  if (TYPE_FIELDS (ctx->record_type) == NULL)
    ctx->record_type = ctx->receiver_decl = NULL;
  else
    {
      /* Fields were chained in reverse while scanning; restore order.  */
      TYPE_FIELDS (ctx->record_type)
	= nreverse (TYPE_FIELDS (ctx->record_type));
      if (flag_checking)
	{
	  /* All .omp_data_t fields are expected to share one alignment.  */
	  unsigned int align = DECL_ALIGN (TYPE_FIELDS (ctx->record_type));
	  for (tree field = TYPE_FIELDS (ctx->record_type);
	       field;
	       field = DECL_CHAIN (field))
	    gcc_assert (DECL_ALIGN (field) == align);
	}
      layout_type (ctx->record_type);
      if (offloaded)
	fixup_child_record_type (ctx);
    }
}
2411 /* Scan an OpenMP teams directive. */
2413 static void
2414 scan_omp_teams (gomp_teams *stmt, omp_context *outer_ctx)
2416 omp_context *ctx = new_omp_context (stmt, outer_ctx);
2417 scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
2418 scan_omp (gimple_omp_body_ptr (stmt), ctx);
/* Check nesting restrictions for STMT, which appears inside the region
   described by CTX (NULL when STMT is not nested in any construct).
   Emits a diagnostic and returns false on a violation; returns true when
   STMT is acceptable where it stands.  */
static bool
check_omp_nesting_restrictions (gimple *stmt, omp_context *ctx)
{
  tree c;

  if (ctx && gimple_code (ctx->stmt) == GIMPLE_OMP_GRID_BODY)
    /* GRID_BODY is an artificial construct, nesting rules will be checked in
       the original copy of its contents.  */
    return true;

  /* No nesting of non-OpenACC STMT (that is, an OpenMP one, or a GOMP builtin)
     inside an OpenACC CTX.  */
  if (!(is_gimple_omp (stmt)
	&& is_gimple_omp_oacc (stmt))
      /* Except for atomic codes that we share with OpenMP.  */
      && !(gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
	   || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE))
    {
      if (oacc_get_fn_attrib (cfun->decl) != NULL)
	{
	  error_at (gimple_location (stmt),
		    "non-OpenACC construct inside of OpenACC routine");
	  return false;
	}
      else
	for (omp_context *octx = ctx; octx != NULL; octx = octx->outer)
	  if (is_gimple_omp (octx->stmt)
	      && is_gimple_omp_oacc (octx->stmt))
	    {
	      error_at (gimple_location (stmt),
			"non-OpenACC construct inside of OpenACC region");
	      return false;
	    }
    }

  if (ctx != NULL)
    {
      if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
	  && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
	{
	  c = NULL_TREE;
	  /* Only 'ordered simd' (optionally with 'threads') may appear
	     inside a simd region.  */
	  if (gimple_code (stmt) == GIMPLE_OMP_ORDERED)
	    {
	      c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
	      if (omp_find_clause (c, OMP_CLAUSE_SIMD))
		{
		  if (omp_find_clause (c, OMP_CLAUSE_THREADS)
		      && (ctx->outer == NULL
			  || !gimple_omp_for_combined_into_p (ctx->stmt)
			  || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR
			  || (gimple_omp_for_kind (ctx->outer->stmt)
			      != GF_OMP_FOR_KIND_FOR)
			  || !gimple_omp_for_combined_p (ctx->outer->stmt)))
		    {
		      error_at (gimple_location (stmt),
				"%<ordered simd threads%> must be closely "
				"nested inside of %<for simd%> region");
		      return false;
		    }
		  return true;
		}
	    }
	  error_at (gimple_location (stmt),
		    "OpenMP constructs other than %<#pragma omp ordered simd%>"
		    " may not be nested inside %<simd%> region");
	  return false;
	}
      else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
	{
	  if ((gimple_code (stmt) != GIMPLE_OMP_FOR
	       || ((gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_DISTRIBUTE)
		   && (gimple_omp_for_kind (stmt)
		       != GF_OMP_FOR_KIND_GRID_LOOP)))
	      && gimple_code (stmt) != GIMPLE_OMP_PARALLEL)
	    {
	      error_at (gimple_location (stmt),
			"only %<distribute%> or %<parallel%> regions are "
			"allowed to be strictly nested inside %<teams%> "
			"region");
	      return false;
	    }
	}
    }
  switch (gimple_code (stmt))
    {
    case GIMPLE_OMP_FOR:
      if (gimple_omp_for_kind (stmt) & GF_OMP_FOR_SIMD)
	return true;
      if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_DISTRIBUTE)
	{
	  if (ctx != NULL && gimple_code (ctx->stmt) != GIMPLE_OMP_TEAMS)
	    {
	      error_at (gimple_location (stmt),
			"%<distribute%> region must be strictly nested "
			"inside %<teams%> construct");
	      return false;
	    }
	  return true;
	}
      /* We split taskloop into task and nested taskloop in it.  */
      if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP)
	return true;
      if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_OACC_LOOP)
	{
	  bool ok = false;

	  if (ctx)
	    switch (gimple_code (ctx->stmt))
	      {
	      case GIMPLE_OMP_FOR:
		ok = (gimple_omp_for_kind (ctx->stmt)
		      == GF_OMP_FOR_KIND_OACC_LOOP);
		break;

	      case GIMPLE_OMP_TARGET:
		switch (gimple_omp_target_kind (ctx->stmt))
		  {
		  case GF_OMP_TARGET_KIND_OACC_PARALLEL:
		  case GF_OMP_TARGET_KIND_OACC_KERNELS:
		    ok = true;
		    break;

		  default:
		    break;
		  }
		/* FALLTHRU */

	      default:
		break;
	      }
	  else if (oacc_get_fn_attrib (current_function_decl))
	    ok = true;
	  if (!ok)
	    {
	      error_at (gimple_location (stmt),
			"OpenACC loop directive must be associated with"
			" an OpenACC compute region");
	      return false;
	    }
	}
      /* FALLTHRU */
    case GIMPLE_CALL:
      /* Diagnose 'cancel' / 'cancellation point' builtins: the first call
	 argument encodes the binding region kind (1 parallel, 2 for,
	 4 sections, 8 taskgroup).  */
      if (is_gimple_call (stmt)
	  && (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
	      == BUILT_IN_GOMP_CANCEL
	      || DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
		 == BUILT_IN_GOMP_CANCELLATION_POINT))
	{
	  const char *bad = NULL;
	  const char *kind = NULL;
	  const char *construct
	    = (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
	       == BUILT_IN_GOMP_CANCEL)
	      ? "#pragma omp cancel"
	      : "#pragma omp cancellation point";
	  if (ctx == NULL)
	    {
	      error_at (gimple_location (stmt), "orphaned %qs construct",
			construct);
	      return false;
	    }
	  switch (tree_fits_shwi_p (gimple_call_arg (stmt, 0))
		  ? tree_to_shwi (gimple_call_arg (stmt, 0))
		  : 0)
	    {
	    case 1:
	      if (gimple_code (ctx->stmt) != GIMPLE_OMP_PARALLEL)
		bad = "#pragma omp parallel";
	      else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
		       == BUILT_IN_GOMP_CANCEL
		       && !integer_zerop (gimple_call_arg (stmt, 1)))
		ctx->cancellable = true;
	      kind = "parallel";
	      break;
	    case 2:
	      if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
		  || gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR)
		bad = "#pragma omp for";
	      else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
		       == BUILT_IN_GOMP_CANCEL
		       && !integer_zerop (gimple_call_arg (stmt, 1)))
		{
		  ctx->cancellable = true;
		  if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
				       OMP_CLAUSE_NOWAIT))
		    warning_at (gimple_location (stmt), 0,
				"%<#pragma omp cancel for%> inside "
				"%<nowait%> for construct");
		  if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
				       OMP_CLAUSE_ORDERED))
		    warning_at (gimple_location (stmt), 0,
				"%<#pragma omp cancel for%> inside "
				"%<ordered%> for construct");
		}
	      kind = "for";
	      break;
	    case 4:
	      if (gimple_code (ctx->stmt) != GIMPLE_OMP_SECTIONS
		  && gimple_code (ctx->stmt) != GIMPLE_OMP_SECTION)
		bad = "#pragma omp sections";
	      else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
		       == BUILT_IN_GOMP_CANCEL
		       && !integer_zerop (gimple_call_arg (stmt, 1)))
		{
		  if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
		    {
		      ctx->cancellable = true;
		      if (omp_find_clause (gimple_omp_sections_clauses
								(ctx->stmt),
					   OMP_CLAUSE_NOWAIT))
			warning_at (gimple_location (stmt), 0,
				    "%<#pragma omp cancel sections%> inside "
				    "%<nowait%> sections construct");
		    }
		  else
		    {
		      /* Inside a SECTION, the SECTIONS construct is the
			 parent context.  */
		      gcc_assert (ctx->outer
				  && gimple_code (ctx->outer->stmt)
				     == GIMPLE_OMP_SECTIONS);
		      ctx->outer->cancellable = true;
		      if (omp_find_clause (gimple_omp_sections_clauses
							(ctx->outer->stmt),
					   OMP_CLAUSE_NOWAIT))
			warning_at (gimple_location (stmt), 0,
				    "%<#pragma omp cancel sections%> inside "
				    "%<nowait%> sections construct");
		    }
		}
	      kind = "sections";
	      break;
	    case 8:
	      if (gimple_code (ctx->stmt) != GIMPLE_OMP_TASK)
		bad = "#pragma omp task";
	      else
		{
		  /* Walk outwards; a parallel/teams/target region reached
		     before any taskgroup means there is no binding
		     taskgroup.  */
		  for (omp_context *octx = ctx->outer;
		       octx; octx = octx->outer)
		    {
		      switch (gimple_code (octx->stmt))
			{
			case GIMPLE_OMP_TASKGROUP:
			  break;
			case GIMPLE_OMP_TARGET:
			  if (gimple_omp_target_kind (octx->stmt)
			      != GF_OMP_TARGET_KIND_REGION)
			    continue;
			  /* FALLTHRU */
			case GIMPLE_OMP_PARALLEL:
			case GIMPLE_OMP_TEAMS:
			  error_at (gimple_location (stmt),
				    "%<%s taskgroup%> construct not closely "
				    "nested inside of %<taskgroup%> region",
				    construct);
			  return false;
			default:
			  continue;
			}
		      break;
		    }
		  ctx->cancellable = true;
		}
	      kind = "taskgroup";
	      break;
	    default:
	      error_at (gimple_location (stmt), "invalid arguments");
	      return false;
	    }
	  if (bad)
	    {
	      error_at (gimple_location (stmt),
			"%<%s %s%> construct not closely nested inside of %qs",
			construct, kind, bad);
	      return false;
	    }
	}
      /* FALLTHRU */
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
      for (; ctx != NULL; ctx = ctx->outer)
	switch (gimple_code (ctx->stmt))
	  {
	  case GIMPLE_OMP_FOR:
	    if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
		&& gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
	      break;
	    /* FALLTHRU */
	  case GIMPLE_OMP_SECTIONS:
	  case GIMPLE_OMP_SINGLE:
	  case GIMPLE_OMP_ORDERED:
	  case GIMPLE_OMP_MASTER:
	  case GIMPLE_OMP_TASK:
	  case GIMPLE_OMP_CRITICAL:
	    if (is_gimple_call (stmt))
	      {
		/* Of the GOMP builtins routed here, only BARRIER is
		   restricted; the rest are always allowed.  */
		if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
		    != BUILT_IN_GOMP_BARRIER)
		  return true;
		error_at (gimple_location (stmt),
			  "barrier region may not be closely nested inside "
			  "of work-sharing, %<critical%>, %<ordered%>, "
			  "%<master%>, explicit %<task%> or %<taskloop%> "
			  "region");
		return false;
	      }
	    error_at (gimple_location (stmt),
		      "work-sharing region may not be closely nested inside "
		      "of work-sharing, %<critical%>, %<ordered%>, "
		      "%<master%>, explicit %<task%> or %<taskloop%> region");
	    return false;
	  case GIMPLE_OMP_PARALLEL:
	  case GIMPLE_OMP_TEAMS:
	    return true;
	  case GIMPLE_OMP_TARGET:
	    if (gimple_omp_target_kind (ctx->stmt)
		== GF_OMP_TARGET_KIND_REGION)
	      return true;
	    break;
	  default:
	    break;
	  }
      break;
    case GIMPLE_OMP_MASTER:
      for (; ctx != NULL; ctx = ctx->outer)
	switch (gimple_code (ctx->stmt))
	  {
	  case GIMPLE_OMP_FOR:
	    if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
		&& gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
	      break;
	    /* FALLTHRU */
	  case GIMPLE_OMP_SECTIONS:
	  case GIMPLE_OMP_SINGLE:
	  case GIMPLE_OMP_TASK:
	    error_at (gimple_location (stmt),
		      "%<master%> region may not be closely nested inside "
		      "of work-sharing, explicit %<task%> or %<taskloop%> "
		      "region");
	    return false;
	  case GIMPLE_OMP_PARALLEL:
	  case GIMPLE_OMP_TEAMS:
	    return true;
	  case GIMPLE_OMP_TARGET:
	    if (gimple_omp_target_kind (ctx->stmt)
		== GF_OMP_TARGET_KIND_REGION)
	      return true;
	    break;
	  default:
	    break;
	  }
      break;
    case GIMPLE_OMP_TASK:
      /* depend(source)/depend(sink:...) are only valid on 'omp ordered'.  */
      for (c = gimple_omp_task_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
	    && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
		|| OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
	  {
	    enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
	    error_at (OMP_CLAUSE_LOCATION (c),
		      "%<depend(%s)%> is only allowed in %<omp ordered%>",
		      kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
	    return false;
	  }
      break;
    case GIMPLE_OMP_ORDERED:
      for (c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
	   c; c = OMP_CLAUSE_CHAIN (c))
	{
	  if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
	    {
	      gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREADS
			  || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SIMD);
	      continue;
	    }
	  enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
	  if (kind == OMP_CLAUSE_DEPEND_SOURCE
	      || kind == OMP_CLAUSE_DEPEND_SINK)
	    {
	      tree oclause;
	      /* Look for containing ordered(N) loop.  */
	      if (ctx == NULL
		  || gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
		  || (oclause
		      = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
					 OMP_CLAUSE_ORDERED)) == NULL_TREE)
		{
		  error_at (OMP_CLAUSE_LOCATION (c),
			    "%<ordered%> construct with %<depend%> clause "
			    "must be closely nested inside an %<ordered%> "
			    "loop");
		  return false;
		}
	      else if (OMP_CLAUSE_ORDERED_EXPR (oclause) == NULL_TREE)
		{
		  error_at (OMP_CLAUSE_LOCATION (c),
			    "%<ordered%> construct with %<depend%> clause "
			    "must be closely nested inside a loop with "
			    "%<ordered%> clause with a parameter");
		  return false;
		}
	    }
	  else
	    {
	      error_at (OMP_CLAUSE_LOCATION (c),
			"invalid depend kind in omp %<ordered%> %<depend%>");
	      return false;
	    }
	}
      c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
      if (omp_find_clause (c, OMP_CLAUSE_SIMD))
	{
	  /* ordered simd must be closely nested inside of simd region,
	     and simd region must not encounter constructs other than
	     ordered simd, therefore ordered simd may be either orphaned,
	     or ctx->stmt must be simd.  The latter case is handled already
	     earlier.  */
	  if (ctx != NULL)
	    {
	      error_at (gimple_location (stmt),
			"%<ordered%> %<simd%> must be closely nested inside "
			"%<simd%> region");
	      return false;
	    }
	}
      for (; ctx != NULL; ctx = ctx->outer)
	switch (gimple_code (ctx->stmt))
	  {
	  case GIMPLE_OMP_CRITICAL:
	  case GIMPLE_OMP_TASK:
	  case GIMPLE_OMP_ORDERED:
	  ordered_in_taskloop:
	    error_at (gimple_location (stmt),
		      "%<ordered%> region may not be closely nested inside "
		      "of %<critical%>, %<ordered%>, explicit %<task%> or "
		      "%<taskloop%> region");
	    return false;
	  case GIMPLE_OMP_FOR:
	    if (gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP)
	      goto ordered_in_taskloop;
	    if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
				 OMP_CLAUSE_ORDERED) == NULL)
	      {
		error_at (gimple_location (stmt),
			  "%<ordered%> region must be closely nested inside "
			  "a loop region with an %<ordered%> clause");
		return false;
	      }
	    return true;
	  case GIMPLE_OMP_TARGET:
	    if (gimple_omp_target_kind (ctx->stmt)
		!= GF_OMP_TARGET_KIND_REGION)
	      break;
	    /* FALLTHRU */
	  case GIMPLE_OMP_PARALLEL:
	  case GIMPLE_OMP_TEAMS:
	    error_at (gimple_location (stmt),
		      "%<ordered%> region must be closely nested inside "
		      "a loop region with an %<ordered%> clause");
	    return false;
	  default:
	    break;
	  }
      break;
    case GIMPLE_OMP_CRITICAL:
      {
	/* A critical region may not be nested (at any depth) in another
	   critical region with the same name.  */
	tree this_stmt_name
	  = gimple_omp_critical_name (as_a <gomp_critical *> (stmt));
	for (; ctx != NULL; ctx = ctx->outer)
	  if (gomp_critical *other_crit
	      = dyn_cast <gomp_critical *> (ctx->stmt))
	    if (this_stmt_name == gimple_omp_critical_name (other_crit))
	      {
		error_at (gimple_location (stmt),
			  "%<critical%> region may not be nested inside "
			  "a %<critical%> region with the same name");
		return false;
	      }
      }
      break;
    case GIMPLE_OMP_TEAMS:
      if (ctx == NULL
	  || gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET
	  || gimple_omp_target_kind (ctx->stmt) != GF_OMP_TARGET_KIND_REGION)
	{
	  error_at (gimple_location (stmt),
		    "%<teams%> construct not closely nested inside of "
		    "%<target%> construct");
	  return false;
	}
      break;
    case GIMPLE_OMP_TARGET:
      for (c = gimple_omp_target_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
	    && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
		|| OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
	  {
	    enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
	    error_at (OMP_CLAUSE_LOCATION (c),
		      "%<depend(%s)%> is only allowed in %<omp ordered%>",
		      kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
	    return false;
	  }
      if (is_gimple_omp_offloaded (stmt)
	  && oacc_get_fn_attrib (cfun->decl) != NULL)
	{
	  error_at (gimple_location (stmt),
		    "OpenACC region inside of OpenACC routine, nested "
		    "parallelism not supported yet");
	  return false;
	}
      for (; ctx != NULL; ctx = ctx->outer)
	{
	  if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET)
	    {
	      if (is_gimple_omp (stmt)
		  && is_gimple_omp_oacc (stmt)
		  && is_gimple_omp (ctx->stmt))
		{
		  error_at (gimple_location (stmt),
			    "OpenACC construct inside of non-OpenACC region");
		  return false;
		}
	      continue;
	    }

	  const char *stmt_name, *ctx_stmt_name;
	  switch (gimple_omp_target_kind (stmt))
	    {
	    case GF_OMP_TARGET_KIND_REGION: stmt_name = "target"; break;
	    case GF_OMP_TARGET_KIND_DATA: stmt_name = "target data"; break;
	    case GF_OMP_TARGET_KIND_UPDATE: stmt_name = "target update"; break;
	    case GF_OMP_TARGET_KIND_ENTER_DATA:
	      stmt_name = "target enter data"; break;
	    case GF_OMP_TARGET_KIND_EXIT_DATA:
	      stmt_name = "target exit data"; break;
	    case GF_OMP_TARGET_KIND_OACC_PARALLEL: stmt_name = "parallel"; break;
	    case GF_OMP_TARGET_KIND_OACC_KERNELS: stmt_name = "kernels"; break;
	    case GF_OMP_TARGET_KIND_OACC_DATA: stmt_name = "data"; break;
	    case GF_OMP_TARGET_KIND_OACC_UPDATE: stmt_name = "update"; break;
	    case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
	      stmt_name = "enter/exit data"; break;
	    case GF_OMP_TARGET_KIND_OACC_HOST_DATA: stmt_name = "host_data";
	      break;
	    default: gcc_unreachable ();
	    }
	  switch (gimple_omp_target_kind (ctx->stmt))
	    {
	    case GF_OMP_TARGET_KIND_REGION: ctx_stmt_name = "target"; break;
	    case GF_OMP_TARGET_KIND_DATA: ctx_stmt_name = "target data"; break;
	    case GF_OMP_TARGET_KIND_OACC_PARALLEL:
	      ctx_stmt_name = "parallel"; break;
	    case GF_OMP_TARGET_KIND_OACC_KERNELS:
	      ctx_stmt_name = "kernels"; break;
	    case GF_OMP_TARGET_KIND_OACC_DATA: ctx_stmt_name = "data"; break;
	    case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
	      ctx_stmt_name = "host_data"; break;
	    default: gcc_unreachable ();
	    }

	  /* OpenACC/OpenMP mismatch?  */
	  if (is_gimple_omp_oacc (stmt)
	      != is_gimple_omp_oacc (ctx->stmt))
	    {
	      error_at (gimple_location (stmt),
			"%s %qs construct inside of %s %qs region",
			(is_gimple_omp_oacc (stmt)
			 ? "OpenACC" : "OpenMP"), stmt_name,
			(is_gimple_omp_oacc (ctx->stmt)
			 ? "OpenACC" : "OpenMP"), ctx_stmt_name);
	      return false;
	    }
	  if (is_gimple_omp_offloaded (ctx->stmt))
	    {
	      /* No GIMPLE_OMP_TARGET inside offloaded OpenACC CTX.  */
	      if (is_gimple_omp_oacc (ctx->stmt))
		{
		  error_at (gimple_location (stmt),
			    "%qs construct inside of %qs region",
			    stmt_name, ctx_stmt_name);
		  return false;
		}
	      else
		warning_at (gimple_location (stmt), 0,
			    "%qs construct inside of %qs region",
			    stmt_name, ctx_stmt_name);
	    }
	}
      break;
    default:
      break;
    }
  return true;
}
/* Helper function scan_omp.

   Callback for walk_tree or operators in walk_gimple_stmt used to
   scan for OMP directives in TP.  Remaps decls and types through the
   copy-body callback of the context carried in DATA's wi->info.  */

static tree
scan_omp_1_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  omp_context *ctx = (omp_context *) wi->info;
  tree t = *tp;

  switch (TREE_CODE (t))
    {
    case VAR_DECL:
    case PARM_DECL:
    case LABEL_DECL:
    case RESULT_DECL:
      if (ctx)
	{
	  /* Replace the decl with its mapping in this context.  */
	  tree repl = remap_decl (t, &ctx->cb);
	  gcc_checking_assert (TREE_CODE (repl) != ERROR_MARK);
	  *tp = repl;
	}
      break;

    default:
      if (ctx && TYPE_P (t))
	*tp = remap_type (t, &ctx->cb);
      else if (!DECL_P (t))
	{
	  *walk_subtrees = 1;
	  if (ctx)
	    {
	      /* If the expression's type was remapped, propagate the new
		 type; integer constants must be rebuilt rather than have
		 their (shared) type overwritten.  */
	      tree tem = remap_type (TREE_TYPE (t), &ctx->cb);
	      if (tem != TREE_TYPE (t))
		{
		  if (TREE_CODE (t) == INTEGER_CST)
		    *tp = wide_int_to_tree (tem, t);
		  else
		    TREE_TYPE (t) = tem;
		}
	    }
	}
      break;
    }

  return NULL_TREE;
}
3066 /* Return true if FNDECL is a setjmp or a longjmp. */
3068 static bool
3069 setjmp_or_longjmp_p (const_tree fndecl)
3071 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
3072 && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_SETJMP
3073 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_LONGJMP))
3074 return true;
3076 tree declname = DECL_NAME (fndecl);
3077 if (!declname)
3078 return false;
3079 const char *name = IDENTIFIER_POINTER (declname);
3080 return !strcmp (name, "setjmp") || !strcmp (name, "longjmp");
/* Helper function for scan_omp.

   Callback for walk_gimple_stmt used to scan for OMP directives in
   the current statement in GSI.  Verifies nesting restrictions (removing
   ill-nested statements), then dispatches to the per-construct scanners.  */

static tree
scan_omp_1_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
		 struct walk_stmt_info *wi)
{
  gimple *stmt = gsi_stmt (*gsi);
  omp_context *ctx = (omp_context *) wi->info;

  if (gimple_has_location (stmt))
    input_location = gimple_location (stmt);

  /* Check the nesting restrictions.  */
  bool remove = false;
  if (is_gimple_omp (stmt))
    remove = !check_omp_nesting_restrictions (stmt, ctx);
  else if (is_gimple_call (stmt))
    {
      tree fndecl = gimple_call_fndecl (stmt);
      if (fndecl)
	{
	  /* setjmp/longjmp cannot appear inside a simd loop.  */
	  if (setjmp_or_longjmp_p (fndecl)
	      && ctx
	      && gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
	      && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
	    {
	      remove = true;
	      error_at (gimple_location (stmt),
			"setjmp/longjmp inside simd construct");
	    }
	  else if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
	    /* GOMP builtins stand in for OMP constructs and must obey the
	       same nesting rules.  */
	    switch (DECL_FUNCTION_CODE (fndecl))
	      {
	      case BUILT_IN_GOMP_BARRIER:
	      case BUILT_IN_GOMP_CANCEL:
	      case BUILT_IN_GOMP_CANCELLATION_POINT:
	      case BUILT_IN_GOMP_TASKYIELD:
	      case BUILT_IN_GOMP_TASKWAIT:
	      case BUILT_IN_GOMP_TASKGROUP_START:
	      case BUILT_IN_GOMP_TASKGROUP_END:
		remove = !check_omp_nesting_restrictions (stmt, ctx);
		break;
	      default:
		break;
	      }
	}
    }
  if (remove)
    {
      /* Errors were already emitted; replace the bad statement with a nop
	 so later passes never see it.  */
      stmt = gimple_build_nop ();
      gsi_replace (gsi, stmt, false);
    }

  *handled_ops_p = true;

  switch (gimple_code (stmt))
    {
    case GIMPLE_OMP_PARALLEL:
      taskreg_nesting_level++;
      scan_omp_parallel (gsi, ctx);
      taskreg_nesting_level--;
      break;

    case GIMPLE_OMP_TASK:
      taskreg_nesting_level++;
      scan_omp_task (gsi, ctx);
      taskreg_nesting_level--;
      break;

    case GIMPLE_OMP_FOR:
      /* A simd loop in a maybe-offloaded region on a SIMT-capable target
	 gets duplicated into SIMT and SIMD variants.  */
      if (((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
	    & GF_OMP_FOR_KIND_MASK) == GF_OMP_FOR_KIND_SIMD)
	  && omp_maybe_offloaded_ctx (ctx)
	  && omp_max_simt_vf ())
	scan_omp_simd (gsi, as_a <gomp_for *> (stmt), ctx);
      else
	scan_omp_for (as_a <gomp_for *> (stmt), ctx);
      break;

    case GIMPLE_OMP_SECTIONS:
      scan_omp_sections (as_a <gomp_sections *> (stmt), ctx);
      break;

    case GIMPLE_OMP_SINGLE:
      scan_omp_single (as_a <gomp_single *> (stmt), ctx);
      break;

    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_TASKGROUP:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_GRID_BODY:
      /* Constructs with no clauses to share: just open a context and
	 scan the body.  */
      ctx = new_omp_context (stmt, ctx);
      scan_omp (gimple_omp_body_ptr (stmt), ctx);
      break;

    case GIMPLE_OMP_TARGET:
      scan_omp_target (as_a <gomp_target *> (stmt), ctx);
      break;

    case GIMPLE_OMP_TEAMS:
      scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
      break;

    case GIMPLE_BIND:
      {
	tree var;

	/* Record bind-local vars as identity mappings so operand walking
	   does not try to remap them; let the walker descend.  */
	*handled_ops_p = false;
	if (ctx)
	  for (var = gimple_bind_vars (as_a <gbind *> (stmt));
	       var ;
	       var = DECL_CHAIN (var))
	    insert_decl_map (&ctx->cb, var, var);
      }
      break;
    default:
      *handled_ops_p = false;
      break;
    }

  return NULL_TREE;
}
3213 /* Scan all the statements starting at the current statement. CTX
3214 contains context information about the OMP directives and
3215 clauses found during the scan. */
3217 static void
3218 scan_omp (gimple_seq *body_p, omp_context *ctx)
3220 location_t saved_location;
3221 struct walk_stmt_info wi;
3223 memset (&wi, 0, sizeof (wi));
3224 wi.info = ctx;
3225 wi.want_locations = true;
3227 saved_location = input_location;
3228 walk_gimple_seq_mod (body_p, scan_omp_1_stmt, scan_omp_1_op, &wi);
3229 input_location = saved_location;
3232 /* Re-gimplification and code generation routines. */
3234 /* If a context was created for STMT when it was scanned, return it. */
3236 static omp_context *
3237 maybe_lookup_ctx (gimple *stmt)
3239 splay_tree_node n;
3240 n = splay_tree_lookup (all_contexts, (splay_tree_key) stmt);
3241 return n ? (omp_context *) n->value : NULL;
/* Find the mapping for DECL in CTX or the immediately enclosing
   context that has a mapping for DECL.

   If CTX is a nested parallel directive, we may have to use the decl
   mappings created in CTX's parent context.  Suppose that we have the
   following parallel nesting (variable UIDs showed for clarity):

	iD.1562 = 0;
	#omp parallel shared(iD.1562)	-> outer parallel
	  iD.1562 = iD.1562 + 1;

	  #omp parallel shared (iD.1562)	-> inner parallel
	     iD.1562 = iD.1562 - 1;

   Each parallel structure will create a distinct .omp_data_s structure
   for copying iD.1562 in/out of the directive:

	outer parallel		.omp_data_s.1.i -> iD.1562
	inner parallel		.omp_data_s.2.i -> iD.1562

   A shared variable mapping will produce a copy-out operation before
   the parallel directive and a copy-in operation after it.  So, in
   this case we would have:

	iD.1562 = 0;
	.omp_data_o.1.i = iD.1562;
	#omp parallel shared(iD.1562)	-> outer parallel
	  .omp_data_i.1 = &.omp_data_o.1
	  .omp_data_i.1->i = .omp_data_i.1->i + 1;

	  .omp_data_o.2.i = iD.1562;	-> **
	  #omp parallel shared(iD.1562)	-> inner parallel
	    .omp_data_i.2 = &.omp_data_o.2
	    .omp_data_i.2->i = .omp_data_i.2->i - 1;

   ** This is a problem.  The symbol iD.1562 cannot be referenced
   inside the body of the outer parallel region.  But since we are
   emitting this copy operation while expanding the inner parallel
   directive, we need to access the CTX structure of the outer
   parallel directive to get the correct mapping:

	.omp_data_o.2.i = .omp_data_i.1->i

   Since there may be other workshare or parallel directives enclosing
   the parallel directive, it may be necessary to walk up the context
   parent chain.  This is not a problem in general because nested
   parallelism happens only rarely.  */

static tree
lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
{
  tree t;
  omp_context *up;

  /* Search outward for the nearest context that maps DECL.  */
  for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
    t = maybe_lookup_decl (decl, up);

  /* A nested context must either find a mapping or DECL is global.  */
  gcc_assert (!ctx->is_nested || t || is_global_var (decl));

  return t ? t : decl;
}
3309 /* Similar to lookup_decl_in_outer_ctx, but return DECL if not found
3310 in outer contexts. */
3312 static tree
3313 maybe_lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
3315 tree t = NULL;
3316 omp_context *up;
3318 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
3319 t = maybe_lookup_decl (decl, up);
3321 return t ? t : decl;
3325 /* Construct the initialization value for reduction operation OP. */
3327 tree
3328 omp_reduction_init_op (location_t loc, enum tree_code op, tree type)
3330 switch (op)
3332 case PLUS_EXPR:
3333 case MINUS_EXPR:
3334 case BIT_IOR_EXPR:
3335 case BIT_XOR_EXPR:
3336 case TRUTH_OR_EXPR:
3337 case TRUTH_ORIF_EXPR:
3338 case TRUTH_XOR_EXPR:
3339 case NE_EXPR:
3340 return build_zero_cst (type);
3342 case MULT_EXPR:
3343 case TRUTH_AND_EXPR:
3344 case TRUTH_ANDIF_EXPR:
3345 case EQ_EXPR:
3346 return fold_convert_loc (loc, type, integer_one_node);
3348 case BIT_AND_EXPR:
3349 return fold_convert_loc (loc, type, integer_minus_one_node);
3351 case MAX_EXPR:
3352 if (SCALAR_FLOAT_TYPE_P (type))
3354 REAL_VALUE_TYPE max, min;
3355 if (HONOR_INFINITIES (type))
3357 real_inf (&max);
3358 real_arithmetic (&min, NEGATE_EXPR, &max, NULL);
3360 else
3361 real_maxval (&min, 1, TYPE_MODE (type));
3362 return build_real (type, min);
3364 else if (POINTER_TYPE_P (type))
3366 wide_int min
3367 = wi::min_value (TYPE_PRECISION (type), TYPE_SIGN (type));
3368 return wide_int_to_tree (type, min);
3370 else
3372 gcc_assert (INTEGRAL_TYPE_P (type));
3373 return TYPE_MIN_VALUE (type);
3376 case MIN_EXPR:
3377 if (SCALAR_FLOAT_TYPE_P (type))
3379 REAL_VALUE_TYPE max;
3380 if (HONOR_INFINITIES (type))
3381 real_inf (&max);
3382 else
3383 real_maxval (&max, 0, TYPE_MODE (type));
3384 return build_real (type, max);
3386 else if (POINTER_TYPE_P (type))
3388 wide_int max
3389 = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
3390 return wide_int_to_tree (type, max);
3392 else
3394 gcc_assert (INTEGRAL_TYPE_P (type));
3395 return TYPE_MAX_VALUE (type);
3398 default:
3399 gcc_unreachable ();
3403 /* Construct the initialization value for reduction CLAUSE. */
3405 tree
3406 omp_reduction_init (tree clause, tree type)
3408 return omp_reduction_init_op (OMP_CLAUSE_LOCATION (clause),
3409 OMP_CLAUSE_REDUCTION_CODE (clause), type);
/* Return alignment to be assumed for var in CLAUSE, which should be
   OMP_CLAUSE_ALIGNED.  Returns an INTEGER_CST in byte units.  */

static tree
omp_clause_aligned_alignment (tree clause)
{
  /* If the clause spells out an alignment explicitly, use it.  */
  if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
    return OMP_CLAUSE_ALIGNED_ALIGNMENT (clause);

  /* Otherwise return implementation defined alignment.  */
  unsigned int al = 1;
  machine_mode mode, vmode;
  /* Largest autovectorization size supported by the target, rounded
     down to a power of two; 0 if the target reports none.  */
  int vs = targetm.vectorize.autovectorize_vector_sizes ();
  if (vs)
    vs = 1 << floor_log2 (vs);
  /* Pairs of (scalar class, matching vector class); the loop below
     steps by 2 so classes[i] is scalar and classes[i + 1] its vector
     counterpart.  */
  static enum mode_class classes[]
    = { MODE_INT, MODE_VECTOR_INT, MODE_FLOAT, MODE_VECTOR_FLOAT };
  for (int i = 0; i < 4; i += 2)
    /* Walk every scalar mode of the class from narrowest upward.  */
    for (mode = GET_CLASS_NARROWEST_MODE (classes[i]);
	 mode != VOIDmode;
	 mode = GET_MODE_WIDER_MODE (mode))
      {
	/* Ask the target which vector mode it would pick for this
	   scalar mode; skip if it isn't of the expected class.  */
	vmode = targetm.vectorize.preferred_simd_mode (mode);
	if (GET_MODE_CLASS (vmode) != classes[i + 1])
	  continue;
	/* Widen the vector mode up to the autovectorization size.  */
	while (vs
	       && GET_MODE_SIZE (vmode) < vs
	       && GET_MODE_2XWIDER_MODE (vmode) != VOIDmode)
	  vmode = GET_MODE_2XWIDER_MODE (vmode);

	/* Build the corresponding vector type and make sure the
	   front end actually maps it back to the same modes.  */
	tree type = lang_hooks.types.type_for_mode (mode, 1);
	if (type == NULL_TREE || TYPE_MODE (type) != mode)
	  continue;
	type = build_vector_type (type, GET_MODE_SIZE (vmode)
				  / GET_MODE_SIZE (mode));
	if (TYPE_MODE (type) != vmode)
	  continue;
	/* Track the maximum alignment over all candidate vector
	   types.  */
	if (TYPE_ALIGN_UNIT (type) > al)
	  al = TYPE_ALIGN_UNIT (type);
      }
  return build_int_cst (integer_type_node, al);
}
/* This structure is part of the interface between lower_rec_simd_input_clauses
   and lower_rec_input_clauses.  */

struct omplow_simd_context {
  tree idx;			/* Per-iteration index tmp var used to subscript
				   the "omp simd array" copies.  */
  tree lane;			/* Lane index tmp var used for the same
				   arrays.  */
  vec<tree, va_heap> simt_eargs;/* For SIMT: addresses of privatized vars,
				   collected as extra arguments; slot 0 is a
				   placeholder for the simduid.  */
  gimple_seq simt_dlist;	/* For SIMT: deferred clobber statements for
				   the privatized copies.  */
  int max_vf;			/* Maximum vectorization factor: 0 = not yet
				   computed, 1 = do not use SIMD arrays.  */
  bool is_simt;			/* True when lowering for SIMT (a _SIMT_
				   clause is present) rather than SIMD.  */
};
/* Helper function of lower_rec_input_clauses, used for #pragma omp simd
   privatization.  Decide whether NEW_VAR can be privatized per SIMD
   lane; if so set IVAR/LVAR to the per-lane accessors and return true,
   otherwise return false.  */

static bool
lower_rec_simd_input_clauses (tree new_var, omp_context *ctx,
			      omplow_simd_context *sctx, tree &ivar, tree &lvar)
{
  /* First call: compute the maximum vectorization factor lazily.  */
  if (sctx->max_vf == 0)
    {
      sctx->max_vf = sctx->is_simt ? omp_max_simt_vf () : omp_max_vf ();
      if (sctx->max_vf > 1)
	{
	  /* A safelen clause caps the VF; a non-constant or
	     non-positive safelen disables SIMD privatization.  */
	  tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
				    OMP_CLAUSE_SAFELEN);
	  if (c
	      && (TREE_CODE (OMP_CLAUSE_SAFELEN_EXPR (c)) != INTEGER_CST
		  || tree_int_cst_sgn (OMP_CLAUSE_SAFELEN_EXPR (c)) != 1))
	    sctx->max_vf = 1;
	  else if (c && compare_tree_int (OMP_CLAUSE_SAFELEN_EXPR (c),
					  sctx->max_vf) == -1)
	    sctx->max_vf = tree_to_shwi (OMP_CLAUSE_SAFELEN_EXPR (c));
	}
      if (sctx->max_vf > 1)
	{
	  /* Shared index/lane variables for all SIMD arrays created
	     by subsequent calls.  */
	  sctx->idx = create_tmp_var (unsigned_type_node);
	  sctx->lane = create_tmp_var (unsigned_type_node);
	}
    }
  if (sctx->max_vf == 1)
    return false;

  if (sctx->is_simt)
    {
      /* SIMT: registers are already per-lane; nothing to do.  */
      if (is_gimple_reg (new_var))
	{
	  ivar = lvar = new_var;
	  return true;
	}
      /* Otherwise create a marked private copy, pass its address as
	 an extra argument and schedule a clobber for it.  */
      tree type = TREE_TYPE (new_var), ptype = build_pointer_type (type);
      ivar = lvar = create_tmp_var (type);
      TREE_ADDRESSABLE (ivar) = 1;
      DECL_ATTRIBUTES (ivar) = tree_cons (get_identifier ("omp simt private"),
					  NULL, DECL_ATTRIBUTES (ivar));
      sctx->simt_eargs.safe_push (build1 (ADDR_EXPR, ptype, ivar));
      tree clobber = build_constructor (type, NULL);
      TREE_THIS_VOLATILE (clobber) = 1;
      gimple *g = gimple_build_assign (ivar, clobber);
      gimple_seq_add_stmt (&sctx->simt_dlist, g);
    }
  else
    {
      /* SIMD: create an "omp simd array" of max_vf elements; IVAR
	 indexes it by the iteration index, LVAR by the lane.  */
      tree atype = build_array_type_nelts (TREE_TYPE (new_var), sctx->max_vf);
      tree avar = create_tmp_var_raw (atype);
      if (TREE_ADDRESSABLE (new_var))
	TREE_ADDRESSABLE (avar) = 1;
      DECL_ATTRIBUTES (avar)
	= tree_cons (get_identifier ("omp simd array"), NULL,
		     DECL_ATTRIBUTES (avar));
      gimple_add_tmp_var (avar);
      ivar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, sctx->idx,
		     NULL_TREE, NULL_TREE);
      lvar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, sctx->lane,
		     NULL_TREE, NULL_TREE);
    }
  /* Redirect uses of NEW_VAR to the lane accessor.  */
  if (DECL_P (new_var))
    {
      SET_DECL_VALUE_EXPR (new_var, lvar);
      DECL_HAS_VALUE_EXPR_P (new_var) = 1;
    }
  return true;
}
3540 /* Helper function of lower_rec_input_clauses. For a reference
3541 in simd reduction, add an underlying variable it will reference. */
3543 static void
3544 handle_simd_reference (location_t loc, tree new_vard, gimple_seq *ilist)
3546 tree z = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_vard)));
3547 if (TREE_CONSTANT (z))
3549 z = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_vard)),
3550 get_name (new_vard));
3551 gimple_add_tmp_var (z);
3552 TREE_ADDRESSABLE (z) = 1;
3553 z = build_fold_addr_expr_loc (loc, z);
3554 gimplify_assign (new_vard, z, ilist);
3558 /* Generate code to implement the input clauses, FIRSTPRIVATE and COPYIN,
3559 from the receiver (aka child) side and initializers for REFERENCE_TYPE
3560 private variables. Initialization statements go in ILIST, while calls
3561 to destructors go in DLIST. */
3563 static void
3564 lower_rec_input_clauses (tree clauses, gimple_seq *ilist, gimple_seq *dlist,
3565 omp_context *ctx, struct omp_for_data *fd)
3567 tree c, dtor, copyin_seq, x, ptr;
3568 bool copyin_by_ref = false;
3569 bool lastprivate_firstprivate = false;
3570 bool reduction_omp_orig_ref = false;
3571 int pass;
3572 bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
3573 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD);
3574 omplow_simd_context sctx = omplow_simd_context ();
3575 tree simt_lane = NULL_TREE, simtrec = NULL_TREE;
3576 tree ivar = NULL_TREE, lvar = NULL_TREE, uid = NULL_TREE;
3577 gimple_seq llist[3] = { };
3579 copyin_seq = NULL;
3580 sctx.is_simt = is_simd && omp_find_clause (clauses, OMP_CLAUSE__SIMT_);
3582 /* Set max_vf=1 (which will later enforce safelen=1) in simd loops
3583 with data sharing clauses referencing variable sized vars. That
3584 is unnecessarily hard to support and very unlikely to result in
3585 vectorized code anyway. */
3586 if (is_simd)
3587 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
3588 switch (OMP_CLAUSE_CODE (c))
3590 case OMP_CLAUSE_LINEAR:
3591 if (OMP_CLAUSE_LINEAR_ARRAY (c))
3592 sctx.max_vf = 1;
3593 /* FALLTHRU */
3594 case OMP_CLAUSE_PRIVATE:
3595 case OMP_CLAUSE_FIRSTPRIVATE:
3596 case OMP_CLAUSE_LASTPRIVATE:
3597 if (is_variable_sized (OMP_CLAUSE_DECL (c)))
3598 sctx.max_vf = 1;
3599 break;
3600 case OMP_CLAUSE_REDUCTION:
3601 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
3602 || is_variable_sized (OMP_CLAUSE_DECL (c)))
3603 sctx.max_vf = 1;
3604 break;
3605 default:
3606 continue;
3609 /* Add a placeholder for simduid. */
3610 if (sctx.is_simt && sctx.max_vf != 1)
3611 sctx.simt_eargs.safe_push (NULL_TREE);
3613 /* Do all the fixed sized types in the first pass, and the variable sized
3614 types in the second pass. This makes sure that the scalar arguments to
3615 the variable sized types are processed before we use them in the
3616 variable sized operations. */
3617 for (pass = 0; pass < 2; ++pass)
3619 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
3621 enum omp_clause_code c_kind = OMP_CLAUSE_CODE (c);
3622 tree var, new_var;
3623 bool by_ref;
3624 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
3626 switch (c_kind)
3628 case OMP_CLAUSE_PRIVATE:
3629 if (OMP_CLAUSE_PRIVATE_DEBUG (c))
3630 continue;
3631 break;
3632 case OMP_CLAUSE_SHARED:
3633 /* Ignore shared directives in teams construct. */
3634 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
3635 continue;
3636 if (maybe_lookup_decl (OMP_CLAUSE_DECL (c), ctx) == NULL)
3638 gcc_assert (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c)
3639 || is_global_var (OMP_CLAUSE_DECL (c)));
3640 continue;
3642 case OMP_CLAUSE_FIRSTPRIVATE:
3643 case OMP_CLAUSE_COPYIN:
3644 break;
3645 case OMP_CLAUSE_LINEAR:
3646 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
3647 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
3648 lastprivate_firstprivate = true;
3649 break;
3650 case OMP_CLAUSE_REDUCTION:
3651 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
3652 reduction_omp_orig_ref = true;
3653 break;
3654 case OMP_CLAUSE__LOOPTEMP_:
3655 /* Handle _looptemp_ clauses only on parallel/task. */
3656 if (fd)
3657 continue;
3658 break;
3659 case OMP_CLAUSE_LASTPRIVATE:
3660 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
3662 lastprivate_firstprivate = true;
3663 if (pass != 0 || is_taskloop_ctx (ctx))
3664 continue;
3666 /* Even without corresponding firstprivate, if
3667 decl is Fortran allocatable, it needs outer var
3668 reference. */
3669 else if (pass == 0
3670 && lang_hooks.decls.omp_private_outer_ref
3671 (OMP_CLAUSE_DECL (c)))
3672 lastprivate_firstprivate = true;
3673 break;
3674 case OMP_CLAUSE_ALIGNED:
3675 if (pass == 0)
3676 continue;
3677 var = OMP_CLAUSE_DECL (c);
3678 if (TREE_CODE (TREE_TYPE (var)) == POINTER_TYPE
3679 && !is_global_var (var))
3681 new_var = maybe_lookup_decl (var, ctx);
3682 if (new_var == NULL_TREE)
3683 new_var = maybe_lookup_decl_in_outer_ctx (var, ctx);
3684 x = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
3685 tree alarg = omp_clause_aligned_alignment (c);
3686 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
3687 x = build_call_expr_loc (clause_loc, x, 2, new_var, alarg);
3688 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
3689 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
3690 gimplify_and_add (x, ilist);
3692 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
3693 && is_global_var (var))
3695 tree ptype = build_pointer_type (TREE_TYPE (var)), t, t2;
3696 new_var = lookup_decl (var, ctx);
3697 t = maybe_lookup_decl_in_outer_ctx (var, ctx);
3698 t = build_fold_addr_expr_loc (clause_loc, t);
3699 t2 = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
3700 tree alarg = omp_clause_aligned_alignment (c);
3701 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
3702 t = build_call_expr_loc (clause_loc, t2, 2, t, alarg);
3703 t = fold_convert_loc (clause_loc, ptype, t);
3704 x = create_tmp_var (ptype);
3705 t = build2 (MODIFY_EXPR, ptype, x, t);
3706 gimplify_and_add (t, ilist);
3707 t = build_simple_mem_ref_loc (clause_loc, x);
3708 SET_DECL_VALUE_EXPR (new_var, t);
3709 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
3711 continue;
3712 default:
3713 continue;
3716 new_var = var = OMP_CLAUSE_DECL (c);
3717 if (c_kind == OMP_CLAUSE_REDUCTION && TREE_CODE (var) == MEM_REF)
3719 var = TREE_OPERAND (var, 0);
3720 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
3721 var = TREE_OPERAND (var, 0);
3722 if (TREE_CODE (var) == INDIRECT_REF
3723 || TREE_CODE (var) == ADDR_EXPR)
3724 var = TREE_OPERAND (var, 0);
3725 if (is_variable_sized (var))
3727 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
3728 var = DECL_VALUE_EXPR (var);
3729 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
3730 var = TREE_OPERAND (var, 0);
3731 gcc_assert (DECL_P (var));
3733 new_var = var;
3735 if (c_kind != OMP_CLAUSE_COPYIN)
3736 new_var = lookup_decl (var, ctx);
3738 if (c_kind == OMP_CLAUSE_SHARED || c_kind == OMP_CLAUSE_COPYIN)
3740 if (pass != 0)
3741 continue;
3743 /* C/C++ array section reductions. */
3744 else if (c_kind == OMP_CLAUSE_REDUCTION
3745 && var != OMP_CLAUSE_DECL (c))
3747 if (pass == 0)
3748 continue;
3750 tree bias = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
3751 tree orig_var = TREE_OPERAND (OMP_CLAUSE_DECL (c), 0);
3752 if (TREE_CODE (orig_var) == POINTER_PLUS_EXPR)
3754 tree b = TREE_OPERAND (orig_var, 1);
3755 b = maybe_lookup_decl (b, ctx);
3756 if (b == NULL)
3758 b = TREE_OPERAND (orig_var, 1);
3759 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
3761 if (integer_zerop (bias))
3762 bias = b;
3763 else
3765 bias = fold_convert_loc (clause_loc,
3766 TREE_TYPE (b), bias);
3767 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
3768 TREE_TYPE (b), b, bias);
3770 orig_var = TREE_OPERAND (orig_var, 0);
3772 if (TREE_CODE (orig_var) == INDIRECT_REF
3773 || TREE_CODE (orig_var) == ADDR_EXPR)
3774 orig_var = TREE_OPERAND (orig_var, 0);
3775 tree d = OMP_CLAUSE_DECL (c);
3776 tree type = TREE_TYPE (d);
3777 gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
3778 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
3779 const char *name = get_name (orig_var);
3780 if (TREE_CONSTANT (v))
3782 x = create_tmp_var_raw (type, name);
3783 gimple_add_tmp_var (x);
3784 TREE_ADDRESSABLE (x) = 1;
3785 x = build_fold_addr_expr_loc (clause_loc, x);
3787 else
3789 tree atmp
3790 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
3791 tree t = maybe_lookup_decl (v, ctx);
3792 if (t)
3793 v = t;
3794 else
3795 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
3796 gimplify_expr (&v, ilist, NULL, is_gimple_val, fb_rvalue);
3797 t = fold_build2_loc (clause_loc, PLUS_EXPR,
3798 TREE_TYPE (v), v,
3799 build_int_cst (TREE_TYPE (v), 1));
3800 t = fold_build2_loc (clause_loc, MULT_EXPR,
3801 TREE_TYPE (v), t,
3802 TYPE_SIZE_UNIT (TREE_TYPE (type)));
3803 tree al = size_int (TYPE_ALIGN (TREE_TYPE (type)));
3804 x = build_call_expr_loc (clause_loc, atmp, 2, t, al);
3807 tree ptype = build_pointer_type (TREE_TYPE (type));
3808 x = fold_convert_loc (clause_loc, ptype, x);
3809 tree y = create_tmp_var (ptype, name);
3810 gimplify_assign (y, x, ilist);
3811 x = y;
3812 tree yb = y;
3814 if (!integer_zerop (bias))
3816 bias = fold_convert_loc (clause_loc, pointer_sized_int_node,
3817 bias);
3818 yb = fold_convert_loc (clause_loc, pointer_sized_int_node,
3820 yb = fold_build2_loc (clause_loc, MINUS_EXPR,
3821 pointer_sized_int_node, yb, bias);
3822 x = fold_convert_loc (clause_loc, TREE_TYPE (x), yb);
3823 yb = create_tmp_var (ptype, name);
3824 gimplify_assign (yb, x, ilist);
3825 x = yb;
3828 d = TREE_OPERAND (d, 0);
3829 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
3830 d = TREE_OPERAND (d, 0);
3831 if (TREE_CODE (d) == ADDR_EXPR)
3833 if (orig_var != var)
3835 gcc_assert (is_variable_sized (orig_var));
3836 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var),
3838 gimplify_assign (new_var, x, ilist);
3839 tree new_orig_var = lookup_decl (orig_var, ctx);
3840 tree t = build_fold_indirect_ref (new_var);
3841 DECL_IGNORED_P (new_var) = 0;
3842 TREE_THIS_NOTRAP (t);
3843 SET_DECL_VALUE_EXPR (new_orig_var, t);
3844 DECL_HAS_VALUE_EXPR_P (new_orig_var) = 1;
3846 else
3848 x = build2 (MEM_REF, TREE_TYPE (new_var), x,
3849 build_int_cst (ptype, 0));
3850 SET_DECL_VALUE_EXPR (new_var, x);
3851 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
3854 else
3856 gcc_assert (orig_var == var);
3857 if (TREE_CODE (d) == INDIRECT_REF)
3859 x = create_tmp_var (ptype, name);
3860 TREE_ADDRESSABLE (x) = 1;
3861 gimplify_assign (x, yb, ilist);
3862 x = build_fold_addr_expr_loc (clause_loc, x);
3864 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
3865 gimplify_assign (new_var, x, ilist);
3867 tree y1 = create_tmp_var (ptype, NULL);
3868 gimplify_assign (y1, y, ilist);
3869 tree i2 = NULL_TREE, y2 = NULL_TREE;
3870 tree body2 = NULL_TREE, end2 = NULL_TREE;
3871 tree y3 = NULL_TREE, y4 = NULL_TREE;
3872 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) || is_simd)
3874 y2 = create_tmp_var (ptype, NULL);
3875 gimplify_assign (y2, y, ilist);
3876 tree ref = build_outer_var_ref (var, ctx);
3877 /* For ref build_outer_var_ref already performs this. */
3878 if (TREE_CODE (d) == INDIRECT_REF)
3879 gcc_assert (omp_is_reference (var));
3880 else if (TREE_CODE (d) == ADDR_EXPR)
3881 ref = build_fold_addr_expr (ref);
3882 else if (omp_is_reference (var))
3883 ref = build_fold_addr_expr (ref);
3884 ref = fold_convert_loc (clause_loc, ptype, ref);
3885 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
3886 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
3888 y3 = create_tmp_var (ptype, NULL);
3889 gimplify_assign (y3, unshare_expr (ref), ilist);
3891 if (is_simd)
3893 y4 = create_tmp_var (ptype, NULL);
3894 gimplify_assign (y4, ref, dlist);
3897 tree i = create_tmp_var (TREE_TYPE (v), NULL);
3898 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), ilist);
3899 tree body = create_artificial_label (UNKNOWN_LOCATION);
3900 tree end = create_artificial_label (UNKNOWN_LOCATION);
3901 gimple_seq_add_stmt (ilist, gimple_build_label (body));
3902 if (y2)
3904 i2 = create_tmp_var (TREE_TYPE (v), NULL);
3905 gimplify_assign (i2, build_int_cst (TREE_TYPE (v), 0), dlist);
3906 body2 = create_artificial_label (UNKNOWN_LOCATION);
3907 end2 = create_artificial_label (UNKNOWN_LOCATION);
3908 gimple_seq_add_stmt (dlist, gimple_build_label (body2));
3910 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
3912 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
3913 tree decl_placeholder
3914 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
3915 SET_DECL_VALUE_EXPR (decl_placeholder,
3916 build_simple_mem_ref (y1));
3917 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
3918 SET_DECL_VALUE_EXPR (placeholder,
3919 y3 ? build_simple_mem_ref (y3)
3920 : error_mark_node);
3921 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
3922 x = lang_hooks.decls.omp_clause_default_ctor
3923 (c, build_simple_mem_ref (y1),
3924 y3 ? build_simple_mem_ref (y3) : NULL_TREE);
3925 if (x)
3926 gimplify_and_add (x, ilist);
3927 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
3929 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
3930 lower_omp (&tseq, ctx);
3931 gimple_seq_add_seq (ilist, tseq);
3933 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
3934 if (is_simd)
3936 SET_DECL_VALUE_EXPR (decl_placeholder,
3937 build_simple_mem_ref (y2));
3938 SET_DECL_VALUE_EXPR (placeholder,
3939 build_simple_mem_ref (y4));
3940 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
3941 lower_omp (&tseq, ctx);
3942 gimple_seq_add_seq (dlist, tseq);
3943 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
3945 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
3946 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 0;
3947 x = lang_hooks.decls.omp_clause_dtor
3948 (c, build_simple_mem_ref (y2));
3949 if (x)
3951 gimple_seq tseq = NULL;
3952 dtor = x;
3953 gimplify_stmt (&dtor, &tseq);
3954 gimple_seq_add_seq (dlist, tseq);
3957 else
3959 x = omp_reduction_init (c, TREE_TYPE (type));
3960 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
3962 /* reduction(-:var) sums up the partial results, so it
3963 acts identically to reduction(+:var). */
3964 if (code == MINUS_EXPR)
3965 code = PLUS_EXPR;
3967 gimplify_assign (build_simple_mem_ref (y1), x, ilist);
3968 if (is_simd)
3970 x = build2 (code, TREE_TYPE (type),
3971 build_simple_mem_ref (y4),
3972 build_simple_mem_ref (y2));
3973 gimplify_assign (build_simple_mem_ref (y4), x, dlist);
3976 gimple *g
3977 = gimple_build_assign (y1, POINTER_PLUS_EXPR, y1,
3978 TYPE_SIZE_UNIT (TREE_TYPE (type)));
3979 gimple_seq_add_stmt (ilist, g);
3980 if (y3)
3982 g = gimple_build_assign (y3, POINTER_PLUS_EXPR, y3,
3983 TYPE_SIZE_UNIT (TREE_TYPE (type)));
3984 gimple_seq_add_stmt (ilist, g);
3986 g = gimple_build_assign (i, PLUS_EXPR, i,
3987 build_int_cst (TREE_TYPE (i), 1));
3988 gimple_seq_add_stmt (ilist, g);
3989 g = gimple_build_cond (LE_EXPR, i, v, body, end);
3990 gimple_seq_add_stmt (ilist, g);
3991 gimple_seq_add_stmt (ilist, gimple_build_label (end));
3992 if (y2)
3994 g = gimple_build_assign (y2, POINTER_PLUS_EXPR, y2,
3995 TYPE_SIZE_UNIT (TREE_TYPE (type)));
3996 gimple_seq_add_stmt (dlist, g);
3997 if (y4)
3999 g = gimple_build_assign
4000 (y4, POINTER_PLUS_EXPR, y4,
4001 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4002 gimple_seq_add_stmt (dlist, g);
4004 g = gimple_build_assign (i2, PLUS_EXPR, i2,
4005 build_int_cst (TREE_TYPE (i2), 1));
4006 gimple_seq_add_stmt (dlist, g);
4007 g = gimple_build_cond (LE_EXPR, i2, v, body2, end2);
4008 gimple_seq_add_stmt (dlist, g);
4009 gimple_seq_add_stmt (dlist, gimple_build_label (end2));
4011 continue;
4013 else if (is_variable_sized (var))
4015 /* For variable sized types, we need to allocate the
4016 actual storage here. Call alloca and store the
4017 result in the pointer decl that we created elsewhere. */
4018 if (pass == 0)
4019 continue;
4021 if (c_kind != OMP_CLAUSE_FIRSTPRIVATE || !is_task_ctx (ctx))
4023 gcall *stmt;
4024 tree tmp, atmp;
4026 ptr = DECL_VALUE_EXPR (new_var);
4027 gcc_assert (TREE_CODE (ptr) == INDIRECT_REF);
4028 ptr = TREE_OPERAND (ptr, 0);
4029 gcc_assert (DECL_P (ptr));
4030 x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
4032 /* void *tmp = __builtin_alloca */
4033 atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
4034 stmt = gimple_build_call (atmp, 2, x,
4035 size_int (DECL_ALIGN (var)));
4036 tmp = create_tmp_var_raw (ptr_type_node);
4037 gimple_add_tmp_var (tmp);
4038 gimple_call_set_lhs (stmt, tmp);
4040 gimple_seq_add_stmt (ilist, stmt);
4042 x = fold_convert_loc (clause_loc, TREE_TYPE (ptr), tmp);
4043 gimplify_assign (ptr, x, ilist);
4046 else if (omp_is_reference (var))
4048 /* For references that are being privatized for Fortran,
4049 allocate new backing storage for the new pointer
4050 variable. This allows us to avoid changing all the
4051 code that expects a pointer to something that expects
4052 a direct variable. */
4053 if (pass == 0)
4054 continue;
4056 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
4057 if (c_kind == OMP_CLAUSE_FIRSTPRIVATE && is_task_ctx (ctx))
4059 x = build_receiver_ref (var, false, ctx);
4060 x = build_fold_addr_expr_loc (clause_loc, x);
4062 else if (TREE_CONSTANT (x))
4064 /* For reduction in SIMD loop, defer adding the
4065 initialization of the reference, because if we decide
4066 to use SIMD array for it, the initilization could cause
4067 expansion ICE. */
4068 if (c_kind == OMP_CLAUSE_REDUCTION && is_simd)
4069 x = NULL_TREE;
4070 else
4072 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
4073 get_name (var));
4074 gimple_add_tmp_var (x);
4075 TREE_ADDRESSABLE (x) = 1;
4076 x = build_fold_addr_expr_loc (clause_loc, x);
4079 else
4081 tree atmp
4082 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
4083 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
4084 tree al = size_int (TYPE_ALIGN (rtype));
4085 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
4088 if (x)
4090 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
4091 gimplify_assign (new_var, x, ilist);
4094 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
4096 else if (c_kind == OMP_CLAUSE_REDUCTION
4097 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4099 if (pass == 0)
4100 continue;
4102 else if (pass != 0)
4103 continue;
4105 switch (OMP_CLAUSE_CODE (c))
4107 case OMP_CLAUSE_SHARED:
4108 /* Ignore shared directives in teams construct. */
4109 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
4110 continue;
4111 /* Shared global vars are just accessed directly. */
4112 if (is_global_var (new_var))
4113 break;
4114 /* For taskloop firstprivate/lastprivate, represented
4115 as firstprivate and shared clause on the task, new_var
4116 is the firstprivate var. */
4117 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
4118 break;
4119 /* Set up the DECL_VALUE_EXPR for shared variables now. This
4120 needs to be delayed until after fixup_child_record_type so
4121 that we get the correct type during the dereference. */
4122 by_ref = use_pointer_for_field (var, ctx);
4123 x = build_receiver_ref (var, by_ref, ctx);
4124 SET_DECL_VALUE_EXPR (new_var, x);
4125 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4127 /* ??? If VAR is not passed by reference, and the variable
4128 hasn't been initialized yet, then we'll get a warning for
4129 the store into the omp_data_s structure. Ideally, we'd be
4130 able to notice this and not store anything at all, but
4131 we're generating code too early. Suppress the warning. */
4132 if (!by_ref)
4133 TREE_NO_WARNING (var) = 1;
4134 break;
4136 case OMP_CLAUSE_LASTPRIVATE:
4137 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
4138 break;
4139 /* FALLTHRU */
4141 case OMP_CLAUSE_PRIVATE:
4142 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE)
4143 x = build_outer_var_ref (var, ctx);
4144 else if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
4146 if (is_task_ctx (ctx))
4147 x = build_receiver_ref (var, false, ctx);
4148 else
4149 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_PRIVATE);
4151 else
4152 x = NULL;
4153 do_private:
4154 tree nx;
4155 nx = lang_hooks.decls.omp_clause_default_ctor
4156 (c, unshare_expr (new_var), x);
4157 if (is_simd)
4159 tree y = lang_hooks.decls.omp_clause_dtor (c, new_var);
4160 if ((TREE_ADDRESSABLE (new_var) || nx || y
4161 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
4162 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4163 ivar, lvar))
4165 if (nx)
4166 x = lang_hooks.decls.omp_clause_default_ctor
4167 (c, unshare_expr (ivar), x);
4168 if (nx && x)
4169 gimplify_and_add (x, &llist[0]);
4170 if (y)
4172 y = lang_hooks.decls.omp_clause_dtor (c, ivar);
4173 if (y)
4175 gimple_seq tseq = NULL;
4177 dtor = y;
4178 gimplify_stmt (&dtor, &tseq);
4179 gimple_seq_add_seq (&llist[1], tseq);
4182 break;
4185 if (nx)
4186 gimplify_and_add (nx, ilist);
4187 /* FALLTHRU */
4189 do_dtor:
4190 x = lang_hooks.decls.omp_clause_dtor (c, new_var);
4191 if (x)
4193 gimple_seq tseq = NULL;
4195 dtor = x;
4196 gimplify_stmt (&dtor, &tseq);
4197 gimple_seq_add_seq (dlist, tseq);
4199 break;
4201 case OMP_CLAUSE_LINEAR:
4202 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
4203 goto do_firstprivate;
4204 if (OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
4205 x = NULL;
4206 else
4207 x = build_outer_var_ref (var, ctx);
4208 goto do_private;
4210 case OMP_CLAUSE_FIRSTPRIVATE:
4211 if (is_task_ctx (ctx))
4213 if (omp_is_reference (var) || is_variable_sized (var))
4214 goto do_dtor;
4215 else if (is_global_var (maybe_lookup_decl_in_outer_ctx (var,
4216 ctx))
4217 || use_pointer_for_field (var, NULL))
4219 x = build_receiver_ref (var, false, ctx);
4220 SET_DECL_VALUE_EXPR (new_var, x);
4221 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4222 goto do_dtor;
4225 do_firstprivate:
4226 x = build_outer_var_ref (var, ctx);
4227 if (is_simd)
4229 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
4230 && gimple_omp_for_combined_into_p (ctx->stmt))
4232 tree t = OMP_CLAUSE_LINEAR_STEP (c);
4233 tree stept = TREE_TYPE (t);
4234 tree ct = omp_find_clause (clauses,
4235 OMP_CLAUSE__LOOPTEMP_);
4236 gcc_assert (ct);
4237 tree l = OMP_CLAUSE_DECL (ct);
4238 tree n1 = fd->loop.n1;
4239 tree step = fd->loop.step;
4240 tree itype = TREE_TYPE (l);
4241 if (POINTER_TYPE_P (itype))
4242 itype = signed_type_for (itype);
4243 l = fold_build2 (MINUS_EXPR, itype, l, n1);
4244 if (TYPE_UNSIGNED (itype)
4245 && fd->loop.cond_code == GT_EXPR)
4246 l = fold_build2 (TRUNC_DIV_EXPR, itype,
4247 fold_build1 (NEGATE_EXPR, itype, l),
4248 fold_build1 (NEGATE_EXPR,
4249 itype, step));
4250 else
4251 l = fold_build2 (TRUNC_DIV_EXPR, itype, l, step);
4252 t = fold_build2 (MULT_EXPR, stept,
4253 fold_convert (stept, l), t);
4255 if (OMP_CLAUSE_LINEAR_ARRAY (c))
4257 x = lang_hooks.decls.omp_clause_linear_ctor
4258 (c, new_var, x, t);
4259 gimplify_and_add (x, ilist);
4260 goto do_dtor;
4263 if (POINTER_TYPE_P (TREE_TYPE (x)))
4264 x = fold_build2 (POINTER_PLUS_EXPR,
4265 TREE_TYPE (x), x, t);
4266 else
4267 x = fold_build2 (PLUS_EXPR, TREE_TYPE (x), x, t);
4270 if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LINEAR
4271 || TREE_ADDRESSABLE (new_var))
4272 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4273 ivar, lvar))
4275 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR)
4277 tree iv = create_tmp_var (TREE_TYPE (new_var));
4278 x = lang_hooks.decls.omp_clause_copy_ctor (c, iv, x);
4279 gimplify_and_add (x, ilist);
4280 gimple_stmt_iterator gsi
4281 = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
4282 gassign *g
4283 = gimple_build_assign (unshare_expr (lvar), iv);
4284 gsi_insert_before_without_update (&gsi, g,
4285 GSI_SAME_STMT);
4286 tree t = OMP_CLAUSE_LINEAR_STEP (c);
4287 enum tree_code code = PLUS_EXPR;
4288 if (POINTER_TYPE_P (TREE_TYPE (new_var)))
4289 code = POINTER_PLUS_EXPR;
4290 g = gimple_build_assign (iv, code, iv, t);
4291 gsi_insert_before_without_update (&gsi, g,
4292 GSI_SAME_STMT);
4293 break;
4295 x = lang_hooks.decls.omp_clause_copy_ctor
4296 (c, unshare_expr (ivar), x);
4297 gimplify_and_add (x, &llist[0]);
4298 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
4299 if (x)
4301 gimple_seq tseq = NULL;
4303 dtor = x;
4304 gimplify_stmt (&dtor, &tseq);
4305 gimple_seq_add_seq (&llist[1], tseq);
4307 break;
4310 x = lang_hooks.decls.omp_clause_copy_ctor
4311 (c, unshare_expr (new_var), x);
4312 gimplify_and_add (x, ilist);
4313 goto do_dtor;
4315 case OMP_CLAUSE__LOOPTEMP_:
4316 gcc_assert (is_taskreg_ctx (ctx));
4317 x = build_outer_var_ref (var, ctx);
4318 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
4319 gimplify_and_add (x, ilist);
4320 break;
4322 case OMP_CLAUSE_COPYIN:
4323 by_ref = use_pointer_for_field (var, NULL);
4324 x = build_receiver_ref (var, by_ref, ctx);
4325 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, x);
4326 append_to_statement_list (x, &copyin_seq);
4327 copyin_by_ref |= by_ref;
4328 break;
4330 case OMP_CLAUSE_REDUCTION:
4331 /* OpenACC reductions are initialized using the
4332 GOACC_REDUCTION internal function. */
4333 if (is_gimple_omp_oacc (ctx->stmt))
4334 break;
4335 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4337 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
4338 gimple *tseq;
4339 x = build_outer_var_ref (var, ctx);
4341 if (omp_is_reference (var)
4342 && !useless_type_conversion_p (TREE_TYPE (placeholder),
4343 TREE_TYPE (x)))
4344 x = build_fold_addr_expr_loc (clause_loc, x);
4345 SET_DECL_VALUE_EXPR (placeholder, x);
4346 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
4347 tree new_vard = new_var;
4348 if (omp_is_reference (var))
4350 gcc_assert (TREE_CODE (new_var) == MEM_REF);
4351 new_vard = TREE_OPERAND (new_var, 0);
4352 gcc_assert (DECL_P (new_vard));
4354 if (is_simd
4355 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4356 ivar, lvar))
4358 if (new_vard == new_var)
4360 gcc_assert (DECL_VALUE_EXPR (new_var) == lvar);
4361 SET_DECL_VALUE_EXPR (new_var, ivar);
4363 else
4365 SET_DECL_VALUE_EXPR (new_vard,
4366 build_fold_addr_expr (ivar));
4367 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
4369 x = lang_hooks.decls.omp_clause_default_ctor
4370 (c, unshare_expr (ivar),
4371 build_outer_var_ref (var, ctx));
4372 if (x)
4373 gimplify_and_add (x, &llist[0]);
4374 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
4376 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
4377 lower_omp (&tseq, ctx);
4378 gimple_seq_add_seq (&llist[0], tseq);
4380 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
4381 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
4382 lower_omp (&tseq, ctx);
4383 gimple_seq_add_seq (&llist[1], tseq);
4384 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
4385 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
4386 if (new_vard == new_var)
4387 SET_DECL_VALUE_EXPR (new_var, lvar);
4388 else
4389 SET_DECL_VALUE_EXPR (new_vard,
4390 build_fold_addr_expr (lvar));
4391 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
4392 if (x)
4394 tseq = NULL;
4395 dtor = x;
4396 gimplify_stmt (&dtor, &tseq);
4397 gimple_seq_add_seq (&llist[1], tseq);
4399 break;
4401 /* If this is a reference to constant size reduction var
4402 with placeholder, we haven't emitted the initializer
4403 for it because it is undesirable if SIMD arrays are used.
4404 But if they aren't used, we need to emit the deferred
4405 initialization now. */
4406 else if (omp_is_reference (var) && is_simd)
4407 handle_simd_reference (clause_loc, new_vard, ilist);
4408 x = lang_hooks.decls.omp_clause_default_ctor
4409 (c, unshare_expr (new_var),
4410 build_outer_var_ref (var, ctx));
4411 if (x)
4412 gimplify_and_add (x, ilist);
4413 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
4415 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
4416 lower_omp (&tseq, ctx);
4417 gimple_seq_add_seq (ilist, tseq);
4419 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
4420 if (is_simd)
4422 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
4423 lower_omp (&tseq, ctx);
4424 gimple_seq_add_seq (dlist, tseq);
4425 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
4427 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
4428 goto do_dtor;
4430 else
4432 x = omp_reduction_init (c, TREE_TYPE (new_var));
4433 gcc_assert (TREE_CODE (TREE_TYPE (new_var)) != ARRAY_TYPE);
4434 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
4436 /* reduction(-:var) sums up the partial results, so it
4437 acts identically to reduction(+:var). */
4438 if (code == MINUS_EXPR)
4439 code = PLUS_EXPR;
4441 tree new_vard = new_var;
4442 if (is_simd && omp_is_reference (var))
4444 gcc_assert (TREE_CODE (new_var) == MEM_REF);
4445 new_vard = TREE_OPERAND (new_var, 0);
4446 gcc_assert (DECL_P (new_vard));
4448 if (is_simd
4449 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4450 ivar, lvar))
4452 tree ref = build_outer_var_ref (var, ctx);
4454 gimplify_assign (unshare_expr (ivar), x, &llist[0]);
4456 if (sctx.is_simt)
4458 if (!simt_lane)
4459 simt_lane = create_tmp_var (unsigned_type_node);
4460 x = build_call_expr_internal_loc
4461 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_BFLY,
4462 TREE_TYPE (ivar), 2, ivar, simt_lane);
4463 x = build2 (code, TREE_TYPE (ivar), ivar, x);
4464 gimplify_assign (ivar, x, &llist[2]);
4466 x = build2 (code, TREE_TYPE (ref), ref, ivar);
4467 ref = build_outer_var_ref (var, ctx);
4468 gimplify_assign (ref, x, &llist[1]);
4470 if (new_vard != new_var)
4472 SET_DECL_VALUE_EXPR (new_vard,
4473 build_fold_addr_expr (lvar));
4474 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
4477 else
4479 if (omp_is_reference (var) && is_simd)
4480 handle_simd_reference (clause_loc, new_vard, ilist);
4481 gimplify_assign (new_var, x, ilist);
4482 if (is_simd)
4484 tree ref = build_outer_var_ref (var, ctx);
4486 x = build2 (code, TREE_TYPE (ref), ref, new_var);
4487 ref = build_outer_var_ref (var, ctx);
4488 gimplify_assign (ref, x, dlist);
4492 break;
4494 default:
4495 gcc_unreachable ();
4500 if (sctx.max_vf == 1)
4501 sctx.is_simt = false;
4503 if (sctx.lane || sctx.is_simt)
4505 uid = create_tmp_var (ptr_type_node, "simduid");
4506 /* Don't want uninit warnings on simduid, it is always uninitialized,
4507 but we use it not for the value, but for the DECL_UID only. */
4508 TREE_NO_WARNING (uid) = 1;
4509 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SIMDUID_);
4510 OMP_CLAUSE__SIMDUID__DECL (c) = uid;
4511 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
4512 gimple_omp_for_set_clauses (ctx->stmt, c);
4514 /* Emit calls denoting privatized variables and initializing a pointer to
4515 structure that holds private variables as fields after ompdevlow pass. */
4516 if (sctx.is_simt)
4518 sctx.simt_eargs[0] = uid;
4519 gimple *g
4520 = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, sctx.simt_eargs);
4521 gimple_call_set_lhs (g, uid);
4522 gimple_seq_add_stmt (ilist, g);
4523 sctx.simt_eargs.release ();
4525 simtrec = create_tmp_var (ptr_type_node, ".omp_simt");
4526 g = gimple_build_call_internal (IFN_GOMP_SIMT_ENTER_ALLOC, 1, uid);
4527 gimple_call_set_lhs (g, simtrec);
4528 gimple_seq_add_stmt (ilist, g);
4530 if (sctx.lane)
4532 gimple *g
4533 = gimple_build_call_internal (IFN_GOMP_SIMD_LANE, 1, uid);
4534 gimple_call_set_lhs (g, sctx.lane);
4535 gimple_stmt_iterator gsi = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
4536 gsi_insert_before_without_update (&gsi, g, GSI_SAME_STMT);
4537 g = gimple_build_assign (sctx.lane, INTEGER_CST,
4538 build_int_cst (unsigned_type_node, 0));
4539 gimple_seq_add_stmt (ilist, g);
4540 /* Emit reductions across SIMT lanes in log_2(simt_vf) steps. */
4541 if (llist[2])
4543 tree simt_vf = create_tmp_var (unsigned_type_node);
4544 g = gimple_build_call_internal (IFN_GOMP_SIMT_VF, 0);
4545 gimple_call_set_lhs (g, simt_vf);
4546 gimple_seq_add_stmt (dlist, g);
4548 tree t = build_int_cst (unsigned_type_node, 1);
4549 g = gimple_build_assign (simt_lane, INTEGER_CST, t);
4550 gimple_seq_add_stmt (dlist, g);
4552 t = build_int_cst (unsigned_type_node, 0);
4553 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
4554 gimple_seq_add_stmt (dlist, g);
4556 tree body = create_artificial_label (UNKNOWN_LOCATION);
4557 tree header = create_artificial_label (UNKNOWN_LOCATION);
4558 tree end = create_artificial_label (UNKNOWN_LOCATION);
4559 gimple_seq_add_stmt (dlist, gimple_build_goto (header));
4560 gimple_seq_add_stmt (dlist, gimple_build_label (body));
4562 gimple_seq_add_seq (dlist, llist[2]);
4564 g = gimple_build_assign (simt_lane, LSHIFT_EXPR, simt_lane, integer_one_node);
4565 gimple_seq_add_stmt (dlist, g);
4567 gimple_seq_add_stmt (dlist, gimple_build_label (header));
4568 g = gimple_build_cond (LT_EXPR, simt_lane, simt_vf, body, end);
4569 gimple_seq_add_stmt (dlist, g);
4571 gimple_seq_add_stmt (dlist, gimple_build_label (end));
4573 for (int i = 0; i < 2; i++)
4574 if (llist[i])
4576 tree vf = create_tmp_var (unsigned_type_node);
4577 g = gimple_build_call_internal (IFN_GOMP_SIMD_VF, 1, uid);
4578 gimple_call_set_lhs (g, vf);
4579 gimple_seq *seq = i == 0 ? ilist : dlist;
4580 gimple_seq_add_stmt (seq, g);
4581 tree t = build_int_cst (unsigned_type_node, 0);
4582 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
4583 gimple_seq_add_stmt (seq, g);
4584 tree body = create_artificial_label (UNKNOWN_LOCATION);
4585 tree header = create_artificial_label (UNKNOWN_LOCATION);
4586 tree end = create_artificial_label (UNKNOWN_LOCATION);
4587 gimple_seq_add_stmt (seq, gimple_build_goto (header));
4588 gimple_seq_add_stmt (seq, gimple_build_label (body));
4589 gimple_seq_add_seq (seq, llist[i]);
4590 t = build_int_cst (unsigned_type_node, 1);
4591 g = gimple_build_assign (sctx.idx, PLUS_EXPR, sctx.idx, t);
4592 gimple_seq_add_stmt (seq, g);
4593 gimple_seq_add_stmt (seq, gimple_build_label (header));
4594 g = gimple_build_cond (LT_EXPR, sctx.idx, vf, body, end);
4595 gimple_seq_add_stmt (seq, g);
4596 gimple_seq_add_stmt (seq, gimple_build_label (end));
4599 if (sctx.is_simt)
4601 gimple_seq_add_seq (dlist, sctx.simt_dlist);
4602 gimple *g
4603 = gimple_build_call_internal (IFN_GOMP_SIMT_EXIT, 1, simtrec);
4604 gimple_seq_add_stmt (dlist, g);
4607 /* The copyin sequence is not to be executed by the main thread, since
4608 that would result in self-copies. Perhaps not visible to scalars,
4609 but it certainly is to C++ operator=. */
4610 if (copyin_seq)
4612 x = build_call_expr (builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM),
4614 x = build2 (NE_EXPR, boolean_type_node, x,
4615 build_int_cst (TREE_TYPE (x), 0));
4616 x = build3 (COND_EXPR, void_type_node, x, copyin_seq, NULL);
4617 gimplify_and_add (x, ilist);
4620 /* If any copyin variable is passed by reference, we must ensure the
4621 master thread doesn't modify it before it is copied over in all
4622 threads. Similarly for variables in both firstprivate and
4623 lastprivate clauses we need to ensure the lastprivate copying
4624 happens after firstprivate copying in all threads. And similarly
4625 for UDRs if initializer expression refers to omp_orig. */
4626 if (copyin_by_ref || lastprivate_firstprivate || reduction_omp_orig_ref)
4628 /* Don't add any barrier for #pragma omp simd or
4629 #pragma omp distribute. */
4630 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
4631 || gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_FOR)
4632 gimple_seq_add_stmt (ilist, omp_build_barrier (NULL_TREE));
4635 /* If max_vf is non-zero, then we can use only a vectorization factor
4636 up to the max_vf we chose. So stick it into the safelen clause. */
4637 if (sctx.max_vf)
4639 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
4640 OMP_CLAUSE_SAFELEN);
4641 if (c == NULL_TREE
4642 || (TREE_CODE (OMP_CLAUSE_SAFELEN_EXPR (c)) == INTEGER_CST
4643 && compare_tree_int (OMP_CLAUSE_SAFELEN_EXPR (c),
4644 sctx.max_vf) == 1))
4646 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN);
4647 OMP_CLAUSE_SAFELEN_EXPR (c) = build_int_cst (integer_type_node,
4648 sctx.max_vf);
4649 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
4650 gimple_omp_for_set_clauses (ctx->stmt, c);
4656 /* Generate code to implement the LASTPRIVATE clauses.  This is used for
4657 both parallel and workshare constructs.  PREDICATE may be NULL if it's
4658 always true.  Copy-out statements are appended to STMT_LIST.  */
4660 static void
4661 lower_lastprivate_clauses (tree clauses, tree predicate, gimple_seq *stmt_list,
4662 omp_context *ctx)
4664 tree x, c, label = NULL, orig_clauses = clauses;
4665 bool par_clauses = false;
4666 tree simduid = NULL, lastlane = NULL, simtcond = NULL, simtlast = NULL;
4668 /* Early exit if there are no lastprivate or linear clauses.  */
4669 for (; clauses ; clauses = OMP_CLAUSE_CHAIN (clauses))
4670 if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LASTPRIVATE
4671 || (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LINEAR
4672 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (clauses)))
4673 break;
4674 if (clauses == NULL)
4676 /* If this was a workshare clause, see if it had been combined
4677 with its parallel.  In that case, look for the clauses on the
4678 parallel statement itself.  */
4679 if (is_parallel_ctx (ctx))
4680 return;
4682 ctx = ctx->outer;
4683 if (ctx == NULL || !is_parallel_ctx (ctx))
4684 return;
4686 clauses = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
4687 OMP_CLAUSE_LASTPRIVATE);
4688 if (clauses == NULL)
4689 return;
4690 par_clauses = true;
      /* For SIMD loops, note whether this might run in SIMT mode and grab
         the _SIMDUID_ decl used to index the per-lane "omp simd array"s.  */
4693 bool maybe_simt = false;
4694 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
4695 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
4697 maybe_simt = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMT_);
4698 simduid = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMDUID_);
4699 if (simduid)
4700 simduid = OMP_CLAUSE__SIMDUID__DECL (simduid);
      /* When PREDICATE is given, emit a guard so the copy-out runs only
         when it holds; under SIMT the per-lane predicate is first combined
         across lanes with IFN_GOMP_SIMT_VOTE_ANY.  */
4703 if (predicate)
4705 gcond *stmt;
4706 tree label_true, arm1, arm2;
4707 enum tree_code pred_code = TREE_CODE (predicate);
4709 label = create_artificial_label (UNKNOWN_LOCATION);
4710 label_true = create_artificial_label (UNKNOWN_LOCATION);
4711 if (TREE_CODE_CLASS (pred_code) == tcc_comparison)
4713 arm1 = TREE_OPERAND (predicate, 0);
4714 arm2 = TREE_OPERAND (predicate, 1);
4715 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
4716 gimplify_expr (&arm2, stmt_list, NULL, is_gimple_val, fb_rvalue);
4718 else
4720 arm1 = predicate;
4721 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
4722 arm2 = boolean_false_node;
4723 pred_code = NE_EXPR;
4725 if (maybe_simt)
4727 c = build2 (pred_code, boolean_type_node, arm1, arm2);
4728 c = fold_convert (integer_type_node, c);
4729 simtcond = create_tmp_var (integer_type_node);
4730 gimplify_assign (simtcond, c, stmt_list);
4731 gcall *g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY,
4732 1, simtcond);
4733 c = create_tmp_var (integer_type_node);
4734 gimple_call_set_lhs (g, c);
4735 gimple_seq_add_stmt (stmt_list, g);
4736 stmt = gimple_build_cond (NE_EXPR, c, integer_zero_node,
4737 label_true, label);
4739 else
4740 stmt = gimple_build_cond (pred_code, arm1, arm2, label_true, label);
4741 gimple_seq_add_stmt (stmt_list, stmt);
4742 gimple_seq_add_stmt (stmt_list, gimple_build_label (label_true));
      /* Walk the clauses, emitting one copy-out per lastprivate/linear
         variable; C is advanced manually because the walk may continue
         onto the combined parallel's clause list at the bottom.  */
4745 for (c = clauses; c ;)
4747 tree var, new_var;
4748 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
4750 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4751 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
4752 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
4754 var = OMP_CLAUSE_DECL (c);
4755 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4756 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
4757 && is_taskloop_ctx (ctx))
4759 gcc_checking_assert (ctx->outer && is_task_ctx (ctx->outer));
4760 new_var = lookup_decl (var, ctx->outer);
4762 else
4764 new_var = lookup_decl (var, ctx);
4765 /* Avoid uninitialized warnings for lastprivate and
4766 for linear iterators.  */
4767 if (predicate
4768 && (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4769 || OMP_CLAUSE_LINEAR_NO_COPYIN (c)))
4770 TREE_NO_WARNING (new_var) = 1;
      /* If the privatized var was mapped onto an "omp simd array",
         redirect the read to the element written by the last lane,
         obtained once via IFN_GOMP_SIMD_LAST_LANE.  */
4773 if (!maybe_simt && simduid && DECL_HAS_VALUE_EXPR_P (new_var))
4775 tree val = DECL_VALUE_EXPR (new_var);
4776 if (TREE_CODE (val) == ARRAY_REF
4777 && VAR_P (TREE_OPERAND (val, 0))
4778 && lookup_attribute ("omp simd array",
4779 DECL_ATTRIBUTES (TREE_OPERAND (val,
4780 0))))
4782 if (lastlane == NULL)
4784 lastlane = create_tmp_var (unsigned_type_node);
4785 gcall *g
4786 = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
4787 2, simduid,
4788 TREE_OPERAND (val, 1));
4789 gimple_call_set_lhs (g, lastlane);
4790 gimple_seq_add_stmt (stmt_list, g);
4792 new_var = build4 (ARRAY_REF, TREE_TYPE (val),
4793 TREE_OPERAND (val, 0), lastlane,
4794 NULL_TREE, NULL_TREE);
      /* Under SIMT, fetch the value from the last lane whose predicate
         held (IFN_GOMP_SIMT_LAST_LANE + IFN_GOMP_SIMT_XCHG_IDX).  */
4797 else if (maybe_simt)
4799 tree val = (DECL_HAS_VALUE_EXPR_P (new_var)
4800 ? DECL_VALUE_EXPR (new_var)
4801 : new_var);
4802 if (simtlast == NULL)
4804 simtlast = create_tmp_var (unsigned_type_node);
4805 gcall *g = gimple_build_call_internal
4806 (IFN_GOMP_SIMT_LAST_LANE, 1, simtcond);
4807 gimple_call_set_lhs (g, simtlast);
4808 gimple_seq_add_stmt (stmt_list, g);
4810 x = build_call_expr_internal_loc
4811 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_IDX,
4812 TREE_TYPE (val), 2, val, simtlast);
4813 new_var = unshare_expr (new_var);
4814 gimplify_assign (new_var, x, stmt_list);
4815 new_var = unshare_expr (new_var);
      /* Emit any deferred clause-attached sequences before the copy.  */
4818 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4819 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
4821 lower_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
4822 gimple_seq_add_seq (stmt_list,
4823 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
4824 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) = NULL;
4826 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
4827 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
4829 lower_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
4830 gimple_seq_add_seq (stmt_list,
4831 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
4832 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) = NULL;
      /* X is the outer-context lvalue receiving the final value; copy
         NEW_VAR into it with the language's clause assignment hook.  */
4835 x = NULL_TREE;
4836 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4837 && OMP_CLAUSE_LASTPRIVATE_TASKLOOP_IV (c))
4839 gcc_checking_assert (is_taskloop_ctx (ctx));
4840 tree ovar = maybe_lookup_decl_in_outer_ctx (var,
4841 ctx->outer->outer);
4842 if (is_global_var (ovar))
4843 x = ovar;
4845 if (!x)
4846 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_LASTPRIVATE);
4847 if (omp_is_reference (var))
4848 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
4849 x = lang_hooks.decls.omp_clause_assign_op (c, x, new_var);
4850 gimplify_and_add (x, stmt_list);
4852 c = OMP_CLAUSE_CHAIN (c);
4853 if (c == NULL && !par_clauses)
4855 /* If this was a workshare clause, see if it had been combined
4856 with its parallel.  In that case, continue looking for the
4857 clauses also on the parallel statement itself.  */
4858 if (is_parallel_ctx (ctx))
4859 break;
4861 ctx = ctx->outer;
4862 if (ctx == NULL || !is_parallel_ctx (ctx))
4863 break;
4865 c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
4866 OMP_CLAUSE_LASTPRIVATE);
4867 par_clauses = true;
      /* Close the predicate guard opened above, if any.  */
4871 if (label)
4872 gimple_seq_add_stmt (stmt_list, gimple_build_label (label));
4875 /* Lower the OpenACC reductions of CLAUSES for compute axis LEVEL
4876 (which might be a placeholder).  INNER is true if this is an inner
4877 axis of a multi-axis loop.  FORK and JOIN are (optional) fork and
4878 join markers.  Generate the before-loop forking sequence in
4879 FORK_SEQ and the after-loop joining sequence to JOIN_SEQ.  The
4880 general form of these sequences is
4882 GOACC_REDUCTION_SETUP
4883 GOACC_FORK
4884 GOACC_REDUCTION_INIT
4886 GOACC_REDUCTION_FINI
4887 GOACC_JOIN
4888 GOACC_REDUCTION_TEARDOWN.  */
4890 static void
4891 lower_oacc_reductions (location_t loc, tree clauses, tree level, bool inner,
4892 gcall *fork, gcall *join, gimple_seq *fork_seq,
4893 gimple_seq *join_seq, omp_context *ctx)
4895 gimple_seq before_fork = NULL;
4896 gimple_seq after_fork = NULL;
4897 gimple_seq before_join = NULL;
4898 gimple_seq after_join = NULL;
4899 tree init_code = NULL_TREE, fini_code = NULL_TREE,
4900 setup_code = NULL_TREE, teardown_code = NULL_TREE;
4901 unsigned offset = 0;
      /* Process each REDUCTION clause, accumulating the four partial
         sequences that are stitched around FORK/JOIN at the end.  */
4903 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
4904 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
4906 tree orig = OMP_CLAUSE_DECL (c);
4907 tree var = maybe_lookup_decl (orig, ctx);
4908 tree ref_to_res = NULL_TREE;
4909 tree incoming, outgoing, v1, v2, v3;
4910 bool is_private = false;
      /* Canonicalize the reduction code: reduction(-:var) sums partial
         results like reduction(+:var); short-circuit TRUTH_ANDIF/ORIF
         are encoded as BIT_AND/BIT_IOR.  */
4912 enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
4913 if (rcode == MINUS_EXPR)
4914 rcode = PLUS_EXPR;
4915 else if (rcode == TRUTH_ANDIF_EXPR)
4916 rcode = BIT_AND_EXPR;
4917 else if (rcode == TRUTH_ORIF_EXPR)
4918 rcode = BIT_IOR_EXPR;
4919 tree op = build_int_cst (unsigned_type_node, rcode);
4921 if (!var)
4922 var = orig;
4924 incoming = outgoing = var;
4926 if (!inner)
4928 /* See if an outer construct also reduces this variable.  */
4929 omp_context *outer = ctx;
4931 while (omp_context *probe = outer->outer)
4933 enum gimple_code type = gimple_code (probe->stmt);
4934 tree cls;
4936 switch (type)
4938 case GIMPLE_OMP_FOR:
4939 cls = gimple_omp_for_clauses (probe->stmt);
4940 break;
4942 case GIMPLE_OMP_TARGET:
4943 if (gimple_omp_target_kind (probe->stmt)
4944 != GF_OMP_TARGET_KIND_OACC_PARALLEL)
4945 goto do_lookup;
4947 cls = gimple_omp_target_clauses (probe->stmt);
4948 break;
4950 default:
4951 goto do_lookup;
4954 outer = probe;
4955 for (; cls; cls = OMP_CLAUSE_CHAIN (cls))
4956 if (OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_REDUCTION
4957 && orig == OMP_CLAUSE_DECL (cls))
4959 incoming = outgoing = lookup_decl (orig, probe);
4960 goto has_outer_reduction;
4962 else if ((OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_FIRSTPRIVATE
4963 || OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_PRIVATE)
4964 && orig == OMP_CLAUSE_DECL (cls))
4966 is_private = true;
4967 goto do_lookup;
4971 do_lookup:
4972 /* This is the outermost construct with this reduction,
4973 see if there's a mapping for it.  */
4974 if (gimple_code (outer->stmt) == GIMPLE_OMP_TARGET
4975 && maybe_lookup_field (orig, outer) && !is_private)
4977 ref_to_res = build_receiver_ref (orig, false, outer);
4978 if (omp_is_reference (orig))
4979 ref_to_res = build_simple_mem_ref (ref_to_res);
4981 tree type = TREE_TYPE (var);
4982 if (POINTER_TYPE_P (type))
4983 type = TREE_TYPE (type);
      /* Seed the reduction with the operation's identity value.  */
4985 outgoing = var;
4986 incoming = omp_reduction_init_op (loc, rcode, type);
4988 else
4990 /* Try to look at enclosing contexts for reduction var,
4991 use original if no mapping found.  */
4992 tree t = NULL_TREE;
4993 omp_context *c = ctx->outer;
4994 while (c && !t)
4996 t = maybe_lookup_decl (orig, c);
4997 c = c->outer;
4999 incoming = outgoing = (t ? t : orig);
5002 has_outer_reduction:;
5005 if (!ref_to_res)
5006 ref_to_res = integer_zero_node;
      /* For a reference-typed var, create pointer temporaries v1..v3 for
         the setup/init/fini stages (and, on the outermost axis, a fresh
         object for VAR to point at), then operate on what they point to.  */
5008 if (omp_is_reference (orig))
5010 tree type = TREE_TYPE (var);
5011 const char *id = IDENTIFIER_POINTER (DECL_NAME (var));
5013 if (!inner)
5015 tree x = create_tmp_var (TREE_TYPE (type), id);
5016 gimplify_assign (var, build_fold_addr_expr (x), fork_seq);
5019 v1 = create_tmp_var (type, id);
5020 v2 = create_tmp_var (type, id);
5021 v3 = create_tmp_var (type, id);
5023 gimplify_assign (v1, var, fork_seq);
5024 gimplify_assign (v2, var, fork_seq);
5025 gimplify_assign (v3, var, fork_seq);
5027 var = build_simple_mem_ref (var);
5028 v1 = build_simple_mem_ref (v1);
5029 v2 = build_simple_mem_ref (v2);
5030 v3 = build_simple_mem_ref (v3);
5031 outgoing = build_simple_mem_ref (outgoing);
5033 if (!TREE_CONSTANT (incoming))
5034 incoming = build_simple_mem_ref (incoming);
5036 else
5037 v1 = v2 = v3 = var;
5039 /* Determine position in reduction buffer, which may be used
5040 by target.  */
5041 enum machine_mode mode = TYPE_MODE (TREE_TYPE (var));
5042 unsigned align = GET_MODE_ALIGNMENT (mode) /  BITS_PER_UNIT;
5043 offset = (offset + align - 1) & ~(align - 1);
5044 tree off = build_int_cst (sizetype, offset);
5045 offset += GET_MODE_SIZE (mode);
      /* The stage selectors are constants shared by all clauses; build
         them lazily on the first reduction.  */
5047 if (!init_code)
5049 init_code = build_int_cst (integer_type_node,
5050 IFN_GOACC_REDUCTION_INIT);
5051 fini_code = build_int_cst (integer_type_node,
5052 IFN_GOACC_REDUCTION_FINI);
5053 setup_code = build_int_cst (integer_type_node,
5054 IFN_GOACC_REDUCTION_SETUP);
5055 teardown_code = build_int_cst (integer_type_node,
5056 IFN_GOACC_REDUCTION_TEARDOWN);
      /* Build the four IFN_GOACC_REDUCTION calls; each takes the stage
         code, the result ref, a value, the axis LEVEL, the reduction op
         and the buffer offset computed above.  */
5059 tree setup_call
5060 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5061 TREE_TYPE (var), 6, setup_code,
5062 unshare_expr (ref_to_res),
5063 incoming, level, op, off);
5064 tree init_call
5065 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5066 TREE_TYPE (var), 6, init_code,
5067 unshare_expr (ref_to_res),
5068 v1, level, op, off);
5069 tree fini_call
5070 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5071 TREE_TYPE (var), 6, fini_code,
5072 unshare_expr (ref_to_res),
5073 v2, level, op, off);
5074 tree teardown_call
5075 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5076 TREE_TYPE (var), 6, teardown_code,
5077 ref_to_res, v3, level, op, off);
5079 gimplify_assign (v1, setup_call, &before_fork);
5080 gimplify_assign (v2, init_call, &after_fork);
5081 gimplify_assign (v3, fini_call, &before_join);
5082 gimplify_assign (outgoing, teardown_call, &after_join);
5085 /* Now stitch things together.  */
5086 gimple_seq_add_seq (fork_seq, before_fork);
5087 if (fork)
5088 gimple_seq_add_stmt (fork_seq, fork);
5089 gimple_seq_add_seq (fork_seq, after_fork);
5091 gimple_seq_add_seq (join_seq, before_join);
5092 if (join)
5093 gimple_seq_add_stmt (join_seq, join);
5094 gimple_seq_add_seq (join_seq, after_join);
5097 /* Generate code to implement the REDUCTION clauses.  The merge
     statements are appended to STMT_SEQP.  */
5099 static void
5100 lower_reduction_clauses (tree clauses, gimple_seq *stmt_seqp, omp_context *ctx)
5102 gimple_seq sub_seq = NULL;
5103 gimple *stmt;
5104 tree x, c;
5105 int count = 0;
5107 /* OpenACC loop reductions are handled elsewhere.  */
5108 if (is_gimple_omp_oacc (ctx->stmt))
5109 return;
5111 /* SIMD reductions are handled in lower_rec_input_clauses.  */
5112 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
5113 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
5114 return;
5116 /* First see if there is exactly one reduction clause.  Use OMP_ATOMIC
5117 update in that case, otherwise use a lock.  */
5118 for (c = clauses; c && count < 2; c = OMP_CLAUSE_CHAIN (c))
5119 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
5121 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
5122 || TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
5124 /* Never use OMP_ATOMIC for array reductions or UDRs.  */
5125 count = -1;
5126 break;
5128 count++;
5131 if (count == 0)
5132 return;
5134 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
5136 tree var, ref, new_var, orig_var;
5137 enum tree_code code;
5138 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
5140 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5141 continue;
      /* Peel MEM_REF/POINTER_PLUS/ADDR_EXPR wrappers off array-section
         decls to reach the underlying variable.  */
5143 enum omp_clause_code ccode = OMP_CLAUSE_REDUCTION;
5144 orig_var = var = OMP_CLAUSE_DECL (c);
5145 if (TREE_CODE (var) == MEM_REF)
5147 var = TREE_OPERAND (var, 0);
5148 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
5149 var = TREE_OPERAND (var, 0);
5150 if (TREE_CODE (var) == ADDR_EXPR)
5151 var = TREE_OPERAND (var, 0);
5152 else
5154 /* If this is a pointer or referenced based array
5155 section, the var could be private in the outer
5156 context e.g. on orphaned loop construct.  Pretend this
5157 is private variable's outer reference.  */
5158 ccode = OMP_CLAUSE_PRIVATE;
5159 if (TREE_CODE (var) == INDIRECT_REF)
5160 var = TREE_OPERAND (var, 0);
5162 orig_var = var;
5163 if (is_variable_sized (var))
5165 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
5166 var = DECL_VALUE_EXPR (var);
5167 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
5168 var = TREE_OPERAND (var, 0);
5169 gcc_assert (DECL_P (var));
5172 new_var = lookup_decl (var, ctx);
5173 if (var == OMP_CLAUSE_DECL (c) && omp_is_reference (var))
5174 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
5175 ref = build_outer_var_ref (var, ctx, ccode);
5176 code = OMP_CLAUSE_REDUCTION_CODE (c);
5178 /* reduction(-:var) sums up the partial results, so it acts
5179 identically to reduction(+:var).  */
5180 if (code == MINUS_EXPR)
5181 code = PLUS_EXPR;
      /* Exactly one scalar reduction: merge into the outer var with a
         single OMP_ATOMIC update, no lock needed.  */
5183 if (count == 1)
5185 tree addr = build_fold_addr_expr_loc (clause_loc, ref);
5187 addr = save_expr (addr);
5188 ref = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (addr)), addr);
5189 x = fold_build2_loc (clause_loc, code, TREE_TYPE (ref), ref, new_var);
5190 x = build2 (OMP_ATOMIC, void_type_node, addr, x);
5191 gimplify_and_add (x, stmt_seqp);
5192 return;
      /* Array-section reduction: loop over the elements with index I,
         merging each private element into the corresponding outer one.  */
5194 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
5196 tree d = OMP_CLAUSE_DECL (c);
5197 tree type = TREE_TYPE (d);
5198 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
5199 tree i = create_tmp_var (TREE_TYPE (v), NULL);
5200 tree ptype = build_pointer_type (TREE_TYPE (type));
5201 tree bias = TREE_OPERAND (d, 1);
5202 d = TREE_OPERAND (d, 0);
5203 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
5205 tree b = TREE_OPERAND (d, 1);
5206 b = maybe_lookup_decl (b, ctx);
5207 if (b == NULL)
5209 b = TREE_OPERAND (d, 1);
5210 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
5212 if (integer_zerop (bias))
5213 bias = b;
5214 else
5216 bias = fold_convert_loc (clause_loc, TREE_TYPE (b), bias);
5217 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
5218 TREE_TYPE (b), b, bias);
5220 d = TREE_OPERAND (d, 0);
5222 /* For ref build_outer_var_ref already performs this, so
5223 only new_var needs a dereference.  */
5224 if (TREE_CODE (d) == INDIRECT_REF)
5226 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
5227 gcc_assert (omp_is_reference (var) && var == orig_var);
5229 else if (TREE_CODE (d) == ADDR_EXPR)
5231 if (orig_var == var)
5233 new_var = build_fold_addr_expr (new_var);
5234 ref = build_fold_addr_expr (ref);
5237 else
5239 gcc_assert (orig_var == var);
5240 if (omp_is_reference (var))
5241 ref = build_fold_addr_expr (ref);
5243 if (DECL_P (v))
5245 tree t = maybe_lookup_decl (v, ctx);
5246 if (t)
5247 v = t;
5248 else
5249 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
5250 gimplify_expr (&v, stmt_seqp, NULL, is_gimple_val, fb_rvalue);
5252 if (!integer_zerop (bias))
5254 bias = fold_convert_loc (clause_loc, sizetype, bias);
5255 new_var = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
5256 TREE_TYPE (new_var), new_var,
5257 unshare_expr (bias));
5258 ref = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
5259 TREE_TYPE (ref), ref, bias);
      /* Set up walking pointers NEW_VAR (private) and REF (outer) and
         emit the element-merge loop into SUB_SEQ.  */
5261 new_var = fold_convert_loc (clause_loc, ptype, new_var);
5262 ref = fold_convert_loc (clause_loc, ptype, ref);
5263 tree m = create_tmp_var (ptype, NULL);
5264 gimplify_assign (m, new_var, stmt_seqp);
5265 new_var = m;
5266 m = create_tmp_var (ptype, NULL);
5267 gimplify_assign (m, ref, stmt_seqp);
5268 ref = m;
5269 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), stmt_seqp);
5270 tree body = create_artificial_label (UNKNOWN_LOCATION);
5271 tree end = create_artificial_label (UNKNOWN_LOCATION);
5272 gimple_seq_add_stmt (&sub_seq, gimple_build_label (body));
5273 tree priv = build_simple_mem_ref_loc (clause_loc, new_var);
5274 tree out = build_simple_mem_ref_loc (clause_loc, ref);
5275 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5277 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
5278 tree decl_placeholder
5279 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
5280 SET_DECL_VALUE_EXPR (placeholder, out);
5281 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
5282 SET_DECL_VALUE_EXPR (decl_placeholder, priv);
5283 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
5284 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
5285 gimple_seq_add_seq (&sub_seq,
5286 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
5287 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5288 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
5289 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
5291 else
5293 x = build2 (code, TREE_TYPE (out), out, priv);
5294 out = unshare_expr (out);
5295 gimplify_assign (out, x, &sub_seq);
5297 gimple *g = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
5298 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5299 gimple_seq_add_stmt (&sub_seq, g);
5300 g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
5301 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5302 gimple_seq_add_stmt (&sub_seq, g);
5303 g = gimple_build_assign (i, PLUS_EXPR, i,
5304 build_int_cst (TREE_TYPE (i), 1));
5305 gimple_seq_add_stmt (&sub_seq, g);
5306 g = gimple_build_cond (LE_EXPR, i, v, body, end);
5307 gimple_seq_add_stmt (&sub_seq, g);
5308 gimple_seq_add_stmt (&sub_seq, gimple_build_label (end));
      /* User-defined reduction: substitute the placeholder with the outer
         ref and splice in the lowered merge sequence.  */
5310 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5312 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
5314 if (omp_is_reference (var)
5315 && !useless_type_conversion_p (TREE_TYPE (placeholder),
5316 TREE_TYPE (ref)))
5317 ref = build_fold_addr_expr_loc (clause_loc, ref);
5318 SET_DECL_VALUE_EXPR (placeholder, ref);
5319 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
5320 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
5321 gimple_seq_add_seq (&sub_seq, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
5322 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5323 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
5325 else
5327 x = build2 (code, TREE_TYPE (ref), ref, new_var);
5328 ref = build_outer_var_ref (var, ctx);
5329 gimplify_assign (ref, x, &sub_seq);
      /* Multiple reductions: bracket the merge sequence with
         GOMP_atomic_start/GOMP_atomic_end so the read-modify-writes run
         under the runtime lock.  */
5333 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START),
5335 gimple_seq_add_stmt (stmt_seqp, stmt);
5337 gimple_seq_add_seq (stmt_seqp, sub_seq);
5339 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END),
5341 gimple_seq_add_stmt (stmt_seqp, stmt);
5345 /* Generate code to implement the COPYPRIVATE clauses. */
/* For each COPYPRIVATE clause in CLAUSES, emit into *SLIST the store of the
   broadcasting thread's value into the sender record, and into *RLIST the
   copy from the received record into each other thread's variable.  SLIST
   runs in the thread that executed the single region; RLIST runs in the
   threads that received the broadcast (see lower_omp_single_copy).  */
5347 static void
5348 lower_copyprivate_clauses (tree clauses, gimple_seq *slist, gimple_seq *rlist,
5349 omp_context *ctx)
5351 tree c;
5353 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
5355 tree var, new_var, ref, x;
5356 bool by_ref;
5357 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
5359 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYPRIVATE)
5360 continue;
5362 var = OMP_CLAUSE_DECL (c);
/* Decide whether the variable travels by address or by value inside
   the broadcast record.  */
5363 by_ref = use_pointer_for_field (var, NULL);
/* Sender side: store the value (or its address) into the record.  */
5365 ref = build_sender_ref (var, ctx);
5366 x = new_var = lookup_decl_in_outer_ctx (var, ctx);
5367 if (by_ref)
5369 x = build_fold_addr_expr_loc (clause_loc, new_var);
5370 x = fold_convert_loc (clause_loc, TREE_TYPE (ref), x);
5372 gimplify_assign (ref, x, slist);
/* Receiver side: load from the received record and assign into the
   thread's own copy, using the language hook so C++ copy assignment
   operators etc. are honored.  */
5374 ref = build_receiver_ref (var, false, ctx);
5375 if (by_ref)
5377 ref = fold_convert_loc (clause_loc,
5378 build_pointer_type (TREE_TYPE (new_var)),
5379 ref);
5380 ref = build_fold_indirect_ref_loc (clause_loc, ref);
5382 if (omp_is_reference (var))
/* Reference-typed vars need one more dereference on both sides.  */
5384 ref = fold_convert_loc (clause_loc, TREE_TYPE (new_var), ref);
5385 ref = build_simple_mem_ref_loc (clause_loc, ref);
5386 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
5388 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, ref);
5389 gimplify_and_add (x, rlist);
5394 /* Generate code to implement the clauses, FIRSTPRIVATE, COPYIN, LASTPRIVATE,
5395 and REDUCTION from the sender (aka parent) side. */
/* Stores into the outgoing data-sharing record are appended to *ILIST
   (executed before the region starts); copies back out of the record are
   appended to *OLIST (executed after the region completes).  */
5397 static void
5398 lower_send_clauses (tree clauses, gimple_seq *ilist, gimple_seq *olist,
5399 omp_context *ctx)
5401 tree c, t;
5402 int ignored_looptemp = 0;
5403 bool is_taskloop = false;
5405 /* For taskloop, ignore first two _looptemp_ clauses, those are initialized
5406 by GOMP_taskloop. */
5407 if (is_task_ctx (ctx) && gimple_omp_task_taskloop_p (ctx->stmt))
5409 ignored_looptemp = 2;
5410 is_taskloop = true;
5413 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
5415 tree val, ref, x, var;
5416 bool by_ref, do_in = false, do_out = false;
5417 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
/* First filter: only clause kinds that move data through the sender
   record are of interest; everything else is skipped.  */
5419 switch (OMP_CLAUSE_CODE (c))
5421 case OMP_CLAUSE_PRIVATE:
5422 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
5423 break;
5424 continue;
5425 case OMP_CLAUSE_FIRSTPRIVATE:
5426 case OMP_CLAUSE_COPYIN:
5427 case OMP_CLAUSE_LASTPRIVATE:
5428 case OMP_CLAUSE_REDUCTION:
5429 break;
5430 case OMP_CLAUSE_SHARED:
5431 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
5432 break;
5433 continue;
5434 case OMP_CLAUSE__LOOPTEMP_:
5435 if (ignored_looptemp)
5437 ignored_looptemp--;
5438 continue;
5440 break;
5441 default:
5442 continue;
5445 val = OMP_CLAUSE_DECL (c);
/* Array reductions are represented as a MEM_REF; peel back to the
   underlying base declaration.  */
5446 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
5447 && TREE_CODE (val) == MEM_REF)
5449 val = TREE_OPERAND (val, 0);
5450 if (TREE_CODE (val) == POINTER_PLUS_EXPR)
5451 val = TREE_OPERAND (val, 0);
5452 if (TREE_CODE (val) == INDIRECT_REF
5453 || TREE_CODE (val) == ADDR_EXPR)
5454 val = TREE_OPERAND (val, 0);
5455 if (is_variable_sized (val))
5456 continue;
5459 /* For OMP_CLAUSE_SHARED_FIRSTPRIVATE, look beyond the
5460 outer taskloop region. */
5461 omp_context *ctx_for_o = ctx;
5462 if (is_taskloop
5463 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
5464 && OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
5465 ctx_for_o = ctx->outer;
5467 var = lookup_decl_in_outer_ctx (val, ctx_for_o);
/* Globals are directly visible in the child; nothing to send, except
   for COPYIN which must still broadcast the master's value.  */
5469 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYIN
5470 && is_global_var (var))
5471 continue;
5473 t = omp_member_access_dummy_var (var);
5474 if (t)
5476 var = DECL_VALUE_EXPR (var);
5477 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx_for_o);
5478 if (o != t)
5479 var = unshare_and_remap (var, t, o);
5480 else
5481 var = unshare_expr (var);
5484 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
5486 /* Handle taskloop firstprivate/lastprivate, where the
5487 lastprivate on GIMPLE_OMP_TASK is represented as
5488 OMP_CLAUSE_SHARED_FIRSTPRIVATE. */
5489 tree f = lookup_sfield ((splay_tree_key) &DECL_UID (val), ctx);
5490 x = omp_build_component_ref (ctx->sender_decl, f);
5491 if (use_pointer_for_field (val, ctx))
5492 var = build_fold_addr_expr (var);
5493 gimplify_assign (x, var, ilist);
5494 DECL_ABSTRACT_ORIGIN (f) = NULL;
5495 continue;
5498 if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
5499 || val == OMP_CLAUSE_DECL (c))
5500 && is_variable_sized (val))
5501 continue;
5502 by_ref = use_pointer_for_field (val, NULL);
/* Second switch: decide per clause kind whether the value is copied
   into the record (do_in), back out of it (do_out), or both.  */
5504 switch (OMP_CLAUSE_CODE (c))
5506 case OMP_CLAUSE_FIRSTPRIVATE:
5507 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c)
5508 && !by_ref
5509 && is_task_ctx (ctx))
5510 TREE_NO_WARNING (var) = 1;
5511 do_in = true;
5512 break;
5514 case OMP_CLAUSE_PRIVATE:
5515 case OMP_CLAUSE_COPYIN:
5516 case OMP_CLAUSE__LOOPTEMP_:
5517 do_in = true;
5518 break;
5520 case OMP_CLAUSE_LASTPRIVATE:
5521 if (by_ref || omp_is_reference (val))
5523 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
5524 continue;
5525 do_in = true;
5527 else
5529 do_out = true;
5530 if (lang_hooks.decls.omp_private_outer_ref (val))
5531 do_in = true;
5533 break;
5535 case OMP_CLAUSE_REDUCTION:
5536 do_in = true;
5537 if (val == OMP_CLAUSE_DECL (c))
5538 do_out = !(by_ref || omp_is_reference (val));
5539 else
5540 by_ref = TREE_CODE (TREE_TYPE (val)) == ARRAY_TYPE;
5541 break;
5543 default:
5544 gcc_unreachable ();
5547 if (do_in)
5549 ref = build_sender_ref (val, ctx);
5550 x = by_ref ? build_fold_addr_expr_loc (clause_loc, var) : var;
5551 gimplify_assign (ref, x, ilist);
5552 if (is_task_ctx (ctx))
/* Clear abstract origin on the field so debug info generation does
   not treat the record field as the user variable.  */
5553 DECL_ABSTRACT_ORIGIN (TREE_OPERAND (ref, 1)) = NULL;
5556 if (do_out)
5558 ref = build_sender_ref (val, ctx);
5559 gimplify_assign (var, ref, olist);
5564 /* Generate code to implement SHARED from the sender (aka parent)
5565 side. This is trickier, since GIMPLE_OMP_PARALLEL_CLAUSES doesn't
5566 list things that got automatically shared. */
/* Instead of walking clauses, walk the fields of the (s)record type that
   scan-phase built: each field's DECL_ABSTRACT_ORIGIN points back at the
   original shared variable.  Stores into the record go to *ILIST, copies
   back out go to *OLIST.  */
5568 static void
5569 lower_send_shared_vars (gimple_seq *ilist, gimple_seq *olist, omp_context *ctx)
5571 tree var, ovar, nvar, t, f, x, record_type;
5573 if (ctx->record_type == NULL)
5574 return;
5576 record_type = ctx->srecord_type ? ctx->srecord_type : ctx->record_type;
5577 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
5579 ovar = DECL_ABSTRACT_ORIGIN (f);
/* Skip fields not originating from a user variable.  */
5580 if (!ovar || TREE_CODE (ovar) == FIELD_DECL)
5581 continue;
/* Only variables remapped in this context (i.e. actually shared via
   the record) need sending.  */
5583 nvar = maybe_lookup_decl (ovar, ctx);
5584 if (!nvar || !DECL_HAS_VALUE_EXPR_P (nvar))
5585 continue;
5587 /* If CTX is a nested parallel directive. Find the immediately
5588 enclosing parallel or workshare construct that contains a
5589 mapping for OVAR. */
5590 var = lookup_decl_in_outer_ctx (ovar, ctx);
5592 t = omp_member_access_dummy_var (var);
5593 if (t)
5595 var = DECL_VALUE_EXPR (var);
5596 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
5597 if (o != t)
5598 var = unshare_and_remap (var, t, o);
5599 else
5600 var = unshare_expr (var);
5603 if (use_pointer_for_field (ovar, ctx))
/* Shared by address: send &var, nothing to copy back.  */
5605 x = build_sender_ref (ovar, ctx);
5606 var = build_fold_addr_expr (var);
5607 gimplify_assign (x, var, ilist);
5609 else
5611 x = build_sender_ref (ovar, ctx);
5612 gimplify_assign (x, var, ilist);
5614 if (!TREE_READONLY (var)
5615 /* We don't need to receive a new reference to a result
5616 or parm decl. In fact we may not store to it as we will
5617 invalidate any pending RSO and generate wrong gimple
5618 during inlining. */
5619 && !((TREE_CODE (var) == RESULT_DECL
5620 || TREE_CODE (var) == PARM_DECL)
5621 && DECL_BY_REFERENCE (var)))
5623 x = build_sender_ref (ovar, ctx);
5624 gimplify_assign (var, x, olist);
5630 /* Emit an OpenACC head marker call, encapsulating the partitioning and
5631 other information that must be processed by the target compiler.
5632 Return the maximum number of dimensions the associated loop might
5633 be partitioned over. */
/* The marker is an IFN_UNIQUE (OACC_HEAD_MARK) internal call appended to
   *SEQ; its arguments carry the level count, an OLF_* tag bitmask, and
   optionally the gang static argument.  DDVAR is the artificial data
   dependence variable threaded through all the markers.  */
5635 static unsigned
5636 lower_oacc_head_mark (location_t loc, tree ddvar, tree clauses,
5637 gimple_seq *seq, omp_context *ctx)
5639 unsigned levels = 0;
5640 unsigned tag = 0;
5641 tree gang_static = NULL_TREE;
5642 auto_vec<tree, 5> args;
5644 args.quick_push (build_int_cst
5645 (integer_type_node, IFN_UNIQUE_OACC_HEAD_MARK));
5646 args.quick_push (ddvar);
/* Accumulate the partitioning tag and count explicit GANG/WORKER/VECTOR
   levels requested by the loop clauses.  */
5647 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
5649 switch (OMP_CLAUSE_CODE (c))
5651 case OMP_CLAUSE_GANG:
5652 tag |= OLF_DIM_GANG;
5653 gang_static = OMP_CLAUSE_GANG_STATIC_EXPR (c);
5654 /* static:* is represented by -1, and we can ignore it, as
5655 scheduling is always static. */
5656 if (gang_static && integer_minus_onep (gang_static))
5657 gang_static = NULL_TREE;
5658 levels++;
5659 break;
5661 case OMP_CLAUSE_WORKER:
5662 tag |= OLF_DIM_WORKER;
5663 levels++;
5664 break;
5666 case OMP_CLAUSE_VECTOR:
5667 tag |= OLF_DIM_VECTOR;
5668 levels++;
5669 break;
5671 case OMP_CLAUSE_SEQ:
5672 tag |= OLF_SEQ;
5673 break;
5675 case OMP_CLAUSE_AUTO:
5676 tag |= OLF_AUTO;
5677 break;
5679 case OMP_CLAUSE_INDEPENDENT:
5680 tag |= OLF_INDEPENDENT;
5681 break;
5683 case OMP_CLAUSE_TILE:
5684 tag |= OLF_TILE;
5685 break;
5687 default:
5688 continue;
5692 if (gang_static)
5694 if (DECL_P (gang_static))
5695 gang_static = build_outer_var_ref (gang_static, ctx);
5696 tag |= OLF_GANG_STATIC;
5699 /* In a parallel region, loops are implicitly INDEPENDENT. */
5700 omp_context *tgt = enclosing_target_ctx (ctx);
5701 if (!tgt || is_oacc_parallel (tgt))
5702 tag |= OLF_INDEPENDENT;
5704 if (tag & OLF_TILE)
5705 /* Tiling could use all 3 levels. */
5706 levels = 3;
5707 else
5709 /* A loop lacking SEQ, GANG, WORKER and/or VECTOR could be AUTO.
5710 Ensure at least one level, or 2 for possible auto
5711 partitioning */
5712 bool maybe_auto = !(tag & (((GOMP_DIM_MASK (GOMP_DIM_MAX) - 1)
5713 << OLF_DIM_BASE) | OLF_SEQ));
5715 if (levels < 1u + maybe_auto)
5716 levels = 1u + maybe_auto;
5719 args.quick_push (build_int_cst (integer_type_node, levels));
5720 args.quick_push (build_int_cst (integer_type_node, tag));
5721 if (gang_static)
5722 args.quick_push (gang_static);
5724 gcall *call = gimple_build_call_internal_vec (IFN_UNIQUE, args);
5725 gimple_set_location (call, loc);
5726 gimple_set_lhs (call, ddvar);
5727 gimple_seq_add_stmt (seq, call);
5729 return levels;
5732 /* Emit an OpenACC loop head or tail marker to SEQ.  HEAD selects between
5733 the HEAD_MARK and TAIL_MARK internal-fn kinds.  TOFOLLOW, if non-NULL,
   is passed as an extra argument (the partitioning level of the enclosed
   region in lower_oacc_head_tail); DDVAR is the data dependence variable
   threaded through the markers.  */
5735 static void
5736 lower_oacc_loop_marker (location_t loc, tree ddvar, bool head,
5737 tree tofollow, gimple_seq *seq)
5739 int marker_kind = (head ? IFN_UNIQUE_OACC_HEAD_MARK
5740 : IFN_UNIQUE_OACC_TAIL_MARK);
5741 tree marker = build_int_cst (integer_type_node, marker_kind);
/* The trailing TOFOLLOW argument is simply omitted when NULL.  */
5742 int nargs = 2 + (tofollow != NULL_TREE);
5743 gcall *call = gimple_build_call_internal (IFN_UNIQUE, nargs,
5744 marker, ddvar, tofollow);
5745 gimple_set_location (call, loc);
5746 gimple_set_lhs (call, ddvar);
5747 gimple_seq_add_stmt (seq, call);
5750 /* Generate the before and after OpenACC loop sequences. CLAUSES are
5751 the loop clauses, from which we extract reductions. Initialize
5752 HEAD and TAIL. */
/* One fork/join pair is emitted per potential partitioning level, with
   fork sequences appended to HEAD in order and join sequences prepended
   to TAIL, so the join order mirrors the fork order.  */
5754 static void
5755 lower_oacc_head_tail (location_t loc, tree clauses,
5756 gimple_seq *head, gimple_seq *tail, omp_context *ctx)
5758 bool inner = false;
5759 tree ddvar = create_tmp_var (integer_type_node, ".data_dep");
5760 gimple_seq_add_stmt (head, gimple_build_assign (ddvar, integer_zero_node));
5762 unsigned count = lower_oacc_head_mark (loc, ddvar, clauses, head, ctx);
5763 tree fork_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_FORK);
5764 tree join_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_JOIN);
5766 gcc_assert (count);
5767 for (unsigned done = 1; count; count--, done++)
5769 gimple_seq fork_seq = NULL;
5770 gimple_seq join_seq = NULL;
/* -1 is a placeholder; the oaccdevlow pass fills in the real axis.  */
5772 tree place = build_int_cst (integer_type_node, -1);
5773 gcall *fork = gimple_build_call_internal (IFN_UNIQUE, 3,
5774 fork_kind, ddvar, place);
5775 gimple_set_location (fork, loc);
5776 gimple_set_lhs (fork, ddvar);
5778 gcall *join = gimple_build_call_internal (IFN_UNIQUE, 3,
5779 join_kind, ddvar, place);
5780 gimple_set_location (join, loc);
5781 gimple_set_lhs (join, ddvar);
5783 /* Mark the beginning of this level sequence. */
5784 if (inner)
5785 lower_oacc_loop_marker (loc, ddvar, true,
5786 build_int_cst (integer_type_node, count),
5787 &fork_seq);
5788 lower_oacc_loop_marker (loc, ddvar, false,
5789 build_int_cst (integer_type_node, done),
5790 &join_seq);
5792 lower_oacc_reductions (loc, clauses, place, inner,
5793 fork, join, &fork_seq, &join_seq, ctx);
5795 /* Append this level to head. */
5796 gimple_seq_add_seq (head, fork_seq);
5797 /* Prepend it to tail. */
5798 gimple_seq_add_seq (&join_seq, *tail);
5799 *tail = join_seq;
5801 inner = true;
5804 /* Mark the end of the sequence. */
5805 lower_oacc_loop_marker (loc, ddvar, true, NULL_TREE, head);
5806 lower_oacc_loop_marker (loc, ddvar, false, NULL_TREE, tail);
5809 /* If exceptions are enabled, wrap the statements in BODY in a MUST_NOT_THROW
5810 catch handler and return it. This prevents programs from violating the
5811 structured block semantics with throws. */
/* Returns BODY unchanged when -fno-exceptions; otherwise returns a new
   sequence containing a GIMPLE_TRY whose handler either runs the frontend's
   cleanup action (e.g. std::terminate for C++) or falls back to trap.  */
5813 static gimple_seq
5814 maybe_catch_exception (gimple_seq body)
5816 gimple *g;
5817 tree decl;
5819 if (!flag_exceptions)
5820 return body;
5822 if (lang_hooks.eh_protect_cleanup_actions != NULL)
5823 decl = lang_hooks.eh_protect_cleanup_actions ();
5824 else
5825 decl = builtin_decl_explicit (BUILT_IN_TRAP);
5827 g = gimple_build_eh_must_not_throw (decl);
5828 g = gimple_build_try (body, gimple_seq_alloc_with_stmt (g),
5829 GIMPLE_TRY_CATCH);
5831 return gimple_seq_alloc_with_stmt (g);
5835 /* Routines to lower OMP directives into OMP-GIMPLE. */
5837 /* If ctx is a worksharing context inside of a cancellable parallel
5838 region and it isn't nowait, add lhs to its GIMPLE_OMP_RETURN
5839 and conditional branch to parallel's cancel_label to handle
5840 cancellation in the implicit barrier. */
/* BODY must end with the construct's GIMPLE_OMP_RETURN (asserted below);
   the emitted conditional branches to the enclosing parallel's
   cancel_label when the barrier reports cancellation.  */
5842 static void
5843 maybe_add_implicit_barrier_cancel (omp_context *ctx, gimple_seq *body)
5845 gimple *omp_return = gimple_seq_last_stmt (*body);
5846 gcc_assert (gimple_code (omp_return) == GIMPLE_OMP_RETURN);
5847 if (gimple_omp_return_nowait_p (omp_return))
5848 return;
5849 if (ctx->outer
5850 && gimple_code (ctx->outer->stmt) == GIMPLE_OMP_PARALLEL
5851 && ctx->outer->cancellable)
/* Use GOMP_cancel's return type for the flag so the comparison below
   matches what the barrier expansion will produce.  */
5853 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_CANCEL);
5854 tree c_bool_type = TREE_TYPE (TREE_TYPE (fndecl));
5855 tree lhs = create_tmp_var (c_bool_type);
5856 gimple_omp_return_set_lhs (omp_return, lhs);
5857 tree fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
5858 gimple *g = gimple_build_cond (NE_EXPR, lhs,
5859 fold_convert (c_bool_type,
5860 boolean_false_node),
5861 ctx->outer->cancel_label, fallthru_label);
5862 gimple_seq_add_stmt (body, g);
5863 gimple_seq_add_stmt (body, gimple_build_label (fallthru_label));
5867 /* Lower the OpenMP sections directive in the current statement in GSI_P.
5868 CTX is the enclosing OMP context for the current statement. */
/* Builds: input-clause setup (ilist), the sections statement plus the
   sections-switch, per-section lowered bodies, a continue statement using
   a fresh .section control variable, reduction merges (olist), destructor
   code (dlist), and the final OMP return with optional cancellation
   handling.  */
5870 static void
5871 lower_omp_sections (gimple_stmt_iterator *gsi_p, omp_context *ctx)
5873 tree block, control;
5874 gimple_stmt_iterator tgsi;
5875 gomp_sections *stmt;
5876 gimple *t;
5877 gbind *new_stmt, *bind;
5878 gimple_seq ilist, dlist, olist, new_body;
5880 stmt = as_a <gomp_sections *> (gsi_stmt (*gsi_p));
5882 push_gimplify_context ();
5884 dlist = NULL;
5885 ilist = NULL;
5886 lower_rec_input_clauses (gimple_omp_sections_clauses (stmt),
5887 &ilist, &dlist, ctx, NULL);
/* Lower each contained GIMPLE_OMP_SECTION body in place, splicing the
   lowered statements after the section statement.  */
5889 new_body = gimple_omp_body (stmt);
5890 gimple_omp_set_body (stmt, NULL);
5891 tgsi = gsi_start (new_body);
5892 for (; !gsi_end_p (tgsi); gsi_next (&tgsi))
5894 omp_context *sctx;
5895 gimple *sec_start;
5897 sec_start = gsi_stmt (tgsi);
5898 sctx = maybe_lookup_ctx (sec_start);
5899 gcc_assert (sctx);
5901 lower_omp (gimple_omp_body_ptr (sec_start), sctx);
5902 gsi_insert_seq_after (&tgsi, gimple_omp_body (sec_start),
5903 GSI_CONTINUE_LINKING);
5904 gimple_omp_set_body (sec_start, NULL);
5906 if (gsi_one_before_end_p (tgsi))
/* The last section additionally runs the lastprivate assignments.  */
5908 gimple_seq l = NULL;
5909 lower_lastprivate_clauses (gimple_omp_sections_clauses (stmt), NULL,
5910 &l, ctx);
5911 gsi_insert_seq_after (&tgsi, l, GSI_CONTINUE_LINKING);
5912 gimple_omp_section_set_last (sec_start);
5915 gsi_insert_after (&tgsi, gimple_build_omp_return (false),
5916 GSI_CONTINUE_LINKING);
5919 block = make_node (BLOCK);
5920 bind = gimple_build_bind (NULL, new_body, block);
5922 olist = NULL;
5923 lower_reduction_clauses (gimple_omp_sections_clauses (stmt), &olist, ctx);
5925 block = make_node (BLOCK);
5926 new_stmt = gimple_build_bind (NULL, NULL, block);
5927 gsi_replace (gsi_p, new_stmt, true);
5929 pop_gimplify_context (new_stmt);
5930 gimple_bind_append_vars (new_stmt, ctx->block_vars);
5931 BLOCK_VARS (block) = gimple_bind_vars (bind);
5932 if (BLOCK_VARS (block))
5933 TREE_USED (block) = 1;
/* Assemble the final body in execution order.  */
5935 new_body = NULL;
5936 gimple_seq_add_seq (&new_body, ilist);
5937 gimple_seq_add_stmt (&new_body, stmt);
5938 gimple_seq_add_stmt (&new_body, gimple_build_omp_sections_switch ());
5939 gimple_seq_add_stmt (&new_body, bind);
5941 control = create_tmp_var (unsigned_type_node, ".section");
5942 t = gimple_build_omp_continue (control, control);
5943 gimple_omp_sections_set_control (stmt, control);
5944 gimple_seq_add_stmt (&new_body, t);
5946 gimple_seq_add_seq (&new_body, olist);
5947 if (ctx->cancellable)
5948 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
5949 gimple_seq_add_seq (&new_body, dlist);
5951 new_body = maybe_catch_exception (new_body);
5953 bool nowait = omp_find_clause (gimple_omp_sections_clauses (stmt),
5954 OMP_CLAUSE_NOWAIT) != NULL_TREE;
5955 t = gimple_build_omp_return (nowait);
5956 gimple_seq_add_stmt (&new_body, t);
5957 maybe_add_implicit_barrier_cancel (ctx, &new_body);
5959 gimple_bind_set_body (new_stmt, new_body);
5963 /* A subroutine of lower_omp_single. Expand the simple form of
5964 a GIMPLE_OMP_SINGLE, without a copyprivate clause:
5966 if (GOMP_single_start ())
5967 BODY;
5968 [ GOMP_barrier (); ] -> unless 'nowait' is present.
5970 FIXME. It may be better to delay expanding the logic of this until
5971 pass_expand_omp. The expanded logic may make the job more difficult
5972 to a synchronization analysis pass. */
5974 static void
5975 lower_omp_single_simple (gomp_single *single_stmt, gimple_seq *pre_p)
5977 location_t loc = gimple_location (single_stmt);
5978 tree tlabel = create_artificial_label (loc);
5979 tree flabel = create_artificial_label (loc);
5980 gimple *call, *cond;
5981 tree lhs, decl;
/* lhs = GOMP_single_start (); returns true in exactly one thread.  */
5983 decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_START);
5984 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (decl)));
5985 call = gimple_build_call (decl, 0);
5986 gimple_call_set_lhs (call, lhs);
5987 gimple_seq_add_stmt (pre_p, call);
/* if (lhs) goto tlabel; else goto flabel; with BODY under tlabel.  */
5989 cond = gimple_build_cond (EQ_EXPR, lhs,
5990 fold_convert_loc (loc, TREE_TYPE (lhs),
5991 boolean_true_node),
5992 tlabel, flabel);
5993 gimple_seq_add_stmt (pre_p, cond);
5994 gimple_seq_add_stmt (pre_p, gimple_build_label (tlabel));
5995 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
5996 gimple_seq_add_stmt (pre_p, gimple_build_label (flabel));
6000 /* A subroutine of lower_omp_single. Expand the simple form of
6001 a GIMPLE_OMP_SINGLE, with a copyprivate clause:
6003 #pragma omp single copyprivate (a, b, c)
6005 Create a new structure to hold copies of 'a', 'b' and 'c' and emit:
6008 if ((copyout_p = GOMP_single_copy_start ()) == NULL)
6010 BODY;
6011 copyout.a = a;
6012 copyout.b = b;
6013 copyout.c = c;
6014 GOMP_single_copy_end (&copyout);
6016 else
6018 a = copyout_p->a;
6019 b = copyout_p->b;
6020 c = copyout_p->c;
6022 GOMP_barrier ();
6025 FIXME. It may be better to delay expanding the logic of this until
6026 pass_expand_omp. The expanded logic may make the job more difficult
6027 to a synchronization analysis pass. */
6029 static void
6030 lower_omp_single_copy (gomp_single *single_stmt, gimple_seq *pre_p,
6031 omp_context *ctx)
6033 tree ptr_type, t, l0, l1, l2, bfn_decl;
6034 gimple_seq copyin_seq;
6035 location_t loc = gimple_location (single_stmt);
/* sender_decl is the copyout record; receiver_decl the pointer returned
   by GOMP_single_copy_start in the non-executing threads.  */
6037 ctx->sender_decl = create_tmp_var (ctx->record_type, ".omp_copy_o");
6039 ptr_type = build_pointer_type (ctx->record_type);
6040 ctx->receiver_decl = create_tmp_var (ptr_type, ".omp_copy_i");
/* l0: execute BODY and broadcast; l1: receive; l2: rejoin.  */
6042 l0 = create_artificial_label (loc);
6043 l1 = create_artificial_label (loc);
6044 l2 = create_artificial_label (loc);
6046 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_START);
6047 t = build_call_expr_loc (loc, bfn_decl, 0);
6048 t = fold_convert_loc (loc, ptr_type, t);
6049 gimplify_assign (ctx->receiver_decl, t, pre_p);
6051 t = build2 (EQ_EXPR, boolean_type_node, ctx->receiver_decl,
6052 build_int_cst (ptr_type, 0));
6053 t = build3 (COND_EXPR, void_type_node, t,
6054 build_and_jump (&l0), build_and_jump (&l1));
6055 gimplify_and_add (t, pre_p);
6057 gimple_seq_add_stmt (pre_p, gimple_build_label (l0));
6059 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
/* Sender stores go straight into pre_p here; receiver copies are
   collected into copyin_seq for emission under l1.  */
6061 copyin_seq = NULL;
6062 lower_copyprivate_clauses (gimple_omp_single_clauses (single_stmt), pre_p,
6063 &copyin_seq, ctx);
6065 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
6066 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_END);
6067 t = build_call_expr_loc (loc, bfn_decl, 1, t);
6068 gimplify_and_add (t, pre_p);
6070 t = build_and_jump (&l2);
6071 gimplify_and_add (t, pre_p);
6073 gimple_seq_add_stmt (pre_p, gimple_build_label (l1));
6075 gimple_seq_add_seq (pre_p, copyin_seq);
6077 gimple_seq_add_stmt (pre_p, gimple_build_label (l2));
6081 /* Expand code for an OpenMP single directive. */
/* Replaces the GIMPLE_OMP_SINGLE at GSI_P with a bind containing clause
   setup, the expanded single body (copyprivate or simple form), destructor
   code, and the OMP return with cancellation handling.  */
6083 static void
6084 lower_omp_single (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6086 tree block;
6087 gomp_single *single_stmt = as_a <gomp_single *> (gsi_stmt (*gsi_p));
6088 gbind *bind;
6089 gimple_seq bind_body, bind_body_tail = NULL, dlist;
6091 push_gimplify_context ();
6093 block = make_node (BLOCK);
6094 bind = gimple_build_bind (NULL, NULL, block);
6095 gsi_replace (gsi_p, bind, true);
6096 bind_body = NULL;
6097 dlist = NULL;
6098 lower_rec_input_clauses (gimple_omp_single_clauses (single_stmt),
6099 &bind_body, &dlist, ctx, NULL);
6100 lower_omp (gimple_omp_body_ptr (single_stmt), ctx);
6102 gimple_seq_add_stmt (&bind_body, single_stmt);
/* A record type is only created when copyprivate clauses are present.  */
6104 if (ctx->record_type)
6105 lower_omp_single_copy (single_stmt, &bind_body, ctx);
6106 else
6107 lower_omp_single_simple (single_stmt, &bind_body);
6109 gimple_omp_set_body (single_stmt, NULL);
6111 gimple_seq_add_seq (&bind_body, dlist);
6113 bind_body = maybe_catch_exception (bind_body);
6115 bool nowait = omp_find_clause (gimple_omp_single_clauses (single_stmt),
6116 OMP_CLAUSE_NOWAIT) != NULL_TREE;
6117 gimple *g = gimple_build_omp_return (nowait);
6118 gimple_seq_add_stmt (&bind_body_tail, g);
6119 maybe_add_implicit_barrier_cancel (ctx, &bind_body_tail);
6120 if (ctx->record_type)
/* Clobber the copyout record after the return so its stack slot can
   be reused.  */
6122 gimple_stmt_iterator gsi = gsi_start (bind_body_tail);
6123 tree clobber = build_constructor (ctx->record_type, NULL);
6124 TREE_THIS_VOLATILE (clobber) = 1;
6125 gsi_insert_after (&gsi, gimple_build_assign (ctx->sender_decl,
6126 clobber), GSI_SAME_STMT);
6128 gimple_seq_add_seq (&bind_body, bind_body_tail);
6129 gimple_bind_set_body (bind, bind_body);
6131 pop_gimplify_context (bind);
6133 gimple_bind_append_vars (bind, ctx->block_vars);
6134 BLOCK_VARS (block) = ctx->block_vars;
6135 if (BLOCK_VARS (block))
6136 TREE_USED (block) = 1;
6140 /* Expand code for an OpenMP master directive. */
/* Lowers to: if (omp_get_thread_num () == 0) BODY; by building a
   conditional jump around the body guarded on thread number zero.  */
6142 static void
6143 lower_omp_master (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6145 tree block, lab = NULL, x, bfn_decl;
6146 gimple *stmt = gsi_stmt (*gsi_p);
6147 gbind *bind;
6148 location_t loc = gimple_location (stmt);
6149 gimple_seq tseq;
6151 push_gimplify_context ();
6153 block = make_node (BLOCK);
6154 bind = gimple_build_bind (NULL, NULL, block);
6155 gsi_replace (gsi_p, bind, true);
6156 gimple_bind_add_stmt (bind, stmt);
/* Skip the body unless this is thread 0.  */
6158 bfn_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
6159 x = build_call_expr_loc (loc, bfn_decl, 0);
6160 x = build2 (EQ_EXPR, boolean_type_node, x, integer_zero_node);
6161 x = build3 (COND_EXPR, void_type_node, x, NULL, build_and_jump (&lab));
6162 tseq = NULL;
6163 gimplify_and_add (x, &tseq);
6164 gimple_bind_add_seq (bind, tseq);
6166 lower_omp (gimple_omp_body_ptr (stmt), ctx);
6167 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
6168 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
6169 gimple_omp_set_body (stmt, NULL);
6171 gimple_bind_add_stmt (bind, gimple_build_label (lab));
6173 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
6175 pop_gimplify_context (bind);
6177 gimple_bind_append_vars (bind, ctx->block_vars);
6178 BLOCK_VARS (block) = ctx->block_vars;
6182 /* Expand code for an OpenMP taskgroup directive. */
/* Lowers to a bind wrapping: GOMP_taskgroup_start (); BODY; with the
   end handled via the OMP return (nowait).  */
6184 static void
6185 lower_omp_taskgroup (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6187 gimple *stmt = gsi_stmt (*gsi_p);
6188 gcall *x;
6189 gbind *bind;
6190 tree block = make_node (BLOCK);
6192 bind = gimple_build_bind (NULL, NULL, block);
6193 gsi_replace (gsi_p, bind, true);
6194 gimple_bind_add_stmt (bind, stmt);
6196 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_START),
6198 gimple_bind_add_stmt (bind, x);
6200 lower_omp (gimple_omp_body_ptr (stmt), ctx);
6201 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
6202 gimple_omp_set_body (stmt, NULL);
6204 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
6206 gimple_bind_append_vars (bind, ctx->block_vars);
6207 BLOCK_VARS (block) = ctx->block_vars;
6211 /* Fold the OMP_ORDERED_CLAUSES for the OMP_ORDERED in STMT if possible. */
/* Two transformations: (1) merge depend(sink:) clauses from adjacent
   ordered constructs into this one, and (2) fold all sink vectors into a
   single conservative vector (GCD of first elements, lexicographic
   min/max of the rest).  Invalid or redundant clauses are dropped; if no
   clauses remain the whole construct becomes a nop.  */
6213 static void
6214 lower_omp_ordered_clauses (gimple_stmt_iterator *gsi_p, gomp_ordered *ord_stmt,
6215 omp_context *ctx)
6217 struct omp_for_data fd;
6218 if (!ctx->outer || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR)
6219 return;
6221 unsigned int len = gimple_omp_for_collapse (ctx->outer->stmt);
6222 struct omp_for_data_loop *loops = XALLOCAVEC (struct omp_for_data_loop, len);
6223 omp_extract_for_data (as_a <gomp_for *> (ctx->outer->stmt), &fd, loops);
6224 if (!fd.ordered)
6225 return;
6227 tree *list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
6228 tree c = gimple_omp_ordered_clauses (ord_stmt);
6229 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
6230 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
6232 /* Merge depend clauses from multiple adjacent
6233 #pragma omp ordered depend(sink:...) constructs
6234 into one #pragma omp ordered depend(sink:...), so that
6235 we can optimize them together. */
6236 gimple_stmt_iterator gsi = *gsi_p;
6237 gsi_next (&gsi);
6238 while (!gsi_end_p (gsi))
6240 gimple *stmt = gsi_stmt (gsi);
6241 if (is_gimple_debug (stmt)
6242 || gimple_code (stmt) == GIMPLE_OMP_ORDERED)
6244 gsi_next (&gsi);
6245 continue;
6247 if (gimple_code (stmt) != GIMPLE_OMP_ORDERED)
6248 break;
6249 gomp_ordered *ord_stmt2 = as_a <gomp_ordered *> (stmt);
6250 c = gimple_omp_ordered_clauses (ord_stmt2);
6251 if (c == NULL_TREE
6252 || OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND
6253 || OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
6254 break;
/* Splice the later construct's clauses onto our list and delete it.  */
6255 while (*list_p)
6256 list_p = &OMP_CLAUSE_CHAIN (*list_p);
6257 *list_p = c;
6258 gsi_remove (&gsi, true);
6262 /* Canonicalize sink dependence clauses into one folded clause if
6263 possible.
6265 The basic algorithm is to create a sink vector whose first
6266 element is the GCD of all the first elements, and whose remaining
6267 elements are the minimum of the subsequent columns.
6269 We ignore dependence vectors whose first element is zero because
6270 such dependencies are known to be executed by the same thread.
6272 We take into account the direction of the loop, so a minimum
6273 becomes a maximum if the loop is iterating forwards. We also
6274 ignore sink clauses where the loop direction is unknown, or where
6275 the offsets are clearly invalid because they are not a multiple
6276 of the loop increment.
6278 For example:
6280 #pragma omp for ordered(2)
6281 for (i=0; i < N; ++i)
6282 for (j=0; j < M; ++j)
6284 #pragma omp ordered \
6285 depend(sink:i-8,j-2) \
6286 depend(sink:i,j-1) \ // Completely ignored because i+0.
6287 depend(sink:i-4,j-3) \
6288 depend(sink:i-6,j-4)
6289 #pragma omp ordered depend(source)
6292 Folded clause is:
6294 depend(sink:-gcd(8,4,6),-min(2,3,4))
6295 -or-
6296 depend(sink:-2,-2)
6299 /* FIXME: Computing GCD's where the first element is zero is
6300 non-trivial in the presence of collapsed loops. Do this later. */
6301 if (fd.collapse > 1)
6302 return;
/* folded_deps[0..len-1] is the candidate folded vector; the second half,
   folded_deps[len..2*len-2], holds the current clause's trailing offsets
   so they can be copied in wholesale if the clause becomes the new
   lexicographic extreme.  */
6304 wide_int *folded_deps = XALLOCAVEC (wide_int, 2 * len - 1);
6305 memset (folded_deps, 0, sizeof (*folded_deps) * (2 * len - 1));
6306 tree folded_dep = NULL_TREE;
6307 /* TRUE if the first dimension's offset is negative. */
6308 bool neg_offset_p = false;
6310 list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
6311 unsigned int i;
6312 while ((c = *list_p) != NULL)
6314 bool remove = false;
6316 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND);
6317 if (OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
6318 goto next_ordered_clause;
6320 tree vec;
6321 for (vec = OMP_CLAUSE_DECL (c), i = 0;
6322 vec && TREE_CODE (vec) == TREE_LIST;
6323 vec = TREE_CHAIN (vec), ++i)
6325 gcc_assert (i < len);
6327 /* omp_extract_for_data has canonicalized the condition. */
6328 gcc_assert (fd.loops[i].cond_code == LT_EXPR
6329 || fd.loops[i].cond_code == GT_EXPR);
6330 bool forward = fd.loops[i].cond_code == LT_EXPR;
6331 bool maybe_lexically_later = true;
6333 /* While the committee makes up its mind, bail if we have any
6334 non-constant steps. */
6335 if (TREE_CODE (fd.loops[i].step) != INTEGER_CST)
6336 goto lower_omp_ordered_ret;
6338 tree itype = TREE_TYPE (TREE_VALUE (vec));
6339 if (POINTER_TYPE_P (itype))
6340 itype = sizetype;
6341 wide_int offset = wide_int::from (TREE_PURPOSE (vec),
6342 TYPE_PRECISION (itype),
6343 TYPE_SIGN (itype));
6345 /* Ignore invalid offsets that are not multiples of the step. */
6346 if (!wi::multiple_of_p
6347 (wi::abs (offset), wi::abs ((wide_int) fd.loops[i].step),
6348 UNSIGNED))
6350 warning_at (OMP_CLAUSE_LOCATION (c), 0,
6351 "ignoring sink clause with offset that is not "
6352 "a multiple of the loop step");
6353 remove = true;
6354 goto next_ordered_clause;
6357 /* Calculate the first dimension. The first dimension of
6358 the folded dependency vector is the GCD of the first
6359 elements, while ignoring any first elements whose offset
6360 is 0. */
6361 if (i == 0)
6363 /* Ignore dependence vectors whose first dimension is 0. */
6364 if (offset == 0)
6366 remove = true;
6367 goto next_ordered_clause;
6369 else
6371 if (!TYPE_UNSIGNED (itype) && (forward ^ wi::neg_p (offset)))
6373 error_at (OMP_CLAUSE_LOCATION (c),
6374 "first offset must be in opposite direction "
6375 "of loop iterations");
6376 goto lower_omp_ordered_ret;
6378 if (forward)
6379 offset = -offset;
6380 neg_offset_p = forward;
6381 /* Initialize the first time around. */
6382 if (folded_dep == NULL_TREE)
6384 folded_dep = c;
6385 folded_deps[0] = offset;
6387 else
6388 folded_deps[0] = wi::gcd (folded_deps[0],
6389 offset, UNSIGNED);
6392 /* Calculate minimum for the remaining dimensions. */
6393 else
6395 folded_deps[len + i - 1] = offset;
6396 if (folded_dep == c)
6397 folded_deps[i] = offset;
6398 else if (maybe_lexically_later
6399 && !wi::eq_p (folded_deps[i], offset))
6401 if (forward ^ wi::gts_p (folded_deps[i], offset))
6403 unsigned int j;
6404 folded_dep = c;
/* This clause is lexicographically earlier; adopt its trailing
   offsets (saved in the second half) as the new fold.  */
6405 for (j = 1; j <= i; j++)
6406 folded_deps[j] = folded_deps[len + j - 1];
6408 else
6409 maybe_lexically_later = false;
6413 gcc_assert (i == len);
/* Each processed sink clause is removed; the folded result is re-added
   once at the end.  */
6415 remove = true;
6417 next_ordered_clause:
6418 if (remove)
6419 *list_p = OMP_CLAUSE_CHAIN (c);
6420 else
6421 list_p = &OMP_CLAUSE_CHAIN (c);
6424 if (folded_dep)
6426 if (neg_offset_p)
6427 folded_deps[0] = -folded_deps[0];
6429 tree itype = TREE_TYPE (TREE_VALUE (OMP_CLAUSE_DECL (folded_dep)));
6430 if (POINTER_TYPE_P (itype))
6431 itype = sizetype;
6433 TREE_PURPOSE (OMP_CLAUSE_DECL (folded_dep))
6434 = wide_int_to_tree (itype, folded_deps[0]);
6435 OMP_CLAUSE_CHAIN (folded_dep) = gimple_omp_ordered_clauses (ord_stmt);
6436 *gimple_omp_ordered_clauses_ptr (ord_stmt) = folded_dep;
6439 lower_omp_ordered_ret:
6441 /* Ordered without clauses is #pragma omp threads, while we want
6442 a nop instead if we remove all clauses. */
6443 if (gimple_omp_ordered_clauses (ord_stmt) == NULL_TREE)
6444 gsi_replace (gsi_p, gimple_build_nop (), true);
6448 /* Expand code for an OpenMP ordered directive.  Replaces the
   GIMPLE_OMP_ORDERED at *GSI_P with a GIMPLE_BIND that brackets the
   lowered body with start/end calls; CTX is the lowering context. */
6450 static void
6451 lower_omp_ordered (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6453 tree block;
6454 gimple *stmt = gsi_stmt (*gsi_p), *g;
6455 gomp_ordered *ord_stmt = as_a <gomp_ordered *> (stmt);
6456 gcall *x;
6457 gbind *bind;
   /* With a SIMD clause, internal functions are used instead of the
      libgomp entry points so the vectorizer can handle the region.  */
6458 bool simd = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
6459 OMP_CLAUSE_SIMD);
6460 /* FIXME: this should check presence of OMP_CLAUSE__SIMT_ on the enclosing
6461 loop. */
6462 bool maybe_simt
6463 = simd && omp_maybe_offloaded_ctx (ctx) && omp_max_simt_vf () > 1;
6464 bool threads = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
6465 OMP_CLAUSE_THREADS);
   /* Ordered with depend clause(s) is handled during expansion instead;
      here it is left in place (lower_omp_ordered_clauses is disabled,
      see the FIXME below).  */
6467 if (omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
6468 OMP_CLAUSE_DEPEND))
6470 /* FIXME: This is needs to be moved to the expansion to verify various
6471 conditions only testable on cfg with dominators computed, and also
6472 all the depend clauses to be merged still might need to be available
6473 for the runtime checks. */
6474 if (0)
6475 lower_omp_ordered_clauses (gsi_p, ord_stmt, ctx);
6476 return;
6479 push_gimplify_context ();
6481 block = make_node (BLOCK);
6482 bind = gimple_build_bind (NULL, NULL, block);
6483 gsi_replace (gsi_p, bind, true);
6484 gimple_bind_add_stmt (bind, stmt);
6486 if (simd)
6488 x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_START, 1,
6489 build_int_cst (NULL_TREE, threads));
6490 cfun->has_simduid_loops = true;
6492 else
6493 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_START),
6495 gimple_bind_add_stmt (bind, x);
6497 tree counter = NULL_TREE, test = NULL_TREE, body = NULL_TREE;
   /* For SIMT execution, build a lane-serialization loop: each pass
      through BODY only executes the region when GOMP_SIMT_ORDERED_PRED
      is zero for this lane's counter (initialized from GOMP_SIMT_LANE);
      the loop at TEST decrements the counter and repeats while
      GOMP_SIMT_VOTE_ANY reports any lane still pending.  */
6498 if (maybe_simt)
6500 counter = create_tmp_var (integer_type_node);
6501 g = gimple_build_call_internal (IFN_GOMP_SIMT_LANE, 0);
6502 gimple_call_set_lhs (g, counter);
6503 gimple_bind_add_stmt (bind, g);
6505 body = create_artificial_label (UNKNOWN_LOCATION);
6506 test = create_artificial_label (UNKNOWN_LOCATION);
6507 gimple_bind_add_stmt (bind, gimple_build_label (body));
6509 tree simt_pred = create_tmp_var (integer_type_node);
6510 g = gimple_build_call_internal (IFN_GOMP_SIMT_ORDERED_PRED, 1, counter);
6511 gimple_call_set_lhs (g, simt_pred);
6512 gimple_bind_add_stmt (bind, g);
6514 tree t = create_artificial_label (UNKNOWN_LOCATION);
6515 g = gimple_build_cond (EQ_EXPR, simt_pred, integer_zero_node, t, test);
6516 gimple_bind_add_stmt (bind, g);
6518 gimple_bind_add_stmt (bind, gimple_build_label (t));
6520 lower_omp (gimple_omp_body_ptr (stmt), ctx);
6521 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
6522 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
6523 gimple_omp_set_body (stmt, NULL);
6525 if (maybe_simt)
6527 gimple_bind_add_stmt (bind, gimple_build_label (test));
6528 g = gimple_build_assign (counter, MINUS_EXPR, counter, integer_one_node);
6529 gimple_bind_add_stmt (bind, g);
6531 tree c = build2 (GE_EXPR, boolean_type_node, counter, integer_zero_node);
6532 tree nonneg = create_tmp_var (integer_type_node);
6533 gimple_seq tseq = NULL;
6534 gimplify_assign (nonneg, fold_convert (integer_type_node, c), &tseq);
6535 gimple_bind_add_seq (bind, tseq);
6537 g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY, 1, nonneg);
6538 gimple_call_set_lhs (g, nonneg);
6539 gimple_bind_add_stmt (bind, g);
6541 tree end = create_artificial_label (UNKNOWN_LOCATION);
6542 g = gimple_build_cond (NE_EXPR, nonneg, integer_zero_node, body, end);
6543 gimple_bind_add_stmt (bind, g);
6545 gimple_bind_add_stmt (bind, gimple_build_label (end));
6547 if (simd)
6548 x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_END, 1,
6549 build_int_cst (NULL_TREE, threads));
6550 else
6551 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_END),
6553 gimple_bind_add_stmt (bind, x);
6555 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
6557 pop_gimplify_context (bind);
6559 gimple_bind_append_vars (bind, ctx->block_vars);
6560 BLOCK_VARS (block) = gimple_bind_vars (bind);
6564 /* Gimplify a GIMPLE_OMP_CRITICAL statement. This is a relatively simple
6565 substitution of a couple of function calls. But in the NAMED case,
6566 requires that languages coordinate a symbol name. It is therefore
6567 best put here in common code. */
   /* Map from critical-section NAME identifier to the artificial mutex
      variable created for it; GC-rooted via GTY so it survives across
      functions within the translation unit.  */
6569 static GTY(()) hash_map<tree, tree> *critical_name_mutexes;
6571 static void
6572 lower_omp_critical (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6574 tree block;
6575 tree name, lock, unlock;
6576 gomp_critical *stmt = as_a <gomp_critical *> (gsi_stmt (*gsi_p));
6577 gbind *bind;
6578 location_t loc = gimple_location (stmt);
6579 gimple_seq tbody;
6581 name = gimple_omp_critical_name (stmt);
6582 if (name)
6584 tree decl;
6586 if (!critical_name_mutexes)
6587 critical_name_mutexes = hash_map<tree, tree>::create_ggc (10);
6589 tree *n = critical_name_mutexes->get (name);
6590 if (n == NULL)
6592 char *new_str;
6594 decl = create_tmp_var_raw (ptr_type_node);
   /* The mutex is emitted as a public common symbol named after the
      user's critical name, so identically-named critical sections in
      other translation units share the same lock.  */
6596 new_str = ACONCAT ((".gomp_critical_user_",
6597 IDENTIFIER_POINTER (name), NULL));
6598 DECL_NAME (decl) = get_identifier (new_str);
6599 TREE_PUBLIC (decl) = 1;
6600 TREE_STATIC (decl) = 1;
6601 DECL_COMMON (decl) = 1;
6602 DECL_ARTIFICIAL (decl) = 1;
6603 DECL_IGNORED_P (decl) = 1;
6605 varpool_node::finalize_decl (decl);
6607 critical_name_mutexes->put (name, decl);
6609 else
6610 decl = *n;
6612 /* If '#pragma omp critical' is inside offloaded region or
6613 inside function marked as offloadable, the symbol must be
6614 marked as offloadable too. */
6615 omp_context *octx;
6616 if (cgraph_node::get (current_function_decl)->offloadable)
6617 varpool_node::get_create (decl)->offloadable = 1;
6618 else
6619 for (octx = ctx->outer; octx; octx = octx->outer)
6620 if (is_gimple_omp_offloaded (octx->stmt))
6622 varpool_node::get_create (decl)->offloadable = 1;
6623 break;
6626 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_START);
6627 lock = build_call_expr_loc (loc, lock, 1,
6628 build_fold_addr_expr_loc (loc, decl));
6630 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_END);
6631 unlock = build_call_expr_loc (loc, unlock, 1,
6632 build_fold_addr_expr_loc (loc, decl));
   /* Unnamed critical sections all use the single global lock taken by
      GOMP_critical_start/end.  */
6634 else
6636 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_START);
6637 lock = build_call_expr_loc (loc, lock, 0);
6639 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_END);
6640 unlock = build_call_expr_loc (loc, unlock, 0);
6643 push_gimplify_context ();
6645 block = make_node (BLOCK);
6646 bind = gimple_build_bind (NULL, NULL, block);
6647 gsi_replace (gsi_p, bind, true);
6648 gimple_bind_add_stmt (bind, stmt);
6650 tbody = gimple_bind_body (bind);
6651 gimplify_and_add (lock, &tbody);
6652 gimple_bind_set_body (bind, tbody);
6654 lower_omp (gimple_omp_body_ptr (stmt), ctx);
6655 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
6656 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
6657 gimple_omp_set_body (stmt, NULL);
6659 tbody = gimple_bind_body (bind);
6660 gimplify_and_add (unlock, &tbody);
6661 gimple_bind_set_body (bind, tbody);
6663 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
6665 pop_gimplify_context (bind);
6666 gimple_bind_append_vars (bind, ctx->block_vars);
6667 BLOCK_VARS (block) = gimple_bind_vars (bind);
6670 /* A subroutine of lower_omp_for. Generate code to emit the predicate
6671 for a lastprivate clause. Given a loop control predicate of (V
6672 cond N2), we gate the clause on (!(V cond N2)). The lowered form
6673 is appended to *DLIST, iterator initialization is appended to
6674 *BODY_P. */
6676 static void
6677 lower_omp_for_lastprivate (struct omp_for_data *fd, gimple_seq *body_p,
6678 gimple_seq *dlist, struct omp_context *ctx)
6680 tree clauses, cond, vinit;
6681 enum tree_code cond_code;
6682 gimple_seq stmts;
   /* Invert the loop condition: the lastprivate copy-out runs only when
      the loop has terminated, i.e. when (V cond N2) is false.  */
6684 cond_code = fd->loop.cond_code;
6685 cond_code = cond_code == LT_EXPR ? GE_EXPR : LE_EXPR;
6687 /* When possible, use a strict equality expression. This can let VRP
6688 type optimizations deduce the value and remove a copy. */
6689 if (tree_fits_shwi_p (fd->loop.step))
6691 HOST_WIDE_INT step = tree_to_shwi (fd->loop.step);
6692 if (step == 1 || step == -1)
6693 cond_code = EQ_EXPR;
6696 if (gimple_omp_for_kind (fd->for_stmt) == GF_OMP_FOR_KIND_GRID_LOOP
6697 || gimple_omp_for_grid_phony (fd->for_stmt))
6698 cond = omp_grid_lastprivate_predicate (fd);
6699 else
6701 tree n2 = fd->loop.n2;
   /* For collapsed loops combined into an outer construct with a
      non-constant bound, the real end value lives in a _looptemp_
      clause on the enclosing taskreg (or in the outer for's own
      data); dig it out so the predicate compares against the right
      N2.  */
6702 if (fd->collapse > 1
6703 && TREE_CODE (n2) != INTEGER_CST
6704 && gimple_omp_for_combined_into_p (fd->for_stmt))
6706 struct omp_context *taskreg_ctx = NULL;
6707 if (gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
6709 gomp_for *gfor = as_a <gomp_for *> (ctx->outer->stmt);
6710 if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_FOR
6711 || gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_DISTRIBUTE)
6713 if (gimple_omp_for_combined_into_p (gfor))
6715 gcc_assert (ctx->outer->outer
6716 && is_parallel_ctx (ctx->outer->outer))
6717 taskreg_ctx = ctx->outer->outer;
6719 else
6721 struct omp_for_data outer_fd;
6722 omp_extract_for_data (gfor, &outer_fd, NULL);
6723 n2 = fold_convert (TREE_TYPE (n2), outer_fd.loop.n2);
6726 else if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_TASKLOOP)
6727 taskreg_ctx = ctx->outer->outer;
6729 else if (is_taskreg_ctx (ctx->outer))
6730 taskreg_ctx = ctx->outer;
6731 if (taskreg_ctx)
6733 int i;
6734 tree taskreg_clauses
6735 = gimple_omp_taskreg_clauses (taskreg_ctx->stmt);
6736 tree innerc = omp_find_clause (taskreg_clauses,
6737 OMP_CLAUSE__LOOPTEMP_);
6738 gcc_assert (innerc);
   /* Skip over the first fd->collapse + 1 _looptemp_ clauses to
      reach the one holding the combined loop's end value.  */
6739 for (i = 0; i < fd->collapse; i++)
6741 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
6742 OMP_CLAUSE__LOOPTEMP_);
6743 gcc_assert (innerc);
6745 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
6746 OMP_CLAUSE__LOOPTEMP_);
6747 if (innerc)
6748 n2 = fold_convert (TREE_TYPE (n2),
6749 lookup_decl (OMP_CLAUSE_DECL (innerc),
6750 taskreg_ctx));
6753 cond = build2 (cond_code, boolean_type_node, fd->loop.v, n2);
6756 clauses = gimple_omp_for_clauses (fd->for_stmt);
6757 stmts = NULL;
6758 lower_lastprivate_clauses (clauses, cond, &stmts, ctx);
6759 if (!gimple_seq_empty_p (stmts))
6761 gimple_seq_add_seq (&stmts, *dlist);
6762 *dlist = stmts;
6764 /* Optimize: v = 0; is usually cheaper than v = some_other_constant. */
6765 vinit = fd->loop.n1;
6766 if (cond_code == EQ_EXPR
6767 && tree_fits_shwi_p (fd->loop.n2)
6768 && ! integer_zerop (fd->loop.n2))
6769 vinit = build_int_cst (TREE_TYPE (fd->loop.v), 0);
6770 else
6771 vinit = unshare_expr (vinit);
6773 /* Initialize the iterator variable, so that threads that don't execute
6774 any iterations don't execute the lastprivate clauses by accident. */
6775 gimplify_assign (fd->loop.v, vinit, body_p);
6780 /* Lower code for an OMP loop directive.  Rewrites the GIMPLE_OMP_FOR at
   *GSI_P into a GIMPLE_BIND containing data-sharing setup, the lowered
   loop, and exit handling; CTX is the lowering context.  */
6782 static void
6783 lower_omp_for (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6785 tree *rhs_p, block;
6786 struct omp_for_data fd, *fdp = NULL;
6787 gomp_for *stmt = as_a <gomp_for *> (gsi_stmt (*gsi_p));
6788 gbind *new_stmt;
6789 gimple_seq omp_for_body, body, dlist;
6790 gimple_seq oacc_head = NULL, oacc_tail = NULL;
6791 size_t i;
6793 push_gimplify_context ();
6795 lower_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
6797 block = make_node (BLOCK);
6798 new_stmt = gimple_build_bind (NULL, NULL, block);
6799 /* Replace at gsi right away, so that 'stmt' is no member
6800 of a sequence anymore as we're going to add to a different
6801 one below. */
6802 gsi_replace (gsi_p, new_stmt, true);
6804 /* Move declaration of temporaries in the loop body before we make
6805 it go away. */
6806 omp_for_body = gimple_omp_body (stmt);
6807 if (!gimple_seq_empty_p (omp_for_body)
6808 && gimple_code (gimple_seq_first_stmt (omp_for_body)) == GIMPLE_BIND)
6810 gbind *inner_bind
6811 = as_a <gbind *> (gimple_seq_first_stmt (omp_for_body));
6812 tree vars = gimple_bind_vars (inner_bind);
6813 gimple_bind_append_vars (new_stmt, vars);
6814 /* bind_vars/BLOCK_VARS are being moved to new_stmt/block, don't
6815 keep them on the inner_bind and it's block. */
6816 gimple_bind_set_vars (inner_bind, NULL_TREE);
6817 if (gimple_bind_block (inner_bind))
6818 BLOCK_VARS (gimple_bind_block (inner_bind)) = NULL_TREE;
   /* If this loop was combined into an enclosing construct, create the
      _looptemp_ clauses the outer construct will use to communicate the
      iteration bounds (istart/iend plus count temporaries).  */
6821 if (gimple_omp_for_combined_into_p (stmt))
6823 omp_extract_for_data (stmt, &fd, NULL);
6824 fdp = &fd;
6826 /* We need two temporaries with fd.loop.v type (istart/iend)
6827 and then (fd.collapse - 1) temporaries with the same
6828 type for count2 ... countN-1 vars if not constant. */
6829 size_t count = 2;
6830 tree type = fd.iter_type;
6831 if (fd.collapse > 1
6832 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
6833 count += fd.collapse - 1;
6834 bool taskreg_for
6835 = (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR
6836 || gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP);
6837 tree outerc = NULL, *pc = gimple_omp_for_clauses_ptr (stmt);
6838 tree simtc = NULL;
6839 tree clauses = *pc;
6840 if (taskreg_for)
6841 outerc
6842 = omp_find_clause (gimple_omp_taskreg_clauses (ctx->outer->stmt),
6843 OMP_CLAUSE__LOOPTEMP_);
6844 if (ctx->simt_stmt)
6845 simtc = omp_find_clause (gimple_omp_for_clauses (ctx->simt_stmt),
6846 OMP_CLAUSE__LOOPTEMP_);
6847 for (i = 0; i < count; i++)
6849 tree temp;
6850 if (taskreg_for)
6852 gcc_assert (outerc);
6853 temp = lookup_decl (OMP_CLAUSE_DECL (outerc), ctx->outer);
6854 outerc = omp_find_clause (OMP_CLAUSE_CHAIN (outerc),
6855 OMP_CLAUSE__LOOPTEMP_);
6857 else
6859 /* If there are 2 adjacent SIMD stmts, one with _simt_
6860 clause, another without, make sure they have the same
6861 decls in _looptemp_ clauses, because the outer stmt
6862 they are combined into will look up just one inner_stmt. */
6863 if (ctx->simt_stmt)
6864 temp = OMP_CLAUSE_DECL (simtc);
6865 else
6866 temp = create_tmp_var (type);
6867 insert_decl_map (&ctx->outer->cb, temp, temp);
6869 *pc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
6870 OMP_CLAUSE_DECL (*pc) = temp;
6871 pc = &OMP_CLAUSE_CHAIN (*pc);
6872 if (ctx->simt_stmt)
6873 simtc = omp_find_clause (OMP_CLAUSE_CHAIN (simtc),
6874 OMP_CLAUSE__LOOPTEMP_);
6876 *pc = clauses;
6879 /* The pre-body and input clauses go before the lowered GIMPLE_OMP_FOR. */
6880 dlist = NULL;
6881 body = NULL;
6882 lower_rec_input_clauses (gimple_omp_for_clauses (stmt), &body, &dlist, ctx,
6883 fdp);
6884 gimple_seq_add_seq (&body, gimple_omp_for_pre_body (stmt));
6886 lower_omp (gimple_omp_body_ptr (stmt), ctx);
6888 /* Lower the header expressions. At this point, we can assume that
6889 the header is of the form:
6891 #pragma omp for (V = VAL1; V {<|>|<=|>=} VAL2; V = V [+-] VAL3)
6893 We just need to make sure that VAL1, VAL2 and VAL3 are lowered
6894 using the .omp_data_s mapping, if needed. */
6895 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
6897 rhs_p = gimple_omp_for_initial_ptr (stmt, i);
6898 if (!is_gimple_min_invariant (*rhs_p))
6899 *rhs_p = get_formal_tmp_var (*rhs_p, &body);
6901 rhs_p = gimple_omp_for_final_ptr (stmt, i);
6902 if (!is_gimple_min_invariant (*rhs_p))
6903 *rhs_p = get_formal_tmp_var (*rhs_p, &body);
6905 rhs_p = &TREE_OPERAND (gimple_omp_for_incr (stmt, i), 1);
6906 if (!is_gimple_min_invariant (*rhs_p))
6907 *rhs_p = get_formal_tmp_var (*rhs_p, &body);
6910 /* Once lowered, extract the bounds and clauses. */
6911 omp_extract_for_data (stmt, &fd, NULL);
6913 if (is_gimple_omp_oacc (ctx->stmt)
6914 && !ctx_in_oacc_kernels_region (ctx))
6915 lower_oacc_head_tail (gimple_location (stmt),
6916 gimple_omp_for_clauses (stmt),
6917 &oacc_head, &oacc_tail, ctx);
6919 /* Add OpenACC partitioning and reduction markers just before the loop. */
6920 if (oacc_head)
6921 gimple_seq_add_seq (&body, oacc_head);
6923 lower_omp_for_lastprivate (&fd, &body, &dlist, ctx);
   /* Remap linear clause decls and steps into this context for worksharing
      loops with copy-in.  */
6925 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
6926 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
6927 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
6928 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
6930 OMP_CLAUSE_DECL (c) = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
6931 if (DECL_P (OMP_CLAUSE_LINEAR_STEP (c)))
6932 OMP_CLAUSE_LINEAR_STEP (c)
6933 = maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_LINEAR_STEP (c),
6934 ctx);
   /* Phony (gridified) loops keep only the body; the loop statement,
      continue and return markers are omitted.  */
6937 bool phony_loop = (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_GRID_LOOP
6938 && gimple_omp_for_grid_phony (stmt));
6939 if (!phony_loop)
6940 gimple_seq_add_stmt (&body, stmt);
6941 gimple_seq_add_seq (&body, gimple_omp_body (stmt));
6943 if (!phony_loop)
6944 gimple_seq_add_stmt (&body, gimple_build_omp_continue (fd.loop.v,
6945 fd.loop.v));
6947 /* After the loop, add exit clauses. */
6948 lower_reduction_clauses (gimple_omp_for_clauses (stmt), &body, ctx);
6950 if (ctx->cancellable)
6951 gimple_seq_add_stmt (&body, gimple_build_label (ctx->cancel_label));
6953 gimple_seq_add_seq (&body, dlist);
6955 body = maybe_catch_exception (body);
6957 if (!phony_loop)
6959 /* Region exit marker goes at the end of the loop body. */
6960 gimple_seq_add_stmt (&body, gimple_build_omp_return (fd.have_nowait));
6961 maybe_add_implicit_barrier_cancel (ctx, &body);
6964 /* Add OpenACC joining and reduction markers just after the loop. */
6965 if (oacc_tail)
6966 gimple_seq_add_seq (&body, oacc_tail);
6968 pop_gimplify_context (new_stmt);
6970 gimple_bind_append_vars (new_stmt, ctx->block_vars);
6971 BLOCK_VARS (block) = gimple_bind_vars (new_stmt);
6972 if (BLOCK_VARS (block))
6973 TREE_USED (block) = 1;
6975 gimple_bind_set_body (new_stmt, body);
6976 gimple_omp_set_body (stmt, NULL);
6977 gimple_omp_for_set_pre_body (stmt, NULL);
6980 /* Callback for walk_stmts. Check if the current statement only contains
6981 GIMPLE_OMP_FOR or GIMPLE_OMP_SECTIONS.
   WI->info points at an int encoding the walk state: 0 = nothing seen
   yet, 1 = exactly one worksharing construct seen so far, -1 = not a
   combined-parallel candidate (a second construct or any other
   statement was encountered).  */
6983 static tree
6984 check_combined_parallel (gimple_stmt_iterator *gsi_p,
6985 bool *handled_ops_p,
6986 struct walk_stmt_info *wi)
6988 int *info = (int *) wi->info;
6989 gimple *stmt = gsi_stmt (*gsi_p);
6991 *handled_ops_p = true;
6992 switch (gimple_code (stmt))
6994 WALK_SUBSTMTS;
6996 case GIMPLE_OMP_FOR:
6997 case GIMPLE_OMP_SECTIONS:
6998 *info = *info == 0 ? 1 : -1;
6999 break;
7000 default:
   /* Any other statement disqualifies the parallel from being combined.  */
7001 *info = -1;
7002 break;
7004 return NULL;
7007 struct omp_taskcopy_context
7009 /* This field must be at the beginning, as we do "inheritance": Some
7010 callback functions for tree-inline.c (e.g., omp_copy_decl)
7011 receive a copy_body_data pointer that is up-casted to an
7012 omp_context pointer. */
7013 copy_body_data cb;
   /* Context of the task construct whose copyfn is being built.  */
7014 omp_context *ctx;
   /* copy_body_data::copy_decl callback for the task copyfn.  If VAR has
      a sender-side field mapping in the task context, remap it to a
      fresh temporary of the same type; otherwise keep VAR unchanged.  */
7017 static tree
7018 task_copyfn_copy_decl (tree var, copy_body_data *cb)
7020 struct omp_taskcopy_context *tcctx = (struct omp_taskcopy_context *) cb;
7022 if (splay_tree_lookup (tcctx->ctx->sfield_map, (splay_tree_key) var))
7023 return create_tmp_var (TREE_TYPE (var));
7025 return var;
   /* Build a copy of record type ORIG_TYPE for the task copyfn, remapping
      each field's type, size and offset through TCCTX->cb so that
      variably-modified types refer to the copyfn's own temporaries.
      Records the field mapping in TCCTX->cb.decl_map and returns the
      laid-out new type.  */
7028 static tree
7029 task_copyfn_remap_type (struct omp_taskcopy_context *tcctx, tree orig_type)
7031 tree name, new_fields = NULL, type, f;
7033 type = lang_hooks.types.make_type (RECORD_TYPE);
7034 name = DECL_NAME (TYPE_NAME (orig_type));
7035 name = build_decl (gimple_location (tcctx->ctx->stmt),
7036 TYPE_DECL, name, type);
7037 TYPE_NAME (type) = name;
7039 for (f = TYPE_FIELDS (orig_type); f ; f = TREE_CHAIN (f))
7041 tree new_f = copy_node (f);
7042 DECL_CONTEXT (new_f) = type;
7043 TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &tcctx->cb);
7044 TREE_CHAIN (new_f) = new_fields;
7045 walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &tcctx->cb, NULL);
7046 walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r, &tcctx->cb, NULL);
7047 walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
7048 &tcctx->cb, NULL);
7049 new_fields = new_f;
7050 tcctx->cb.decl_map->put (f, new_f);
   /* Fields were pushed in reverse; restore declaration order before
      laying out the type.  */
7052 TYPE_FIELDS (type) = nreverse (new_fields);
7053 layout_type (type);
7054 return type;
7057 /* Create task copyfn.  Populates the (already-declared) copy function of
   TASK_STMT, which copies shared pointers and firstprivate values from
   the sender record (second argument) into the task's own record (first
   argument).  CTX is the task's lowering context.  */
7059 static void
7060 create_task_copyfn (gomp_task *task_stmt, omp_context *ctx)
7062 struct function *child_cfun;
7063 tree child_fn, t, c, src, dst, f, sf, arg, sarg, decl;
7064 tree record_type, srecord_type, bind, list;
7065 bool record_needs_remap = false, srecord_needs_remap = false;
7066 splay_tree_node n;
7067 struct omp_taskcopy_context tcctx;
7068 location_t loc = gimple_location (task_stmt);
7070 child_fn = gimple_omp_task_copy_fn (task_stmt);
7071 child_cfun = DECL_STRUCT_FUNCTION (child_fn);
7072 gcc_assert (child_cfun->cfg == NULL);
7073 DECL_SAVED_TREE (child_fn) = alloc_stmt_list ();
7075 /* Reset DECL_CONTEXT on function arguments. */
7076 for (t = DECL_ARGUMENTS (child_fn); t; t = DECL_CHAIN (t))
7077 DECL_CONTEXT (t) = child_fn;
7079 /* Populate the function. */
7080 push_gimplify_context ();
7081 push_cfun (child_cfun);
7083 bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
7084 TREE_SIDE_EFFECTS (bind) = 1;
7085 list = NULL;
7086 DECL_SAVED_TREE (child_fn) = bind;
7087 DECL_SOURCE_LOCATION (child_fn) = gimple_location (task_stmt);
7089 /* Remap src and dst argument types if needed.  Remapping is only
   required when a field has a variably-modified type, whose size
   expressions reference decls from the source function.  */
7090 record_type = ctx->record_type;
7091 srecord_type = ctx->srecord_type;
7092 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
7093 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
7095 record_needs_remap = true;
7096 break;
7098 for (f = TYPE_FIELDS (srecord_type); f ; f = DECL_CHAIN (f))
7099 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
7101 srecord_needs_remap = true;
7102 break;
7105 if (record_needs_remap || srecord_needs_remap)
7107 memset (&tcctx, '\0', sizeof (tcctx));
7108 tcctx.cb.src_fn = ctx->cb.src_fn;
7109 tcctx.cb.dst_fn = child_fn;
7110 tcctx.cb.src_node = cgraph_node::get (tcctx.cb.src_fn);
7111 gcc_checking_assert (tcctx.cb.src_node);
7112 tcctx.cb.dst_node = tcctx.cb.src_node;
7113 tcctx.cb.src_cfun = ctx->cb.src_cfun;
7114 tcctx.cb.copy_decl = task_copyfn_copy_decl;
7115 tcctx.cb.eh_lp_nr = 0;
7116 tcctx.cb.transform_call_graph_edges = CB_CGE_MOVE;
7117 tcctx.cb.decl_map = new hash_map<tree, tree>;
7118 tcctx.ctx = ctx;
7120 if (record_needs_remap)
7121 record_type = task_copyfn_remap_type (&tcctx, record_type);
7122 if (srecord_needs_remap)
7123 srecord_type = task_copyfn_remap_type (&tcctx, srecord_type);
7125 else
7126 tcctx.cb.decl_map = NULL;
7128 arg = DECL_ARGUMENTS (child_fn);
7129 TREE_TYPE (arg) = build_pointer_type (record_type);
7130 sarg = DECL_CHAIN (arg);
7131 TREE_TYPE (sarg) = build_pointer_type (srecord_type);
7133 /* First pass: initialize temporaries used in record_type and srecord_type
7134 sizes and field offsets. */
7135 if (tcctx.cb.decl_map)
7136 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
7137 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7139 tree *p;
7141 decl = OMP_CLAUSE_DECL (c);
7142 p = tcctx.cb.decl_map->get (decl);
7143 if (p == NULL)
7144 continue;
7145 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
7146 sf = (tree) n->value;
7147 sf = *tcctx.cb.decl_map->get (sf);
7148 src = build_simple_mem_ref_loc (loc, sarg);
7149 src = omp_build_component_ref (src, sf);
7150 t = build2 (MODIFY_EXPR, TREE_TYPE (*p), *p, src);
7151 append_to_statement_list (t, &list);
7154 /* Second pass: copy shared var pointers and copy construct non-VLA
7155 firstprivate vars. */
7156 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
7157 switch (OMP_CLAUSE_CODE (c))
7159 splay_tree_key key;
7160 case OMP_CLAUSE_SHARED:
7161 decl = OMP_CLAUSE_DECL (c);
   /* SHARED_FIRSTPRIVATE vars are keyed by &DECL_UID rather than the
      decl itself.  */
7162 key = (splay_tree_key) decl;
7163 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
7164 key = (splay_tree_key) &DECL_UID (decl);
7165 n = splay_tree_lookup (ctx->field_map, key);
7166 if (n == NULL)
7167 break;
7168 f = (tree) n->value;
7169 if (tcctx.cb.decl_map)
7170 f = *tcctx.cb.decl_map->get (f);
7171 n = splay_tree_lookup (ctx->sfield_map, key);
7172 sf = (tree) n->value;
7173 if (tcctx.cb.decl_map)
7174 sf = *tcctx.cb.decl_map->get (sf);
7175 src = build_simple_mem_ref_loc (loc, sarg);
7176 src = omp_build_component_ref (src, sf);
7177 dst = build_simple_mem_ref_loc (loc, arg);
7178 dst = omp_build_component_ref (dst, f);
7179 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
7180 append_to_statement_list (t, &list);
7181 break;
7182 case OMP_CLAUSE_FIRSTPRIVATE:
7183 decl = OMP_CLAUSE_DECL (c);
   /* VLAs are handled in the last pass below.  */
7184 if (is_variable_sized (decl))
7185 break;
7186 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
7187 if (n == NULL)
7188 break;
7189 f = (tree) n->value;
7190 if (tcctx.cb.decl_map)
7191 f = *tcctx.cb.decl_map->get (f);
7192 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
7193 if (n != NULL)
7195 sf = (tree) n->value;
7196 if (tcctx.cb.decl_map)
7197 sf = *tcctx.cb.decl_map->get (sf);
7198 src = build_simple_mem_ref_loc (loc, sarg);
7199 src = omp_build_component_ref (src, sf);
7200 if (use_pointer_for_field (decl, NULL) || omp_is_reference (decl))
7201 src = build_simple_mem_ref_loc (loc, src);
7203 else
7204 src = decl;
7205 dst = build_simple_mem_ref_loc (loc, arg);
7206 dst = omp_build_component_ref (dst, f);
   /* Use the language's copy constructor hook, not plain assignment,
      for firstprivate copies.  */
7207 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
7208 append_to_statement_list (t, &list);
7209 break;
7210 case OMP_CLAUSE_PRIVATE:
7211 if (! OMP_CLAUSE_PRIVATE_OUTER_REF (c))
7212 break;
7213 decl = OMP_CLAUSE_DECL (c);
7214 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
7215 f = (tree) n->value;
7216 if (tcctx.cb.decl_map)
7217 f = *tcctx.cb.decl_map->get (f);
7218 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
7219 if (n != NULL)
7221 sf = (tree) n->value;
7222 if (tcctx.cb.decl_map)
7223 sf = *tcctx.cb.decl_map->get (sf);
7224 src = build_simple_mem_ref_loc (loc, sarg);
7225 src = omp_build_component_ref (src, sf);
7226 if (use_pointer_for_field (decl, NULL))
7227 src = build_simple_mem_ref_loc (loc, src);
7229 else
7230 src = decl;
7231 dst = build_simple_mem_ref_loc (loc, arg);
7232 dst = omp_build_component_ref (dst, f);
7233 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
7234 append_to_statement_list (t, &list);
7235 break;
7236 default:
7237 break;
7240 /* Last pass: handle VLA firstprivates.  Copy the data through the
   pointer stashed in the decl's DECL_VALUE_EXPR, then store the address
   of the destination copy into the task record's pointer field.  */
7241 if (tcctx.cb.decl_map)
7242 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
7243 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7245 tree ind, ptr, df;
7247 decl = OMP_CLAUSE_DECL (c);
7248 if (!is_variable_sized (decl))
7249 continue;
7250 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
7251 if (n == NULL)
7252 continue;
7253 f = (tree) n->value;
7254 f = *tcctx.cb.decl_map->get (f);
7255 gcc_assert (DECL_HAS_VALUE_EXPR_P (decl));
7256 ind = DECL_VALUE_EXPR (decl);
7257 gcc_assert (TREE_CODE (ind) == INDIRECT_REF);
7258 gcc_assert (DECL_P (TREE_OPERAND (ind, 0)));
7259 n = splay_tree_lookup (ctx->sfield_map,
7260 (splay_tree_key) TREE_OPERAND (ind, 0));
7261 sf = (tree) n->value;
7262 sf = *tcctx.cb.decl_map->get (sf);
7263 src = build_simple_mem_ref_loc (loc, sarg);
7264 src = omp_build_component_ref (src, sf);
7265 src = build_simple_mem_ref_loc (loc, src);
7266 dst = build_simple_mem_ref_loc (loc, arg);
7267 dst = omp_build_component_ref (dst, f);
7268 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
7269 append_to_statement_list (t, &list);
7270 n = splay_tree_lookup (ctx->field_map,
7271 (splay_tree_key) TREE_OPERAND (ind, 0));
7272 df = (tree) n->value;
7273 df = *tcctx.cb.decl_map->get (df);
7274 ptr = build_simple_mem_ref_loc (loc, arg);
7275 ptr = omp_build_component_ref (ptr, df);
7276 t = build2 (MODIFY_EXPR, TREE_TYPE (ptr), ptr,
7277 build_fold_addr_expr_loc (loc, dst));
7278 append_to_statement_list (t, &list);
7281 t = build1 (RETURN_EXPR, void_type_node, NULL);
7282 append_to_statement_list (t, &list);
7284 if (tcctx.cb.decl_map)
7285 delete tcctx.cb.decl_map;
7286 pop_gimplify_context (NULL);
7287 BIND_EXPR_BODY (bind) = list;
7288 pop_cfun ();
   /* Lower OMP_CLAUSE_DEPEND clauses in *PCLAUSES into the runtime depend
      array.  The array layout is: [0] = total number of depend addresses,
      [1] = number of out/inout addresses, then the out/inout addresses
      followed by the in addresses.  Initialization code is appended to
      *ISEQ, and a clobber of the array to *OSEQ; a new DEPEND clause
      pointing at the array is prepended to *PCLAUSES.  */
7291 static void
7292 lower_depend_clauses (tree *pclauses, gimple_seq *iseq, gimple_seq *oseq)
7294 tree c, clauses;
7295 gimple *g;
7296 size_t n_in = 0, n_out = 0, idx = 2, i;
7298 clauses = omp_find_clause (*pclauses, OMP_CLAUSE_DEPEND);
7299 gcc_assert (clauses);
7300 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
7301 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
7302 switch (OMP_CLAUSE_DEPEND_KIND (c))
7304 case OMP_CLAUSE_DEPEND_IN:
7305 n_in++;
7306 break;
7307 case OMP_CLAUSE_DEPEND_OUT:
7308 case OMP_CLAUSE_DEPEND_INOUT:
7309 n_out++;
7310 break;
7311 case OMP_CLAUSE_DEPEND_SOURCE:
7312 case OMP_CLAUSE_DEPEND_SINK:
7313 /* FALLTHRU */
7314 default:
7315 gcc_unreachable ();
7317 tree type = build_array_type_nelts (ptr_type_node, n_in + n_out + 2);
7318 tree array = create_tmp_var (type);
7319 TREE_ADDRESSABLE (array) = 1;
7320 tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
7321 NULL_TREE);
7322 g = gimple_build_assign (r, build_int_cst (ptr_type_node, n_in + n_out));
7323 gimple_seq_add_stmt (iseq, g);
7324 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
7325 NULL_TREE);
7326 g = gimple_build_assign (r, build_int_cst (ptr_type_node, n_out));
7327 gimple_seq_add_stmt (iseq, g);
   /* Two passes: first (i == 0) store the out/inout addresses, then
      (i == 1) the in addresses, starting at index 2.  */
7328 for (i = 0; i < 2; i++)
7330 if ((i ? n_in : n_out) == 0)
7331 continue;
7332 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
7333 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
7334 && ((OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_IN) ^ i))
7336 tree t = OMP_CLAUSE_DECL (c);
7337 t = fold_convert (ptr_type_node, t);
7338 gimplify_expr (&t, iseq, NULL, is_gimple_val, fb_rvalue);
7339 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (idx++),
7340 NULL_TREE, NULL_TREE);
7341 g = gimple_build_assign (r, t);
7342 gimple_seq_add_stmt (iseq, g);
7345 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
7346 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
7347 OMP_CLAUSE_CHAIN (c) = *pclauses;
7348 *pclauses = c;
   /* Clobber the array after the construct so its stack slot can be
      reused.  */
7349 tree clobber = build_constructor (type, NULL);
7350 TREE_THIS_VOLATILE (clobber) = 1;
7351 g = gimple_build_assign (array, clobber);
7352 gimple_seq_add_stmt (oseq, g);
 7355 /* Lower the OpenMP parallel or task directive in the current statement
 7356 in GSI_P. CTX holds context information for the directive. */
 7358 static void
 7359 lower_omp_taskreg (gimple_stmt_iterator *gsi_p, omp_context *ctx)
 7361 tree clauses;
 7362 tree child_fn, t;
 7363 gimple *stmt = gsi_stmt (*gsi_p);
 7364 gbind *par_bind, *bind, *dep_bind = NULL;
 7365 gimple_seq par_body, olist, ilist, par_olist, par_rlist, par_ilist, new_body;
 7366 location_t loc = gimple_location (stmt);
 7368 clauses = gimple_omp_taskreg_clauses (stmt);
 /* The region body is wrapped in a GIMPLE_BIND; fetch it so both its
    statement sequence and its local variables can be processed.  */
 7369 par_bind
 7370 = as_a <gbind *> (gimple_seq_first_stmt (gimple_omp_body (stmt)));
 7371 par_body = gimple_bind_body (par_bind);
 7372 child_fn = ctx->cb.dst_fn;
 /* For a parallel not already marked combined, walk the body with
    check_combined_parallel counting worksharing constructs; if exactly
    one is found, mark the parallel as combined.  */
 7373 if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL
 7374 && !gimple_omp_parallel_combined_p (stmt))
 7376 struct walk_stmt_info wi;
 7377 int ws_num = 0;
 7379 memset (&wi, 0, sizeof (wi));
 7380 wi.info = &ws_num;
 7381 wi.val_only = true;
 7382 walk_gimple_seq (par_body, check_combined_parallel, NULL, &wi);
 7383 if (ws_num == 1)
 7384 gimple_omp_parallel_set_combined_p (stmt, true);
 /* A task with depend clauses gets an extra enclosing bind (DEP_BIND)
    holding the depend setup (DEP_ILIST) and teardown (DEP_OLIST)
    sequences produced by lower_depend_clauses.  */
 7386 gimple_seq dep_ilist = NULL;
 7387 gimple_seq dep_olist = NULL;
 7388 if (gimple_code (stmt) == GIMPLE_OMP_TASK
 7389 && omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
 7391 push_gimplify_context ();
 7392 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
 7393 lower_depend_clauses (gimple_omp_task_clauses_ptr (stmt),
 7394 &dep_ilist, &dep_olist);
 /* If a second (sender) record type exists for this task, emit the
    task copy function for it.  */
 7397 if (ctx->srecord_type)
 7398 create_task_copyfn (as_a <gomp_task *> (stmt), ctx);
 7400 push_gimplify_context ();
 7402 par_olist = NULL;
 7403 par_ilist = NULL;
 7404 par_rlist = NULL;
 /* NOTE(review): a "phony" construct appears to come from grid
    (gridified kernel) lowering — such a parallel is not replaced by the
    construct itself; its lowered body is emitted inline instead (see
    the !phony_construct tests below).  */
 7405 bool phony_construct = gimple_code (stmt) == GIMPLE_OMP_PARALLEL
 7406 && gimple_omp_parallel_grid_phony (as_a <gomp_parallel *> (stmt));
 7407 if (phony_construct && ctx->record_type)
 7409 gcc_checking_assert (!ctx->receiver_decl);
 7410 ctx->receiver_decl = create_tmp_var
 7411 (build_reference_type (ctx->record_type), ".omp_rec")
 /* Lower the data-sharing clauses and then the region body itself;
    reductions are lowered only for parallel (tasks have none here).  */
 7413 lower_rec_input_clauses (clauses, &par_ilist, &par_olist, ctx, NULL);
 7414 lower_omp (&par_body, ctx);
 7415 if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL)
 7416 lower_reduction_clauses (clauses, &par_rlist, ctx);
 7418 /* Declare all the variables created by mapping and the variables
 7419 declared in the scope of the parallel body. */
 7420 record_vars_into (ctx->block_vars, child_fn);
 7421 record_vars_into (gimple_bind_vars (par_bind), child_fn);
 /* Build the sender object (.omp_data_o) through which shared data is
    passed to the child function, and make it the construct's data
    argument.  It must be addressable since its address is taken below.  */
 7423 if (ctx->record_type)
 7425 ctx->sender_decl
 7426 = create_tmp_var (ctx->srecord_type ? ctx->srecord_type
 7427 : ctx->record_type, ".omp_data_o");
 7428 DECL_NAMELESS (ctx->sender_decl) = 1;
 7429 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
 7430 gimple_omp_taskreg_set_data_arg (stmt, ctx->sender_decl);
 7433 olist = NULL;
 7434 ilist = NULL;
 7435 lower_send_clauses (clauses, &ilist, &olist, ctx);
 7436 lower_send_shared_vars (&ilist, &olist, ctx);
 /* Emit a clobber of the sender object after the construct so later
    passes know its storage is dead past this point.  */
 7438 if (ctx->record_type)
 7440 tree clobber = build_constructor (TREE_TYPE (ctx->sender_decl), NULL);
 7441 TREE_THIS_VOLATILE (clobber) = 1;
 7442 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
 7443 clobber));
 7446 /* Once all the expansions are done, sequence all the different
 7447 fragments inside gimple_omp_body. */
 7449 new_body = NULL;
 7451 if (ctx->record_type)
 /* First statement of the child body: receiver_decl = &sender_decl.  */
 7453 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
 7454 /* fixup_child_record_type might have changed receiver_decl's type. */
 7455 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
 7456 gimple_seq_add_stmt (&new_body,
 7457 gimple_build_assign (ctx->receiver_decl, t));
 /* Body order: clause input code, the lowered body, reduction code,
    optional cancellation label, then clause output code.  */
 7460 gimple_seq_add_seq (&new_body, par_ilist);
 7461 gimple_seq_add_seq (&new_body, par_body);
 7462 gimple_seq_add_seq (&new_body, par_rlist);
 7463 if (ctx->cancellable)
 7464 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
 7465 gimple_seq_add_seq (&new_body, par_olist);
 7466 new_body = maybe_catch_exception (new_body);
 7467 if (gimple_code (stmt) == GIMPLE_OMP_TASK)
 7468 gimple_seq_add_stmt (&new_body,
 7469 gimple_build_omp_continue (integer_zero_node,
 7470 integer_zero_node));
 /* A phony construct keeps its body inline (added to BIND below), so it
    gets neither an OMP_RETURN nor the construct's body set.  */
 7471 if (!phony_construct)
 7473 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
 7474 gimple_omp_set_body (stmt, new_body);
 /* Replace the original statement with a bind containing the send-side
    setup (ILIST), the construct itself (or its inlined body when
    phony), and the teardown (OLIST).  */
 7477 bind = gimple_build_bind (NULL, NULL, gimple_bind_block (par_bind));
 7478 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
 7479 gimple_bind_add_seq (bind, ilist);
 7480 if (!phony_construct)
 7481 gimple_bind_add_stmt (bind, stmt);
 7482 else
 7483 gimple_bind_add_seq (bind, new_body);
 7484 gimple_bind_add_seq (bind, olist);
 7486 pop_gimplify_context (NULL);
 /* For a depend task, nest BIND inside DEP_BIND between the depend
    setup and teardown sequences, and pop the matching gimplify
    context pushed above.  */
 7488 if (dep_bind)
 7490 gimple_bind_add_seq (dep_bind, dep_ilist);
 7491 gimple_bind_add_stmt (dep_bind, bind);
 7492 gimple_bind_add_seq (dep_bind, dep_olist);
 7493 pop_gimplify_context (dep_bind);
7497 /* Lower the GIMPLE_OMP_TARGET in the current statement
7498 in GSI_P. CTX holds context information for the directive. */
7500 static void
7501 lower_omp_target (gimple_stmt_iterator *gsi_p, omp_context *ctx)
7503 tree clauses;
7504 tree child_fn, t, c;
7505 gomp_target *stmt = as_a <gomp_target *> (gsi_stmt (*gsi_p));
7506 gbind *tgt_bind, *bind, *dep_bind = NULL;
7507 gimple_seq tgt_body, olist, ilist, fplist, new_body;
7508 location_t loc = gimple_location (stmt);
7509 bool offloaded, data_region;
7510 unsigned int map_cnt = 0;
7512 offloaded = is_gimple_omp_offloaded (stmt);
7513 switch (gimple_omp_target_kind (stmt))
7515 case GF_OMP_TARGET_KIND_REGION:
7516 case GF_OMP_TARGET_KIND_UPDATE:
7517 case GF_OMP_TARGET_KIND_ENTER_DATA:
7518 case GF_OMP_TARGET_KIND_EXIT_DATA:
7519 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
7520 case GF_OMP_TARGET_KIND_OACC_KERNELS:
7521 case GF_OMP_TARGET_KIND_OACC_UPDATE:
7522 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
7523 case GF_OMP_TARGET_KIND_OACC_DECLARE:
7524 data_region = false;
7525 break;
7526 case GF_OMP_TARGET_KIND_DATA:
7527 case GF_OMP_TARGET_KIND_OACC_DATA:
7528 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
7529 data_region = true;
7530 break;
7531 default:
7532 gcc_unreachable ();
7535 clauses = gimple_omp_target_clauses (stmt);
7537 gimple_seq dep_ilist = NULL;
7538 gimple_seq dep_olist = NULL;
7539 if (omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
7541 push_gimplify_context ();
7542 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
7543 lower_depend_clauses (gimple_omp_target_clauses_ptr (stmt),
7544 &dep_ilist, &dep_olist);
7547 tgt_bind = NULL;
7548 tgt_body = NULL;
7549 if (offloaded)
7551 tgt_bind = gimple_seq_first_stmt_as_a_bind (gimple_omp_body (stmt));
7552 tgt_body = gimple_bind_body (tgt_bind);
7554 else if (data_region)
7555 tgt_body = gimple_omp_body (stmt);
7556 child_fn = ctx->cb.dst_fn;
7558 push_gimplify_context ();
7559 fplist = NULL;
7561 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7562 switch (OMP_CLAUSE_CODE (c))
7564 tree var, x;
7566 default:
7567 break;
7568 case OMP_CLAUSE_MAP:
7569 #if CHECKING_P
7570 /* First check what we're prepared to handle in the following. */
7571 switch (OMP_CLAUSE_MAP_KIND (c))
7573 case GOMP_MAP_ALLOC:
7574 case GOMP_MAP_TO:
7575 case GOMP_MAP_FROM:
7576 case GOMP_MAP_TOFROM:
7577 case GOMP_MAP_POINTER:
7578 case GOMP_MAP_TO_PSET:
7579 case GOMP_MAP_DELETE:
7580 case GOMP_MAP_RELEASE:
7581 case GOMP_MAP_ALWAYS_TO:
7582 case GOMP_MAP_ALWAYS_FROM:
7583 case GOMP_MAP_ALWAYS_TOFROM:
7584 case GOMP_MAP_FIRSTPRIVATE_POINTER:
7585 case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
7586 case GOMP_MAP_STRUCT:
7587 case GOMP_MAP_ALWAYS_POINTER:
7588 break;
7589 case GOMP_MAP_FORCE_ALLOC:
7590 case GOMP_MAP_FORCE_TO:
7591 case GOMP_MAP_FORCE_FROM:
7592 case GOMP_MAP_FORCE_TOFROM:
7593 case GOMP_MAP_FORCE_PRESENT:
7594 case GOMP_MAP_FORCE_DEVICEPTR:
7595 case GOMP_MAP_DEVICE_RESIDENT:
7596 case GOMP_MAP_LINK:
7597 gcc_assert (is_gimple_omp_oacc (stmt));
7598 break;
7599 default:
7600 gcc_unreachable ();
7602 #endif
7603 /* FALLTHRU */
7604 case OMP_CLAUSE_TO:
7605 case OMP_CLAUSE_FROM:
7606 oacc_firstprivate:
7607 var = OMP_CLAUSE_DECL (c);
7608 if (!DECL_P (var))
7610 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP
7611 || (!OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
7612 && (OMP_CLAUSE_MAP_KIND (c)
7613 != GOMP_MAP_FIRSTPRIVATE_POINTER)))
7614 map_cnt++;
7615 continue;
7618 if (DECL_SIZE (var)
7619 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
7621 tree var2 = DECL_VALUE_EXPR (var);
7622 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
7623 var2 = TREE_OPERAND (var2, 0);
7624 gcc_assert (DECL_P (var2));
7625 var = var2;
7628 if (offloaded
7629 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7630 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
7631 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
7633 if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
7635 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx))
7636 && varpool_node::get_create (var)->offloadable)
7637 continue;
7639 tree type = build_pointer_type (TREE_TYPE (var));
7640 tree new_var = lookup_decl (var, ctx);
7641 x = create_tmp_var_raw (type, get_name (new_var));
7642 gimple_add_tmp_var (x);
7643 x = build_simple_mem_ref (x);
7644 SET_DECL_VALUE_EXPR (new_var, x);
7645 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7647 continue;
7650 if (!maybe_lookup_field (var, ctx))
7651 continue;
7653 /* Don't remap oacc parallel reduction variables, because the
7654 intermediate result must be local to each gang. */
7655 if (offloaded && !(OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7656 && OMP_CLAUSE_MAP_IN_REDUCTION (c)))
7658 x = build_receiver_ref (var, true, ctx);
7659 tree new_var = lookup_decl (var, ctx);
7661 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7662 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
7663 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
7664 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
7665 x = build_simple_mem_ref (x);
7666 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7668 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
7669 if (omp_is_reference (new_var))
7671 /* Create a local object to hold the instance
7672 value. */
7673 tree type = TREE_TYPE (TREE_TYPE (new_var));
7674 const char *id = IDENTIFIER_POINTER (DECL_NAME (new_var));
7675 tree inst = create_tmp_var (type, id);
7676 gimplify_assign (inst, fold_indirect_ref (x), &fplist);
7677 x = build_fold_addr_expr (inst);
7679 gimplify_assign (new_var, x, &fplist);
7681 else if (DECL_P (new_var))
7683 SET_DECL_VALUE_EXPR (new_var, x);
7684 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7686 else
7687 gcc_unreachable ();
7689 map_cnt++;
7690 break;
7692 case OMP_CLAUSE_FIRSTPRIVATE:
7693 if (is_oacc_parallel (ctx))
7694 goto oacc_firstprivate;
7695 map_cnt++;
7696 var = OMP_CLAUSE_DECL (c);
7697 if (!omp_is_reference (var)
7698 && !is_gimple_reg_type (TREE_TYPE (var)))
7700 tree new_var = lookup_decl (var, ctx);
7701 if (is_variable_sized (var))
7703 tree pvar = DECL_VALUE_EXPR (var);
7704 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
7705 pvar = TREE_OPERAND (pvar, 0);
7706 gcc_assert (DECL_P (pvar));
7707 tree new_pvar = lookup_decl (pvar, ctx);
7708 x = build_fold_indirect_ref (new_pvar);
7709 TREE_THIS_NOTRAP (x) = 1;
7711 else
7712 x = build_receiver_ref (var, true, ctx);
7713 SET_DECL_VALUE_EXPR (new_var, x);
7714 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7716 break;
7718 case OMP_CLAUSE_PRIVATE:
7719 if (is_gimple_omp_oacc (ctx->stmt))
7720 break;
7721 var = OMP_CLAUSE_DECL (c);
7722 if (is_variable_sized (var))
7724 tree new_var = lookup_decl (var, ctx);
7725 tree pvar = DECL_VALUE_EXPR (var);
7726 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
7727 pvar = TREE_OPERAND (pvar, 0);
7728 gcc_assert (DECL_P (pvar));
7729 tree new_pvar = lookup_decl (pvar, ctx);
7730 x = build_fold_indirect_ref (new_pvar);
7731 TREE_THIS_NOTRAP (x) = 1;
7732 SET_DECL_VALUE_EXPR (new_var, x);
7733 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7735 break;
7737 case OMP_CLAUSE_USE_DEVICE_PTR:
7738 case OMP_CLAUSE_IS_DEVICE_PTR:
7739 var = OMP_CLAUSE_DECL (c);
7740 map_cnt++;
7741 if (is_variable_sized (var))
7743 tree new_var = lookup_decl (var, ctx);
7744 tree pvar = DECL_VALUE_EXPR (var);
7745 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
7746 pvar = TREE_OPERAND (pvar, 0);
7747 gcc_assert (DECL_P (pvar));
7748 tree new_pvar = lookup_decl (pvar, ctx);
7749 x = build_fold_indirect_ref (new_pvar);
7750 TREE_THIS_NOTRAP (x) = 1;
7751 SET_DECL_VALUE_EXPR (new_var, x);
7752 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7754 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
7756 tree new_var = lookup_decl (var, ctx);
7757 tree type = build_pointer_type (TREE_TYPE (var));
7758 x = create_tmp_var_raw (type, get_name (new_var));
7759 gimple_add_tmp_var (x);
7760 x = build_simple_mem_ref (x);
7761 SET_DECL_VALUE_EXPR (new_var, x);
7762 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7764 else
7766 tree new_var = lookup_decl (var, ctx);
7767 x = create_tmp_var_raw (TREE_TYPE (new_var), get_name (new_var));
7768 gimple_add_tmp_var (x);
7769 SET_DECL_VALUE_EXPR (new_var, x);
7770 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7772 break;
7775 if (offloaded)
7777 target_nesting_level++;
7778 lower_omp (&tgt_body, ctx);
7779 target_nesting_level--;
7781 else if (data_region)
7782 lower_omp (&tgt_body, ctx);
7784 if (offloaded)
7786 /* Declare all the variables created by mapping and the variables
7787 declared in the scope of the target body. */
7788 record_vars_into (ctx->block_vars, child_fn);
7789 record_vars_into (gimple_bind_vars (tgt_bind), child_fn);
7792 olist = NULL;
7793 ilist = NULL;
7794 if (ctx->record_type)
7796 ctx->sender_decl
7797 = create_tmp_var (ctx->record_type, ".omp_data_arr");
7798 DECL_NAMELESS (ctx->sender_decl) = 1;
7799 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
7800 t = make_tree_vec (3);
7801 TREE_VEC_ELT (t, 0) = ctx->sender_decl;
7802 TREE_VEC_ELT (t, 1)
7803 = create_tmp_var (build_array_type_nelts (size_type_node, map_cnt),
7804 ".omp_data_sizes");
7805 DECL_NAMELESS (TREE_VEC_ELT (t, 1)) = 1;
7806 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 1)) = 1;
7807 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 1;
7808 tree tkind_type = short_unsigned_type_node;
7809 int talign_shift = 8;
7810 TREE_VEC_ELT (t, 2)
7811 = create_tmp_var (build_array_type_nelts (tkind_type, map_cnt),
7812 ".omp_data_kinds");
7813 DECL_NAMELESS (TREE_VEC_ELT (t, 2)) = 1;
7814 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 2)) = 1;
7815 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 1;
7816 gimple_omp_target_set_data_arg (stmt, t);
7818 vec<constructor_elt, va_gc> *vsize;
7819 vec<constructor_elt, va_gc> *vkind;
7820 vec_alloc (vsize, map_cnt);
7821 vec_alloc (vkind, map_cnt);
7822 unsigned int map_idx = 0;
7824 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7825 switch (OMP_CLAUSE_CODE (c))
7827 tree ovar, nc, s, purpose, var, x, type;
7828 unsigned int talign;
7830 default:
7831 break;
7833 case OMP_CLAUSE_MAP:
7834 case OMP_CLAUSE_TO:
7835 case OMP_CLAUSE_FROM:
7836 oacc_firstprivate_map:
7837 nc = c;
7838 ovar = OMP_CLAUSE_DECL (c);
7839 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7840 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
7841 || (OMP_CLAUSE_MAP_KIND (c)
7842 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
7843 break;
7844 if (!DECL_P (ovar))
7846 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7847 && OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
7849 gcc_checking_assert (OMP_CLAUSE_DECL (OMP_CLAUSE_CHAIN (c))
7850 == get_base_address (ovar));
7851 nc = OMP_CLAUSE_CHAIN (c);
7852 ovar = OMP_CLAUSE_DECL (nc);
7854 else
7856 tree x = build_sender_ref (ovar, ctx);
7857 tree v
7858 = build_fold_addr_expr_with_type (ovar, ptr_type_node);
7859 gimplify_assign (x, v, &ilist);
7860 nc = NULL_TREE;
7863 else
7865 if (DECL_SIZE (ovar)
7866 && TREE_CODE (DECL_SIZE (ovar)) != INTEGER_CST)
7868 tree ovar2 = DECL_VALUE_EXPR (ovar);
7869 gcc_assert (TREE_CODE (ovar2) == INDIRECT_REF);
7870 ovar2 = TREE_OPERAND (ovar2, 0);
7871 gcc_assert (DECL_P (ovar2));
7872 ovar = ovar2;
7874 if (!maybe_lookup_field (ovar, ctx))
7875 continue;
7878 talign = TYPE_ALIGN_UNIT (TREE_TYPE (ovar));
7879 if (DECL_P (ovar) && DECL_ALIGN_UNIT (ovar) > talign)
7880 talign = DECL_ALIGN_UNIT (ovar);
7881 if (nc)
7883 var = lookup_decl_in_outer_ctx (ovar, ctx);
7884 x = build_sender_ref (ovar, ctx);
7886 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7887 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
7888 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
7889 && TREE_CODE (TREE_TYPE (ovar)) == ARRAY_TYPE)
7891 gcc_assert (offloaded);
7892 tree avar
7893 = create_tmp_var (TREE_TYPE (TREE_TYPE (x)));
7894 mark_addressable (avar);
7895 gimplify_assign (avar, build_fold_addr_expr (var), &ilist);
7896 talign = DECL_ALIGN_UNIT (avar);
7897 avar = build_fold_addr_expr (avar);
7898 gimplify_assign (x, avar, &ilist);
7900 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7902 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
7903 if (!omp_is_reference (var))
7905 if (is_gimple_reg (var)
7906 && OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
7907 TREE_NO_WARNING (var) = 1;
7908 var = build_fold_addr_expr (var);
7910 else
7911 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
7912 gimplify_assign (x, var, &ilist);
7914 else if (is_gimple_reg (var))
7916 gcc_assert (offloaded);
7917 tree avar = create_tmp_var (TREE_TYPE (var));
7918 mark_addressable (avar);
7919 enum gomp_map_kind map_kind = OMP_CLAUSE_MAP_KIND (c);
7920 if (GOMP_MAP_COPY_TO_P (map_kind)
7921 || map_kind == GOMP_MAP_POINTER
7922 || map_kind == GOMP_MAP_TO_PSET
7923 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
7925 /* If we need to initialize a temporary
7926 with VAR because it is not addressable, and
7927 the variable hasn't been initialized yet, then
7928 we'll get a warning for the store to avar.
7929 Don't warn in that case, the mapping might
7930 be implicit. */
7931 TREE_NO_WARNING (var) = 1;
7932 gimplify_assign (avar, var, &ilist);
7934 avar = build_fold_addr_expr (avar);
7935 gimplify_assign (x, avar, &ilist);
7936 if ((GOMP_MAP_COPY_FROM_P (map_kind)
7937 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
7938 && !TYPE_READONLY (TREE_TYPE (var)))
7940 x = unshare_expr (x);
7941 x = build_simple_mem_ref (x);
7942 gimplify_assign (var, x, &olist);
7945 else
7947 var = build_fold_addr_expr (var);
7948 gimplify_assign (x, var, &ilist);
7951 s = NULL_TREE;
7952 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7954 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
7955 s = TREE_TYPE (ovar);
7956 if (TREE_CODE (s) == REFERENCE_TYPE)
7957 s = TREE_TYPE (s);
7958 s = TYPE_SIZE_UNIT (s);
7960 else
7961 s = OMP_CLAUSE_SIZE (c);
7962 if (s == NULL_TREE)
7963 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
7964 s = fold_convert (size_type_node, s);
7965 purpose = size_int (map_idx++);
7966 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
7967 if (TREE_CODE (s) != INTEGER_CST)
7968 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
7970 unsigned HOST_WIDE_INT tkind, tkind_zero;
7971 switch (OMP_CLAUSE_CODE (c))
7973 case OMP_CLAUSE_MAP:
7974 tkind = OMP_CLAUSE_MAP_KIND (c);
7975 tkind_zero = tkind;
7976 if (OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c))
7977 switch (tkind)
7979 case GOMP_MAP_ALLOC:
7980 case GOMP_MAP_TO:
7981 case GOMP_MAP_FROM:
7982 case GOMP_MAP_TOFROM:
7983 case GOMP_MAP_ALWAYS_TO:
7984 case GOMP_MAP_ALWAYS_FROM:
7985 case GOMP_MAP_ALWAYS_TOFROM:
7986 case GOMP_MAP_RELEASE:
7987 case GOMP_MAP_FORCE_TO:
7988 case GOMP_MAP_FORCE_FROM:
7989 case GOMP_MAP_FORCE_TOFROM:
7990 case GOMP_MAP_FORCE_PRESENT:
7991 tkind_zero = GOMP_MAP_ZERO_LEN_ARRAY_SECTION;
7992 break;
7993 case GOMP_MAP_DELETE:
7994 tkind_zero = GOMP_MAP_DELETE_ZERO_LEN_ARRAY_SECTION;
7995 default:
7996 break;
7998 if (tkind_zero != tkind)
8000 if (integer_zerop (s))
8001 tkind = tkind_zero;
8002 else if (integer_nonzerop (s))
8003 tkind_zero = tkind;
8005 break;
8006 case OMP_CLAUSE_FIRSTPRIVATE:
8007 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
8008 tkind = GOMP_MAP_TO;
8009 tkind_zero = tkind;
8010 break;
8011 case OMP_CLAUSE_TO:
8012 tkind = GOMP_MAP_TO;
8013 tkind_zero = tkind;
8014 break;
8015 case OMP_CLAUSE_FROM:
8016 tkind = GOMP_MAP_FROM;
8017 tkind_zero = tkind;
8018 break;
8019 default:
8020 gcc_unreachable ();
8022 gcc_checking_assert (tkind
8023 < (HOST_WIDE_INT_C (1U) << talign_shift));
8024 gcc_checking_assert (tkind_zero
8025 < (HOST_WIDE_INT_C (1U) << talign_shift));
8026 talign = ceil_log2 (talign);
8027 tkind |= talign << talign_shift;
8028 tkind_zero |= talign << talign_shift;
8029 gcc_checking_assert (tkind
8030 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
8031 gcc_checking_assert (tkind_zero
8032 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
8033 if (tkind == tkind_zero)
8034 x = build_int_cstu (tkind_type, tkind);
8035 else
8037 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 0;
8038 x = build3 (COND_EXPR, tkind_type,
8039 fold_build2 (EQ_EXPR, boolean_type_node,
8040 unshare_expr (s), size_zero_node),
8041 build_int_cstu (tkind_type, tkind_zero),
8042 build_int_cstu (tkind_type, tkind));
8044 CONSTRUCTOR_APPEND_ELT (vkind, purpose, x);
8045 if (nc && nc != c)
8046 c = nc;
8047 break;
8049 case OMP_CLAUSE_FIRSTPRIVATE:
8050 if (is_oacc_parallel (ctx))
8051 goto oacc_firstprivate_map;
8052 ovar = OMP_CLAUSE_DECL (c);
8053 if (omp_is_reference (ovar))
8054 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
8055 else
8056 talign = DECL_ALIGN_UNIT (ovar);
8057 var = lookup_decl_in_outer_ctx (ovar, ctx);
8058 x = build_sender_ref (ovar, ctx);
8059 tkind = GOMP_MAP_FIRSTPRIVATE;
8060 type = TREE_TYPE (ovar);
8061 if (omp_is_reference (ovar))
8062 type = TREE_TYPE (type);
8063 if ((INTEGRAL_TYPE_P (type)
8064 && TYPE_PRECISION (type) <= POINTER_SIZE)
8065 || TREE_CODE (type) == POINTER_TYPE)
8067 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
8068 tree t = var;
8069 if (omp_is_reference (var))
8070 t = build_simple_mem_ref (var);
8071 else if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
8072 TREE_NO_WARNING (var) = 1;
8073 if (TREE_CODE (type) != POINTER_TYPE)
8074 t = fold_convert (pointer_sized_int_node, t);
8075 t = fold_convert (TREE_TYPE (x), t);
8076 gimplify_assign (x, t, &ilist);
8078 else if (omp_is_reference (var))
8079 gimplify_assign (x, var, &ilist);
8080 else if (is_gimple_reg (var))
8082 tree avar = create_tmp_var (TREE_TYPE (var));
8083 mark_addressable (avar);
8084 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
8085 TREE_NO_WARNING (var) = 1;
8086 gimplify_assign (avar, var, &ilist);
8087 avar = build_fold_addr_expr (avar);
8088 gimplify_assign (x, avar, &ilist);
8090 else
8092 var = build_fold_addr_expr (var);
8093 gimplify_assign (x, var, &ilist);
8095 if (tkind == GOMP_MAP_FIRSTPRIVATE_INT)
8096 s = size_int (0);
8097 else if (omp_is_reference (ovar))
8098 s = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
8099 else
8100 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
8101 s = fold_convert (size_type_node, s);
8102 purpose = size_int (map_idx++);
8103 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
8104 if (TREE_CODE (s) != INTEGER_CST)
8105 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
8107 gcc_checking_assert (tkind
8108 < (HOST_WIDE_INT_C (1U) << talign_shift));
8109 talign = ceil_log2 (talign);
8110 tkind |= talign << talign_shift;
8111 gcc_checking_assert (tkind
8112 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
8113 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
8114 build_int_cstu (tkind_type, tkind));
8115 break;
8117 case OMP_CLAUSE_USE_DEVICE_PTR:
8118 case OMP_CLAUSE_IS_DEVICE_PTR:
8119 ovar = OMP_CLAUSE_DECL (c);
8120 var = lookup_decl_in_outer_ctx (ovar, ctx);
8121 x = build_sender_ref (ovar, ctx);
8122 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR)
8123 tkind = GOMP_MAP_USE_DEVICE_PTR;
8124 else
8125 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
8126 type = TREE_TYPE (ovar);
8127 if (TREE_CODE (type) == ARRAY_TYPE)
8128 var = build_fold_addr_expr (var);
8129 else
8131 if (omp_is_reference (ovar))
8133 type = TREE_TYPE (type);
8134 if (TREE_CODE (type) != ARRAY_TYPE)
8135 var = build_simple_mem_ref (var);
8136 var = fold_convert (TREE_TYPE (x), var);
8139 gimplify_assign (x, var, &ilist);
8140 s = size_int (0);
8141 purpose = size_int (map_idx++);
8142 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
8143 gcc_checking_assert (tkind
8144 < (HOST_WIDE_INT_C (1U) << talign_shift));
8145 gcc_checking_assert (tkind
8146 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
8147 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
8148 build_int_cstu (tkind_type, tkind));
8149 break;
8152 gcc_assert (map_idx == map_cnt);
8154 DECL_INITIAL (TREE_VEC_ELT (t, 1))
8155 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 1)), vsize);
8156 DECL_INITIAL (TREE_VEC_ELT (t, 2))
8157 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 2)), vkind);
8158 for (int i = 1; i <= 2; i++)
8159 if (!TREE_STATIC (TREE_VEC_ELT (t, i)))
8161 gimple_seq initlist = NULL;
8162 force_gimple_operand (build1 (DECL_EXPR, void_type_node,
8163 TREE_VEC_ELT (t, i)),
8164 &initlist, true, NULL_TREE);
8165 gimple_seq_add_seq (&ilist, initlist);
8167 tree clobber = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, i)),
8168 NULL);
8169 TREE_THIS_VOLATILE (clobber) = 1;
8170 gimple_seq_add_stmt (&olist,
8171 gimple_build_assign (TREE_VEC_ELT (t, i),
8172 clobber));
8175 tree clobber = build_constructor (ctx->record_type, NULL);
8176 TREE_THIS_VOLATILE (clobber) = 1;
8177 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
8178 clobber));
8181 /* Once all the expansions are done, sequence all the different
8182 fragments inside gimple_omp_body. */
8184 new_body = NULL;
8186 if (offloaded
8187 && ctx->record_type)
8189 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
8190 /* fixup_child_record_type might have changed receiver_decl's type. */
8191 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
8192 gimple_seq_add_stmt (&new_body,
8193 gimple_build_assign (ctx->receiver_decl, t));
8195 gimple_seq_add_seq (&new_body, fplist);
8197 if (offloaded || data_region)
8199 tree prev = NULL_TREE;
8200 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
8201 switch (OMP_CLAUSE_CODE (c))
8203 tree var, x;
8204 default:
8205 break;
8206 case OMP_CLAUSE_FIRSTPRIVATE:
8207 if (is_gimple_omp_oacc (ctx->stmt))
8208 break;
8209 var = OMP_CLAUSE_DECL (c);
8210 if (omp_is_reference (var)
8211 || is_gimple_reg_type (TREE_TYPE (var)))
8213 tree new_var = lookup_decl (var, ctx);
8214 tree type;
8215 type = TREE_TYPE (var);
8216 if (omp_is_reference (var))
8217 type = TREE_TYPE (type);
8218 if ((INTEGRAL_TYPE_P (type)
8219 && TYPE_PRECISION (type) <= POINTER_SIZE)
8220 || TREE_CODE (type) == POINTER_TYPE)
8222 x = build_receiver_ref (var, false, ctx);
8223 if (TREE_CODE (type) != POINTER_TYPE)
8224 x = fold_convert (pointer_sized_int_node, x);
8225 x = fold_convert (type, x);
8226 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
8227 fb_rvalue);
8228 if (omp_is_reference (var))
8230 tree v = create_tmp_var_raw (type, get_name (var));
8231 gimple_add_tmp_var (v);
8232 TREE_ADDRESSABLE (v) = 1;
8233 gimple_seq_add_stmt (&new_body,
8234 gimple_build_assign (v, x));
8235 x = build_fold_addr_expr (v);
8237 gimple_seq_add_stmt (&new_body,
8238 gimple_build_assign (new_var, x));
8240 else
8242 x = build_receiver_ref (var, !omp_is_reference (var), ctx);
8243 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
8244 fb_rvalue);
8245 gimple_seq_add_stmt (&new_body,
8246 gimple_build_assign (new_var, x));
8249 else if (is_variable_sized (var))
8251 tree pvar = DECL_VALUE_EXPR (var);
8252 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
8253 pvar = TREE_OPERAND (pvar, 0);
8254 gcc_assert (DECL_P (pvar));
8255 tree new_var = lookup_decl (pvar, ctx);
8256 x = build_receiver_ref (var, false, ctx);
8257 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8258 gimple_seq_add_stmt (&new_body,
8259 gimple_build_assign (new_var, x));
8261 break;
8262 case OMP_CLAUSE_PRIVATE:
8263 if (is_gimple_omp_oacc (ctx->stmt))
8264 break;
8265 var = OMP_CLAUSE_DECL (c);
8266 if (omp_is_reference (var))
8268 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
8269 tree new_var = lookup_decl (var, ctx);
8270 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
8271 if (TREE_CONSTANT (x))
8273 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
8274 get_name (var));
8275 gimple_add_tmp_var (x);
8276 TREE_ADDRESSABLE (x) = 1;
8277 x = build_fold_addr_expr_loc (clause_loc, x);
8279 else
8280 break;
8282 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
8283 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8284 gimple_seq_add_stmt (&new_body,
8285 gimple_build_assign (new_var, x));
8287 break;
8288 case OMP_CLAUSE_USE_DEVICE_PTR:
8289 case OMP_CLAUSE_IS_DEVICE_PTR:
8290 var = OMP_CLAUSE_DECL (c);
8291 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR)
8292 x = build_sender_ref (var, ctx);
8293 else
8294 x = build_receiver_ref (var, false, ctx);
8295 if (is_variable_sized (var))
8297 tree pvar = DECL_VALUE_EXPR (var);
8298 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
8299 pvar = TREE_OPERAND (pvar, 0);
8300 gcc_assert (DECL_P (pvar));
8301 tree new_var = lookup_decl (pvar, ctx);
8302 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8303 gimple_seq_add_stmt (&new_body,
8304 gimple_build_assign (new_var, x));
8306 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
8308 tree new_var = lookup_decl (var, ctx);
8309 new_var = DECL_VALUE_EXPR (new_var);
8310 gcc_assert (TREE_CODE (new_var) == MEM_REF);
8311 new_var = TREE_OPERAND (new_var, 0);
8312 gcc_assert (DECL_P (new_var));
8313 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8314 gimple_seq_add_stmt (&new_body,
8315 gimple_build_assign (new_var, x));
8317 else
8319 tree type = TREE_TYPE (var);
8320 tree new_var = lookup_decl (var, ctx);
8321 if (omp_is_reference (var))
8323 type = TREE_TYPE (type);
8324 if (TREE_CODE (type) != ARRAY_TYPE)
8326 tree v = create_tmp_var_raw (type, get_name (var));
8327 gimple_add_tmp_var (v);
8328 TREE_ADDRESSABLE (v) = 1;
8329 x = fold_convert (type, x);
8330 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
8331 fb_rvalue);
8332 gimple_seq_add_stmt (&new_body,
8333 gimple_build_assign (v, x));
8334 x = build_fold_addr_expr (v);
8337 new_var = DECL_VALUE_EXPR (new_var);
8338 x = fold_convert (TREE_TYPE (new_var), x);
8339 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8340 gimple_seq_add_stmt (&new_body,
8341 gimple_build_assign (new_var, x));
8343 break;
8345 /* Handle GOMP_MAP_FIRSTPRIVATE_{POINTER,REFERENCE} in second pass,
8346 so that firstprivate vars holding OMP_CLAUSE_SIZE if needed
8347 are already handled. Similarly OMP_CLAUSE_PRIVATE for VLAs
8348 or references to VLAs. */
8349 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
8350 switch (OMP_CLAUSE_CODE (c))
8352 tree var;
8353 default:
8354 break;
8355 case OMP_CLAUSE_MAP:
8356 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
8357 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
8359 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
8360 HOST_WIDE_INT offset = 0;
8361 gcc_assert (prev);
8362 var = OMP_CLAUSE_DECL (c);
8363 if (DECL_P (var)
8364 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
8365 && is_global_var (maybe_lookup_decl_in_outer_ctx (var,
8366 ctx))
8367 && varpool_node::get_create (var)->offloadable)
8368 break;
8369 if (TREE_CODE (var) == INDIRECT_REF
8370 && TREE_CODE (TREE_OPERAND (var, 0)) == COMPONENT_REF)
8371 var = TREE_OPERAND (var, 0);
8372 if (TREE_CODE (var) == COMPONENT_REF)
8374 var = get_addr_base_and_unit_offset (var, &offset);
8375 gcc_assert (var != NULL_TREE && DECL_P (var));
8377 else if (DECL_SIZE (var)
8378 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
8380 tree var2 = DECL_VALUE_EXPR (var);
8381 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
8382 var2 = TREE_OPERAND (var2, 0);
8383 gcc_assert (DECL_P (var2));
8384 var = var2;
8386 tree new_var = lookup_decl (var, ctx), x;
8387 tree type = TREE_TYPE (new_var);
8388 bool is_ref;
8389 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == INDIRECT_REF
8390 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
8391 == COMPONENT_REF))
8393 type = TREE_TYPE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0));
8394 is_ref = true;
8395 new_var = build2 (MEM_REF, type,
8396 build_fold_addr_expr (new_var),
8397 build_int_cst (build_pointer_type (type),
8398 offset));
8400 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPONENT_REF)
8402 type = TREE_TYPE (OMP_CLAUSE_DECL (c));
8403 is_ref = TREE_CODE (type) == REFERENCE_TYPE;
8404 new_var = build2 (MEM_REF, type,
8405 build_fold_addr_expr (new_var),
8406 build_int_cst (build_pointer_type (type),
8407 offset));
8409 else
8410 is_ref = omp_is_reference (var);
8411 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
8412 is_ref = false;
8413 bool ref_to_array = false;
8414 if (is_ref)
8416 type = TREE_TYPE (type);
8417 if (TREE_CODE (type) == ARRAY_TYPE)
8419 type = build_pointer_type (type);
8420 ref_to_array = true;
8423 else if (TREE_CODE (type) == ARRAY_TYPE)
8425 tree decl2 = DECL_VALUE_EXPR (new_var);
8426 gcc_assert (TREE_CODE (decl2) == MEM_REF);
8427 decl2 = TREE_OPERAND (decl2, 0);
8428 gcc_assert (DECL_P (decl2));
8429 new_var = decl2;
8430 type = TREE_TYPE (new_var);
8432 x = build_receiver_ref (OMP_CLAUSE_DECL (prev), false, ctx);
8433 x = fold_convert_loc (clause_loc, type, x);
8434 if (!integer_zerop (OMP_CLAUSE_SIZE (c)))
8436 tree bias = OMP_CLAUSE_SIZE (c);
8437 if (DECL_P (bias))
8438 bias = lookup_decl (bias, ctx);
8439 bias = fold_convert_loc (clause_loc, sizetype, bias);
8440 bias = fold_build1_loc (clause_loc, NEGATE_EXPR, sizetype,
8441 bias);
8442 x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
8443 TREE_TYPE (x), x, bias);
8445 if (ref_to_array)
8446 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
8447 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8448 if (is_ref && !ref_to_array)
8450 tree t = create_tmp_var_raw (type, get_name (var));
8451 gimple_add_tmp_var (t);
8452 TREE_ADDRESSABLE (t) = 1;
8453 gimple_seq_add_stmt (&new_body,
8454 gimple_build_assign (t, x));
8455 x = build_fold_addr_expr_loc (clause_loc, t);
8457 gimple_seq_add_stmt (&new_body,
8458 gimple_build_assign (new_var, x));
8459 prev = NULL_TREE;
8461 else if (OMP_CLAUSE_CHAIN (c)
8462 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c))
8463 == OMP_CLAUSE_MAP
8464 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
8465 == GOMP_MAP_FIRSTPRIVATE_POINTER
8466 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
8467 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
8468 prev = c;
8469 break;
8470 case OMP_CLAUSE_PRIVATE:
8471 var = OMP_CLAUSE_DECL (c);
8472 if (is_variable_sized (var))
8474 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
8475 tree new_var = lookup_decl (var, ctx);
8476 tree pvar = DECL_VALUE_EXPR (var);
8477 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
8478 pvar = TREE_OPERAND (pvar, 0);
8479 gcc_assert (DECL_P (pvar));
8480 tree new_pvar = lookup_decl (pvar, ctx);
8481 tree atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
8482 tree al = size_int (DECL_ALIGN (var));
8483 tree x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
8484 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
8485 x = fold_convert_loc (clause_loc, TREE_TYPE (new_pvar), x);
8486 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8487 gimple_seq_add_stmt (&new_body,
8488 gimple_build_assign (new_pvar, x));
8490 else if (omp_is_reference (var) && !is_gimple_omp_oacc (ctx->stmt))
8492 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
8493 tree new_var = lookup_decl (var, ctx);
8494 tree x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
8495 if (TREE_CONSTANT (x))
8496 break;
8497 else
8499 tree atmp
8500 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
8501 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
8502 tree al = size_int (TYPE_ALIGN (rtype));
8503 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
8506 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
8507 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8508 gimple_seq_add_stmt (&new_body,
8509 gimple_build_assign (new_var, x));
8511 break;
8514 gimple_seq fork_seq = NULL;
8515 gimple_seq join_seq = NULL;
8517 if (is_oacc_parallel (ctx))
8519 /* If there are reductions on the offloaded region itself, treat
8520 them as a dummy GANG loop. */
8521 tree level = build_int_cst (integer_type_node, GOMP_DIM_GANG);
8523 lower_oacc_reductions (gimple_location (ctx->stmt), clauses, level,
8524 false, NULL, NULL, &fork_seq, &join_seq, ctx);
8527 gimple_seq_add_seq (&new_body, fork_seq);
8528 gimple_seq_add_seq (&new_body, tgt_body);
8529 gimple_seq_add_seq (&new_body, join_seq);
8531 if (offloaded)
8532 new_body = maybe_catch_exception (new_body);
8534 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
8535 gimple_omp_set_body (stmt, new_body);
8538 bind = gimple_build_bind (NULL, NULL,
8539 tgt_bind ? gimple_bind_block (tgt_bind)
8540 : NULL_TREE);
8541 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
8542 gimple_bind_add_seq (bind, ilist);
8543 gimple_bind_add_stmt (bind, stmt);
8544 gimple_bind_add_seq (bind, olist);
8546 pop_gimplify_context (NULL);
8548 if (dep_bind)
8550 gimple_bind_add_seq (dep_bind, dep_ilist);
8551 gimple_bind_add_stmt (dep_bind, bind);
8552 gimple_bind_add_seq (dep_bind, dep_olist);
8553 pop_gimplify_context (dep_bind);
/* Expand code for an OpenMP teams directive.  */

static void
lower_omp_teams (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  gomp_teams *teams_stmt = as_a <gomp_teams *> (gsi_stmt (*gsi_p));
  push_gimplify_context ();

  /* Replace the teams statement with a GIMPLE_BIND that will hold the
     lowered sequence.  */
  tree block = make_node (BLOCK);
  gbind *bind = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, bind, true);
  gimple_seq bind_body = NULL;
  gimple_seq dlist = NULL;
  gimple_seq olist = NULL;

  /* Evaluate the num_teams clause; 0 is used as the sentinel when the
     clause is absent.  */
  tree num_teams = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
				    OMP_CLAUSE_NUM_TEAMS);
  if (num_teams == NULL_TREE)
    num_teams = build_int_cst (unsigned_type_node, 0);
  else
    {
      num_teams = OMP_CLAUSE_NUM_TEAMS_EXPR (num_teams);
      num_teams = fold_convert (unsigned_type_node, num_teams);
      gimplify_expr (&num_teams, &bind_body, NULL, is_gimple_val, fb_rvalue);
    }
  /* Likewise for thread_limit; again 0 means "not specified".  */
  tree thread_limit = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
				       OMP_CLAUSE_THREAD_LIMIT);
  if (thread_limit == NULL_TREE)
    thread_limit = build_int_cst (unsigned_type_node, 0);
  else
    {
      thread_limit = OMP_CLAUSE_THREAD_LIMIT_EXPR (thread_limit);
      thread_limit = fold_convert (unsigned_type_node, thread_limit);
      gimplify_expr (&thread_limit, &bind_body, NULL, is_gimple_val,
		     fb_rvalue);
    }

  /* Lower data-sharing clauses, the teams body, and reductions.  */
  lower_rec_input_clauses (gimple_omp_teams_clauses (teams_stmt),
			   &bind_body, &dlist, ctx, NULL);
  lower_omp (gimple_omp_body_ptr (teams_stmt), ctx);
  lower_reduction_clauses (gimple_omp_teams_clauses (teams_stmt), &olist, ctx);
  /* For a real (non-gridified) teams construct, keep the teams statement
     and emit the GOMP_teams runtime call before the body.  */
  if (!gimple_omp_teams_grid_phony (teams_stmt))
    {
      gimple_seq_add_stmt (&bind_body, teams_stmt);
      location_t loc = gimple_location (teams_stmt);
      tree decl = builtin_decl_explicit (BUILT_IN_GOMP_TEAMS);
      gimple *call = gimple_build_call (decl, 2, num_teams, thread_limit);
      gimple_set_location (call, loc);
      gimple_seq_add_stmt (&bind_body, call);
    }

  /* Assemble the final sequence: body, then reduction epilogue, then
     destructor/copy-out code, then the closing OMP return.  */
  gimple_seq_add_seq (&bind_body, gimple_omp_body (teams_stmt));
  gimple_omp_set_body (teams_stmt, NULL);
  gimple_seq_add_seq (&bind_body, olist);
  gimple_seq_add_seq (&bind_body, dlist);
  if (!gimple_omp_teams_grid_phony (teams_stmt))
    gimple_seq_add_stmt (&bind_body, gimple_build_omp_return (true));
  gimple_bind_set_body (bind, bind_body);

  pop_gimplify_context (bind);

  gimple_bind_append_vars (bind, ctx->block_vars);
  BLOCK_VARS (block) = ctx->block_vars;
  if (BLOCK_VARS (block))
    TREE_USED (block) = 1;
}
8624 /* Expand code within an artificial GIMPLE_OMP_GRID_BODY OMP construct. */
8626 static void
8627 lower_omp_grid_body (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8629 gimple *stmt = gsi_stmt (*gsi_p);
8630 lower_omp (gimple_omp_body_ptr (stmt), ctx);
8631 gimple_seq_add_stmt (gimple_omp_body_ptr (stmt),
8632 gimple_build_omp_return (false));
8636 /* Callback for lower_omp_1. Return non-NULL if *tp needs to be
8637 regimplified. If DATA is non-NULL, lower_omp_1 is outside
8638 of OMP context, but with task_shared_vars set. */
8640 static tree
8641 lower_omp_regimplify_p (tree *tp, int *walk_subtrees,
8642 void *data)
8644 tree t = *tp;
8646 /* Any variable with DECL_VALUE_EXPR needs to be regimplified. */
8647 if (VAR_P (t) && data == NULL && DECL_HAS_VALUE_EXPR_P (t))
8648 return t;
8650 if (task_shared_vars
8651 && DECL_P (t)
8652 && bitmap_bit_p (task_shared_vars, DECL_UID (t)))
8653 return t;
8655 /* If a global variable has been privatized, TREE_CONSTANT on
8656 ADDR_EXPR might be wrong. */
8657 if (data == NULL && TREE_CODE (t) == ADDR_EXPR)
8658 recompute_tree_invariant_for_addr_expr (t);
8660 *walk_subtrees = !IS_TYPE_OR_DECL_P (t);
8661 return NULL_TREE;
/* Data to be communicated between lower_omp_regimplify_operands and
   lower_omp_regimplify_operands_p.  */

struct lower_omp_regimplify_operands_data
{
  /* OMP context of the statement being regimplified.  */
  omp_context *ctx;
  /* Pairs of <saved DECL_VALUE_EXPR, decl> pushed by the walk callback
     so the caller can restore the value expressions afterwards.  */
  vec<tree> *decls;
};
/* Helper function for lower_omp_regimplify_operands.  Find
   omp_member_access_dummy_var vars and adjust temporarily their
   DECL_VALUE_EXPRs if needed.  */

static tree
lower_omp_regimplify_operands_p (tree *tp, int *walk_subtrees,
				 void *data)
{
  tree t = omp_member_access_dummy_var (*tp);
  if (t)
    {
      struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
      lower_omp_regimplify_operands_data *ldata
	= (lower_omp_regimplify_operands_data *) wi->info;
      tree o = maybe_lookup_decl (t, ldata->ctx);
      if (o != t)
	{
	  /* Save the original value expr and the decl itself (in this
	     order; the caller pops them in reverse) so the change can
	     be undone after regimplification.  */
	  ldata->decls->safe_push (DECL_VALUE_EXPR (*tp));
	  ldata->decls->safe_push (*tp);
	  /* Re-express the value expr in terms of the context's copy
	     of the dummy var.  */
	  tree v = unshare_and_remap (DECL_VALUE_EXPR (*tp), t, o);
	  SET_DECL_VALUE_EXPR (*tp, v);
	}
    }
  *walk_subtrees = !IS_TYPE_OR_DECL_P (*tp);
  return NULL_TREE;
}
/* Wrapper around gimple_regimplify_operands that adjusts DECL_VALUE_EXPRs
   of omp_member_access_dummy_var vars during regimplification.  */

static void
lower_omp_regimplify_operands (omp_context *ctx, gimple *stmt,
			       gimple_stmt_iterator *gsi_p)
{
  /* Collects <saved DECL_VALUE_EXPR, decl> pairs pushed by the walk
     callback below.  */
  auto_vec<tree, 10> decls;
  if (ctx)
    {
      struct walk_stmt_info wi;
      memset (&wi, '\0', sizeof (wi));
      struct lower_omp_regimplify_operands_data data;
      data.ctx = ctx;
      data.decls = &decls;
      wi.info = &data;
      walk_gimple_op (stmt, lower_omp_regimplify_operands_p, &wi);
    }
  gimple_regimplify_operands (stmt, gsi_p);
  /* Restore the DECL_VALUE_EXPRs temporarily remapped above.  Each pair
     was pushed as (value expr, decl), so pop yields decl first.  */
  while (!decls.is_empty ())
    {
      tree t = decls.pop ();
      tree v = decls.pop ();
      SET_DECL_VALUE_EXPR (t, v);
    }
}
/* Lower a single statement: dispatch each OMP construct to its specific
   lowering routine, recurse into statements with sub-sequences, and
   regimplify operands of ordinary statements that mention remapped or
   task-shared variables.  GSI_P points at the statement; CTX is the
   innermost enclosing OMP context, or NULL outside any construct.  */

static void
lower_omp_1 (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  gimple *stmt = gsi_stmt (*gsi_p);
  struct walk_stmt_info wi;
  gcall *call_stmt;

  if (gimple_has_location (stmt))
    input_location = gimple_location (stmt);

  /* WI is only consulted when outside an OMP context (ctx == NULL), which
     can only require regimplification when task_shared_vars is set.  */
  if (task_shared_vars)
    memset (&wi, '\0', sizeof (wi));

  /* If we have issued syntax errors, avoid doing any heavy lifting.
     Just replace the OMP directives with a NOP to avoid
     confusing RTL expansion.  */
  if (seen_error () && is_gimple_omp (stmt))
    {
      gsi_replace (gsi_p, gimple_build_nop (), true);
      return;
    }

  switch (gimple_code (stmt))
    {
    case GIMPLE_COND:
      {
	gcond *cond_stmt = as_a <gcond *> (stmt);
	/* Regimplify the condition's operands if either side mentions a
	   variable that was remapped or shared.  */
	if ((ctx || task_shared_vars)
	    && (walk_tree (gimple_cond_lhs_ptr (cond_stmt),
			   lower_omp_regimplify_p,
			   ctx ? NULL : &wi, NULL)
		|| walk_tree (gimple_cond_rhs_ptr (cond_stmt),
			      lower_omp_regimplify_p,
			      ctx ? NULL : &wi, NULL)))
	  lower_omp_regimplify_operands (ctx, cond_stmt, gsi_p);
      }
      break;
    case GIMPLE_CATCH:
      lower_omp (gimple_catch_handler_ptr (as_a <gcatch *> (stmt)), ctx);
      break;
    case GIMPLE_EH_FILTER:
      lower_omp (gimple_eh_filter_failure_ptr (stmt), ctx);
      break;
    case GIMPLE_TRY:
      lower_omp (gimple_try_eval_ptr (stmt), ctx);
      lower_omp (gimple_try_cleanup_ptr (stmt), ctx);
      break;
    case GIMPLE_TRANSACTION:
      lower_omp (gimple_transaction_body_ptr (as_a <gtransaction *> (stmt)),
		 ctx);
      break;
    case GIMPLE_BIND:
      lower_omp (gimple_bind_body_ptr (as_a <gbind *> (stmt)), ctx);
      break;
    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      if (ctx->cancellable)
	ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
      lower_omp_taskreg (gsi_p, ctx);
      break;
    case GIMPLE_OMP_FOR:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      if (ctx->cancellable)
	ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
      lower_omp_for (gsi_p, ctx);
      break;
    case GIMPLE_OMP_SECTIONS:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      if (ctx->cancellable)
	ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
      lower_omp_sections (gsi_p, ctx);
      break;
    case GIMPLE_OMP_SINGLE:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_single (gsi_p, ctx);
      break;
    case GIMPLE_OMP_MASTER:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_master (gsi_p, ctx);
      break;
    case GIMPLE_OMP_TASKGROUP:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_taskgroup (gsi_p, ctx);
      break;
    case GIMPLE_OMP_ORDERED:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_ordered (gsi_p, ctx);
      break;
    case GIMPLE_OMP_CRITICAL:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_critical (gsi_p, ctx);
      break;
    case GIMPLE_OMP_ATOMIC_LOAD:
      if ((ctx || task_shared_vars)
	  && walk_tree (gimple_omp_atomic_load_rhs_ptr (
			  as_a <gomp_atomic_load *> (stmt)),
			lower_omp_regimplify_p, ctx ? NULL : &wi, NULL))
	lower_omp_regimplify_operands (ctx, stmt, gsi_p);
      break;
    case GIMPLE_OMP_TARGET:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_target (gsi_p, ctx);
      break;
    case GIMPLE_OMP_TEAMS:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_teams (gsi_p, ctx);
      break;
    case GIMPLE_OMP_GRID_BODY:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_grid_body (gsi_p, ctx);
      break;
    case GIMPLE_CALL:
      tree fndecl;
      call_stmt = as_a <gcall *> (stmt);
      fndecl = gimple_call_fndecl (call_stmt);
      if (fndecl
	  && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
	switch (DECL_FUNCTION_CODE (fndecl))
	  {
	  case BUILT_IN_GOMP_BARRIER:
	    if (ctx == NULL)
	      break;
	    /* FALLTHRU */
	  case BUILT_IN_GOMP_CANCEL:
	  case BUILT_IN_GOMP_CANCELLATION_POINT:
	    omp_context *cctx;
	    cctx = ctx;
	    /* Cancellation is tracked on the enclosing sections
	       construct, not the individual section.  */
	    if (gimple_code (cctx->stmt) == GIMPLE_OMP_SECTION)
	      cctx = cctx->outer;
	    gcc_assert (gimple_call_lhs (call_stmt) == NULL_TREE);
	    if (!cctx->cancellable)
	      {
		/* In a non-cancellable region a cancellation point is a
		   no-op; a plain barrier is left untouched.  */
		if (DECL_FUNCTION_CODE (fndecl)
		    == BUILT_IN_GOMP_CANCELLATION_POINT)
		  {
		    stmt = gimple_build_nop ();
		    gsi_replace (gsi_p, stmt, false);
		  }
		break;
	      }
	    if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_GOMP_BARRIER)
	      {
		/* In a cancellable region use the _cancel variant of the
		   barrier, which reports whether cancellation happened.  */
		fndecl = builtin_decl_explicit (BUILT_IN_GOMP_BARRIER_CANCEL);
		gimple_call_set_fndecl (call_stmt, fndecl);
		gimple_call_set_fntype (call_stmt, TREE_TYPE (fndecl));
	      }
	    /* Capture the call's result and branch to the region's
	       cancel label when it is true.  */
	    tree lhs;
	    lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (fndecl)));
	    gimple_call_set_lhs (call_stmt, lhs);
	    tree fallthru_label;
	    fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
	    gimple *g;
	    g = gimple_build_label (fallthru_label);
	    gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
	    g = gimple_build_cond (NE_EXPR, lhs,
				   fold_convert (TREE_TYPE (lhs),
						 boolean_false_node),
				   cctx->cancel_label, fallthru_label);
	    gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
	    break;
	  default:
	    break;
	  }
      /* FALLTHRU */
    default:
      if ((ctx || task_shared_vars)
	  && walk_gimple_op (stmt, lower_omp_regimplify_p,
			     ctx ? NULL : &wi))
	{
	  /* Just remove clobbers, this should happen only if we have
	     "privatized" local addressable variables in SIMD regions,
	     the clobber isn't needed in that case and gimplifying address
	     of the ARRAY_REF into a pointer and creating MEM_REF based
	     clobber would create worse code than we get with the clobber
	     dropped.  */
	  if (gimple_clobber_p (stmt))
	    {
	      gsi_replace (gsi_p, gimple_build_nop (), true);
	      break;
	    }
	  lower_omp_regimplify_operands (ctx, stmt, gsi_p);
	}
      break;
    }
}
8925 static void
8926 lower_omp (gimple_seq *body, omp_context *ctx)
8928 location_t saved_location = input_location;
8929 gimple_stmt_iterator gsi;
8930 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
8931 lower_omp_1 (&gsi, ctx);
8932 /* During gimplification, we haven't folded statments inside offloading
8933 or taskreg regions (gimplify.c:maybe_fold_stmt); do that now. */
8934 if (target_nesting_level || taskreg_nesting_level)
8935 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
8936 fold_stmt (&gsi);
8937 input_location = saved_location;
/* Main entry point.  */

static unsigned int
execute_lower_omp (void)
{
  gimple_seq body;
  int i;
  omp_context *ctx;

  /* This pass always runs, to provide PROP_gimple_lomp.
     But often, there is nothing to do.  */
  if (flag_cilkplus == 0 && flag_openacc == 0 && flag_openmp == 0
      && flag_openmp_simd == 0)
    return 0;

  all_contexts = splay_tree_new (splay_tree_compare_pointers, 0,
				 delete_omp_context);

  body = gimple_body (current_function_decl);

  /* When HSA code generation was requested, turn eligible target
     constructs into their gridified form first.  */
  if (hsa_gen_requested_p ())
    omp_grid_gridify_all_targets (&body);

  /* Scan phase: build the omp_context tree for all constructs, then
     finalize the contexts of parallel/task regions.  */
  scan_omp (&body, NULL);
  gcc_assert (taskreg_nesting_level == 0);
  FOR_EACH_VEC_ELT (taskreg_contexts, i, ctx)
    finish_taskreg_scan (ctx);
  taskreg_contexts.release ();

  /* Lower phase: only needed if scanning found at least one context.  */
  if (all_contexts->root)
    {
      if (task_shared_vars)
	push_gimplify_context ();
      lower_omp (&body, NULL);
      if (task_shared_vars)
	pop_gimplify_context (NULL);
    }

  /* Release the pass-global state.  */
  if (all_contexts)
    {
      splay_tree_delete (all_contexts);
      all_contexts = NULL;
    }
  BITMAP_FREE (task_shared_vars);
  return 0;
}
namespace {

/* Pass metadata for the "omplower" pass; it requires any GIMPLE and
   provides the lowered-OMP properties.  */
const pass_data pass_data_lower_omp =
{
  GIMPLE_PASS, /* type */
  "omplower", /* name */
  OPTGROUP_OMP, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  PROP_gimple_lomp | PROP_gimple_lomp_dev, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_lower_omp : public gimple_opt_pass
{
public:
  pass_lower_omp (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_lower_omp, ctxt)
  {}

  /* opt_pass methods: */
  /* No gate: the pass always runs (execute_lower_omp bails out early
     when no OMP-related flag is enabled).  */
  virtual unsigned int execute (function *) { return execute_lower_omp (); }

}; // class pass_lower_omp

} // anon namespace

/* Factory for the pass manager.  */

gimple_opt_pass *
make_pass_lower_omp (gcc::context *ctxt)
{
  return new pass_lower_omp (ctxt);
}
/* The following is a utility to diagnose structured block violations.
   It is not part of the "omplower" pass, as that's invoked too late.  It
   should be invoked by the respective front ends after gimplification.  */

/* Maps each LABEL_DECL to the innermost OMP construct containing it
   (built by diagnose_sb_1, consumed by diagnose_sb_2).  */
static splay_tree all_labels;

/* Check for mismatched contexts and generate an error if needed.  Return
   true if an error is detected.  */

static bool
diagnose_sb_0 (gimple_stmt_iterator *gsi_p,
	       gimple *branch_ctx, gimple *label_ctx)
{
  gcc_checking_assert (!branch_ctx || is_gimple_omp (branch_ctx));
  gcc_checking_assert (!label_ctx || is_gimple_omp (label_ctx));

  /* Branch and label live in the same (possibly absent) construct:
     nothing to diagnose.  */
  if (label_ctx == branch_ctx)
    return false;

  /* Determine which language's wording to use in the diagnostic.  */
  const char* kind = NULL;

  if (flag_cilkplus)
    {
      if ((branch_ctx
	   && gimple_code (branch_ctx) == GIMPLE_OMP_FOR
	   && gimple_omp_for_kind (branch_ctx) == GF_OMP_FOR_KIND_CILKSIMD)
	  || (label_ctx
	      && gimple_code (label_ctx) == GIMPLE_OMP_FOR
	      && gimple_omp_for_kind (label_ctx) == GF_OMP_FOR_KIND_CILKSIMD))
	kind = "Cilk Plus";
    }
  if (flag_openacc)
    {
      if ((branch_ctx && is_gimple_omp_oacc (branch_ctx))
	  || (label_ctx && is_gimple_omp_oacc (label_ctx)))
	{
	  gcc_checking_assert (kind == NULL);
	  kind = "OpenACC";
	}
    }
  if (kind == NULL)
    {
      gcc_checking_assert (flag_openmp);
      kind = "OpenMP";
    }

  /* Previously we kept track of the label's entire context in diagnose_sb_[12]
     so we could traverse it and issue a correct "exit" or "enter" error
     message upon a structured block violation.

     We built the context by building a list with tree_cons'ing, but there is
     no easy counterpart in gimple tuples.  It seems like far too much work
     for issuing exit/enter error messages.  If someone really misses the
     distinct error message... patches welcome.  */

#if 0
  /* Try to avoid confusing the user by producing and error message
     with correct "exit" or "enter" verbiage.  We prefer "exit"
     unless we can show that LABEL_CTX is nested within BRANCH_CTX.  */
  if (branch_ctx == NULL)
    exit_p = false;
  else
    {
      while (label_ctx)
	{
	  if (TREE_VALUE (label_ctx) == branch_ctx)
	    {
	      exit_p = false;
	      break;
	    }
	  label_ctx = TREE_CHAIN (label_ctx);
	}
    }

  if (exit_p)
    error ("invalid exit from %s structured block", kind);
  else
    error ("invalid entry to %s structured block", kind);
#endif

  /* If it's obvious we have an invalid entry, be specific about the error.  */
  if (branch_ctx == NULL)
    error ("invalid entry to %s structured block", kind);
  else
    {
      /* Otherwise, be vague and lazy, but efficient.  */
      error ("invalid branch to/from %s structured block", kind);
    }

  /* Replace the offending branch with a NOP so later passes do not
     trip over it.  */
  gsi_replace (gsi_p, gimple_build_nop (), false);
  return true;
}
/* Pass 1: Create a minimal tree of structured blocks, and record
   where each label is found.  */

static tree
diagnose_sb_1 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
	       struct walk_stmt_info *wi)
{
  /* WI->info carries the innermost enclosing OMP construct (or NULL).  */
  gimple *context = (gimple *) wi->info;
  gimple *inner_context;
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;

  switch (gimple_code (stmt))
    {
    WALK_SUBSTMTS;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_TARGET:
    case GIMPLE_OMP_TEAMS:
    case GIMPLE_OMP_TASKGROUP:
      /* The minimal context here is just the current OMP construct.  */
      inner_context = stmt;
      wi->info = inner_context;
      walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_OMP_FOR:
      inner_context = stmt;
      wi->info = inner_context;
      /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
	 walk them.  */
      walk_gimple_seq (gimple_omp_for_pre_body (stmt),
		       diagnose_sb_1, NULL, wi);
      walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_LABEL:
      /* Record in which construct this label lives.  */
      splay_tree_insert (all_labels,
			 (splay_tree_key) gimple_label_label (
					    as_a <glabel *> (stmt)),
			 (splay_tree_value) context);
      break;

    default:
      break;
    }

  return NULL_TREE;
}
/* Pass 2: Check each branch and see if its context differs from that of
   the destination label's context.  */

static tree
diagnose_sb_2 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
	       struct walk_stmt_info *wi)
{
  /* WI->info carries the innermost enclosing OMP construct (or NULL).  */
  gimple *context = (gimple *) wi->info;
  splay_tree_node n;
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;

  switch (gimple_code (stmt))
    {
    WALK_SUBSTMTS;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_TARGET:
    case GIMPLE_OMP_TEAMS:
    case GIMPLE_OMP_TASKGROUP:
      wi->info = stmt;
      walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_OMP_FOR:
      wi->info = stmt;
      /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
	 walk them.  */
      walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt),
			   diagnose_sb_2, NULL, wi);
      walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_COND:
      {
	/* Check both edges of the conditional against their target
	   labels' recorded contexts.  */
	gcond *cond_stmt = as_a <gcond *> (stmt);
	tree lab = gimple_cond_true_label (cond_stmt);
	if (lab)
	  {
	    n = splay_tree_lookup (all_labels,
				   (splay_tree_key) lab);
	    diagnose_sb_0 (gsi_p, context,
			   n ? (gimple *) n->value : NULL);
	  }
	lab = gimple_cond_false_label (cond_stmt);
	if (lab)
	  {
	    n = splay_tree_lookup (all_labels,
				   (splay_tree_key) lab);
	    diagnose_sb_0 (gsi_p, context,
			   n ? (gimple *) n->value : NULL);
	  }
      }
      break;

    case GIMPLE_GOTO:
      {
	tree lab = gimple_goto_dest (stmt);
	/* Computed gotos have non-LABEL_DECL destinations; skip them.  */
	if (TREE_CODE (lab) != LABEL_DECL)
	  break;

	n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
	diagnose_sb_0 (gsi_p, context, n ? (gimple *) n->value : NULL);
      }
      break;

    case GIMPLE_SWITCH:
      {
	gswitch *switch_stmt = as_a <gswitch *> (stmt);
	unsigned int i;
	for (i = 0; i < gimple_switch_num_labels (switch_stmt); ++i)
	  {
	    tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
	    n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
	    /* One diagnostic per statement is enough; diagnose_sb_0 has
	       already replaced the switch with a NOP.  */
	    if (n && diagnose_sb_0 (gsi_p, context, (gimple *) n->value))
	      break;
	  }
      }
      break;

    case GIMPLE_RETURN:
      /* A return inside a construct is a branch to outside it.  */
      diagnose_sb_0 (gsi_p, context, NULL);
      break;

    default:
      break;
    }

  return NULL_TREE;
}
/* Diagnose invalid branches into or out of OMP/OpenACC/Cilk Plus
   structured blocks in the current function.  Runs diagnose_sb_1 to
   record each label's construct, then diagnose_sb_2 to check every
   branch against its destination's construct.  */

static unsigned int
diagnose_omp_structured_block_errors (void)
{
  struct walk_stmt_info wi;
  gimple_seq body = gimple_body (current_function_decl);

  all_labels = splay_tree_new (splay_tree_compare_pointers, 0, 0);

  memset (&wi, 0, sizeof (wi));
  walk_gimple_seq (body, diagnose_sb_1, NULL, &wi);

  /* The second walk may replace statements, hence the _mod variant.  */
  memset (&wi, 0, sizeof (wi));
  wi.want_locations = true;
  walk_gimple_seq_mod (&body, diagnose_sb_2, NULL, &wi);

  gimple_set_body (current_function_decl, body);

  splay_tree_delete (all_labels);
  all_labels = NULL;

  return 0;
}
namespace {

/* Pass metadata for the structured-block diagnostic pass; the leading
   '*' in the name keeps it out of -fdump-passes listings.  */
const pass_data pass_data_diagnose_omp_blocks =
{
  GIMPLE_PASS, /* type */
  "*diagnose_omp_blocks", /* name */
  OPTGROUP_OMP, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_diagnose_omp_blocks : public gimple_opt_pass
{
public:
  pass_diagnose_omp_blocks (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_diagnose_omp_blocks, ctxt)
  {}

  /* opt_pass methods: */
  /* Only run when some OMP-related language extension is enabled.  */
  virtual bool gate (function *)
  {
    return flag_cilkplus || flag_openacc || flag_openmp;
  }
  virtual unsigned int execute (function *)
  {
    return diagnose_omp_structured_block_errors ();
  }

}; // class pass_diagnose_omp_blocks

} // anon namespace

/* Factory for the pass manager.  */

gimple_opt_pass *
make_pass_diagnose_omp_blocks (gcc::context *ctxt)
{
  return new pass_diagnose_omp_blocks (ctxt);
}
9342 #include "gt-omp-low.h"