gcc/omp-low.c
/* Lowering pass for OMP directives.  Converts OMP directives into explicit
   calls to the runtime library (libgomp), data marshalling to implement data
   sharing and copying clauses, offloading to accelerators, and more.

   Contributed by Diego Novillo <dnovillo@redhat.com>

   Copyright (C) 2005-2017 Free Software Foundation, Inc.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it under
   the terms of the GNU General Public License as published by the Free
   Software Foundation; either version 3, or (at your option) any later
   version.

   GCC is distributed in the hope that it will be useful, but WITHOUT ANY
   WARRANTY; without even the implied warranty of MERCHANTABILITY or
   FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
   for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "ssa.h"
#include "cgraph.h"
#include "pretty-print.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "tree-iterator.h"
#include "tree-inline.h"
#include "langhooks.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "splay-tree.h"
#include "omp-general.h"
#include "omp-low.h"
#include "omp-grid.h"
#include "gimple-low.h"
#include "symbol-summary.h"
#include "tree-nested.h"
#include "context.h"
#include "gomp-constants.h"
#include "gimple-pretty-print.h"
#include "hsa-common.h"
/* Lowering of OMP parallel and workshare constructs proceeds in two
   phases.  The first phase scans the function looking for OMP statements
   and then for variables that must be replaced to satisfy data sharing
   clauses.  The second phase expands code for the constructs, as well as
   re-gimplifying things when variables have been replaced with complex
   expressions.

   Final code generation is done by pass_expand_omp.  The flowgraph is
   scanned for regions which are then moved to a new function, to be
   invoked by the thread library, or offloaded.  */
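/* As an illustrative sketch (not the literal output of this pass), a
   construct such as

       #pragma omp parallel shared(x)
	 ... use x ...

   is outlined into a child function that receives a pointer to a record
   carrying the shared data, roughly:

       struct .omp_data_s { int *x; };
       static void foo._omp_fn.0 (struct .omp_data_s *.omp_data_i)
       { ... use *.omp_data_i->x ... }

   with the runtime invoking the child function in each thread.  The
   ".omp_data_s"/".omp_data_i" names follow the conventions used below;
   the exact record shape depends on the clauses involved.  */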
/* Context structure.  Used to store information about each parallel
   directive in the code.  */

struct omp_context
{
  /* This field must be at the beginning, as we do "inheritance": Some
     callback functions for tree-inline.c (e.g., omp_copy_decl)
     receive a copy_body_data pointer that is up-casted to an
     omp_context pointer.  */
  copy_body_data cb;

  /* The tree of contexts corresponding to the encountered constructs.  */
  struct omp_context *outer;
  gimple *stmt;

  /* Map variables to fields in a structure that allows communication
     between sending and receiving threads.  */
  splay_tree field_map;
  tree record_type;
  tree sender_decl;
  tree receiver_decl;

  /* These are used just by task contexts, if task firstprivate fn is
     needed.  srecord_type is used to communicate from the thread
     that encountered the task construct to task firstprivate fn,
     record_type is allocated by GOMP_task, initialized by task firstprivate
     fn and passed to the task body fn.  */
  splay_tree sfield_map;
  tree srecord_type;

  /* A chain of variables to add to the top-level block surrounding the
     construct.  In the case of a parallel, this is in the child function.  */
  tree block_vars;

  /* Label to which GOMP_cancel{,lation_point} and explicit and implicit
     barriers should jump during the omplower pass.  */
  tree cancel_label;

  /* The sibling GIMPLE_OMP_FOR simd statement with a _simt_ clause, or
     NULL otherwise.  */
  gimple *simt_stmt;

  /* What to do with variables with implicitly determined sharing
     attributes.  */
  enum omp_clause_default_kind default_kind;

  /* Nesting depth of this context.  Used to beautify error messages
     regarding invalid gotos.  The outermost ctx is depth 1, with depth 0
     being reserved for the main body of the function.  */
  int depth;

  /* True if this parallel directive is nested within another.  */
  bool is_nested;

  /* True if this construct can be cancelled.  */
  bool cancellable;
};
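/* Because CB is the first member, callbacks that receive the embedded
   copy_body_data can recover the enclosing context with a plain cast:

       omp_context *ctx = (omp_context *) cb;

   as omp_copy_decl below does.  */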
static splay_tree all_contexts;
static int taskreg_nesting_level;
static int target_nesting_level;
static bitmap task_shared_vars;
static vec<omp_context *> taskreg_contexts;

static void scan_omp (gimple_seq *, omp_context *);
static tree scan_omp_1_op (tree *, int *, void *);
#define WALK_SUBSTMTS  \
    case GIMPLE_BIND: \
    case GIMPLE_TRY: \
    case GIMPLE_CATCH: \
    case GIMPLE_EH_FILTER: \
    case GIMPLE_TRANSACTION: \
      /* The sub-statements for these should be walked.  */ \
      *handled_ops_p = false; \
      break;
/* Return true if CTX corresponds to an oacc parallel region.  */

static bool
is_oacc_parallel (omp_context *ctx)
{
  enum gimple_code outer_type = gimple_code (ctx->stmt);
  return ((outer_type == GIMPLE_OMP_TARGET)
	  && (gimple_omp_target_kind (ctx->stmt)
	      == GF_OMP_TARGET_KIND_OACC_PARALLEL));
}

/* Return true if CTX corresponds to an oacc kernels region.  */

static bool
is_oacc_kernels (omp_context *ctx)
{
  enum gimple_code outer_type = gimple_code (ctx->stmt);
  return ((outer_type == GIMPLE_OMP_TARGET)
	  && (gimple_omp_target_kind (ctx->stmt)
	      == GF_OMP_TARGET_KIND_OACC_KERNELS));
}
/* If DECL is the artificial dummy VAR_DECL created for non-static
   data member privatization, return the underlying "this" parameter,
   otherwise return NULL.  */

tree
omp_member_access_dummy_var (tree decl)
{
  if (!VAR_P (decl)
      || !DECL_ARTIFICIAL (decl)
      || !DECL_IGNORED_P (decl)
      || !DECL_HAS_VALUE_EXPR_P (decl)
      || !lang_hooks.decls.omp_disregard_value_expr (decl, false))
    return NULL_TREE;

  tree v = DECL_VALUE_EXPR (decl);
  if (TREE_CODE (v) != COMPONENT_REF)
    return NULL_TREE;

  while (1)
    switch (TREE_CODE (v))
      {
      case COMPONENT_REF:
      case MEM_REF:
      case INDIRECT_REF:
      CASE_CONVERT:
      case POINTER_PLUS_EXPR:
	v = TREE_OPERAND (v, 0);
	continue;
      case PARM_DECL:
	if (DECL_CONTEXT (v) == current_function_decl
	    && DECL_ARTIFICIAL (v)
	    && TREE_CODE (TREE_TYPE (v)) == POINTER_TYPE)
	  return v;
	return NULL_TREE;
      default:
	return NULL_TREE;
      }
}
/* Helper for unshare_and_remap, called through walk_tree.  */

static tree
unshare_and_remap_1 (tree *tp, int *walk_subtrees, void *data)
{
  tree *pair = (tree *) data;
  if (*tp == pair[0])
    {
      *tp = unshare_expr (pair[1]);
      *walk_subtrees = 0;
    }
  else if (IS_TYPE_OR_DECL_P (*tp))
    *walk_subtrees = 0;
  return NULL_TREE;
}

/* Return unshare_expr (X) with all occurrences of FROM
   replaced with TO.  */

static tree
unshare_and_remap (tree x, tree from, tree to)
{
  tree pair[2] = { from, to };
  x = unshare_expr (x);
  walk_tree (&x, unshare_and_remap_1, pair, NULL);
  return x;
}
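/* For example, unshare_and_remap (x, a, b) applied to the expression
   a + a * 2 yields an unshared copy of b + b * 2.  Only whole-node
   matches of FROM are replaced, and subtrees of types and decls are
   not walked.  */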
/* Convenience function for calling scan_omp_1_op on tree operands.  */

static inline tree
scan_omp_op (tree *tp, omp_context *ctx)
{
  struct walk_stmt_info wi;

  memset (&wi, 0, sizeof (wi));
  wi.info = ctx;
  wi.want_locations = true;

  return walk_tree (tp, scan_omp_1_op, &wi, NULL);
}

static void lower_omp (gimple_seq *, omp_context *);
static tree lookup_decl_in_outer_ctx (tree, omp_context *);
static tree maybe_lookup_decl_in_outer_ctx (tree, omp_context *);
/* Return true if CTX is for an omp parallel.  */

static inline bool
is_parallel_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL;
}

/* Return true if CTX is for an omp task.  */

static inline bool
is_task_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_TASK;
}

/* Return true if CTX is for an omp taskloop.  */

static inline bool
is_taskloop_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
	 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP;
}

/* Return true if CTX is for an omp parallel or omp task.  */

static inline bool
is_taskreg_ctx (omp_context *ctx)
{
  return is_parallel_ctx (ctx) || is_task_ctx (ctx);
}

/* Return true if EXPR is variable sized.  */

static inline bool
is_variable_sized (const_tree expr)
{
  return !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
}
/* Lookup variables.  The "maybe" forms allow for the variable not to
   have been entered; the plain forms assert that the variable must have
   been entered.  */

static inline tree
lookup_decl (tree var, omp_context *ctx)
{
  tree *n = ctx->cb.decl_map->get (var);
  return *n;
}

static inline tree
maybe_lookup_decl (const_tree var, omp_context *ctx)
{
  tree *n = ctx->cb.decl_map->get (const_cast<tree> (var));
  return n ? *n : NULL_TREE;
}

static inline tree
lookup_field (tree var, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->field_map, (splay_tree_key) var);
  return (tree) n->value;
}

static inline tree
lookup_sfield (splay_tree_key key, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->sfield_map
			 ? ctx->sfield_map : ctx->field_map, key);
  return (tree) n->value;
}

static inline tree
lookup_sfield (tree var, omp_context *ctx)
{
  return lookup_sfield ((splay_tree_key) var, ctx);
}

static inline tree
maybe_lookup_field (splay_tree_key key, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->field_map, key);
  return n ? (tree) n->value : NULL_TREE;
}

static inline tree
maybe_lookup_field (tree var, omp_context *ctx)
{
  return maybe_lookup_field ((splay_tree_key) var, ctx);
}
/* Return true if DECL should be copied by pointer.  SHARED_CTX is
   the parallel context if DECL is to be shared.  */

static bool
use_pointer_for_field (tree decl, omp_context *shared_ctx)
{
  if (AGGREGATE_TYPE_P (TREE_TYPE (decl))
      || TYPE_ATOMIC (TREE_TYPE (decl)))
    return true;

  /* We can only use copy-in/copy-out semantics for shared variables
     when we know the value is not accessible from an outer scope.  */
  if (shared_ctx)
    {
      gcc_assert (!is_gimple_omp_oacc (shared_ctx->stmt));

      /* ??? Trivially accessible from anywhere.  But why would we even
	 be passing an address in this case?  Should we simply assert
	 this to be false, or should we have a cleanup pass that removes
	 these from the list of mappings?  */
      if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
	return true;

      /* For variables with DECL_HAS_VALUE_EXPR_P set, we cannot tell
	 without analyzing the expression whether or not its location
	 is accessible to anyone else.  In the case of nested parallel
	 regions it certainly may be.  */
      if (TREE_CODE (decl) != RESULT_DECL && DECL_HAS_VALUE_EXPR_P (decl))
	return true;

      /* Do not use copy-in/copy-out for variables that have their
	 address taken.  */
      if (TREE_ADDRESSABLE (decl))
	return true;

      /* lower_send_shared_vars only uses copy-in, but not copy-out
	 for these.  */
      if (TREE_READONLY (decl)
	  || ((TREE_CODE (decl) == RESULT_DECL
	       || TREE_CODE (decl) == PARM_DECL)
	      && DECL_BY_REFERENCE (decl)))
	return false;

      /* Disallow copy-in/out in nested parallel if
	 decl is shared in outer parallel, otherwise
	 each thread could store the shared variable
	 in its own copy-in location, making the
	 variable no longer really shared.  */
      if (shared_ctx->is_nested)
	{
	  omp_context *up;

	  for (up = shared_ctx->outer; up; up = up->outer)
	    if (is_taskreg_ctx (up) && maybe_lookup_decl (decl, up))
	      break;

	  if (up)
	    {
	      tree c;

	      for (c = gimple_omp_taskreg_clauses (up->stmt);
		   c; c = OMP_CLAUSE_CHAIN (c))
		if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
		    && OMP_CLAUSE_DECL (c) == decl)
		  break;

	      if (c)
		goto maybe_mark_addressable_and_ret;
	    }
	}

      /* For tasks avoid using copy-in/out.  As tasks can be
	 deferred or executed in a different thread, when GOMP_task
	 returns, the task hasn't necessarily terminated.  */
      if (is_task_ctx (shared_ctx))
	{
	  tree outer;
	maybe_mark_addressable_and_ret:
	  outer = maybe_lookup_decl_in_outer_ctx (decl, shared_ctx);
	  if (is_gimple_reg (outer) && !omp_member_access_dummy_var (outer))
	    {
	      /* Taking address of OUTER in lower_send_shared_vars
		 might need regimplification of everything that uses the
		 variable.  */
	      if (!task_shared_vars)
		task_shared_vars = BITMAP_ALLOC (NULL);
	      bitmap_set_bit (task_shared_vars, DECL_UID (outer));
	      TREE_ADDRESSABLE (outer) = 1;
	    }
	  return true;
	}
    }

  return false;
}
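/* A hedged summary of the logic above, not an exhaustive one: a
   non-addressable scalar shared on a parallel is copied in and out
   through a field of its own type, while an aggregate, atomic, or
   addressable DECL gets a pointer field instead; tasks always take the
   pointer route, since the task may outlive the GOMP_task call.  */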
/* Construct a new automatic decl similar to VAR.  */

static tree
omp_copy_decl_2 (tree var, tree name, tree type, omp_context *ctx)
{
  tree copy = copy_var_decl (var, name, type);

  DECL_CONTEXT (copy) = current_function_decl;
  DECL_CHAIN (copy) = ctx->block_vars;
  /* If VAR is listed in task_shared_vars, it means it wasn't
     originally addressable and was marked so only because the task
     needs to take its address.  But we don't need to take the address
     of privatizations from that var.  */
  if (TREE_ADDRESSABLE (var)
      && task_shared_vars
      && bitmap_bit_p (task_shared_vars, DECL_UID (var)))
    TREE_ADDRESSABLE (copy) = 0;
  ctx->block_vars = copy;

  return copy;
}

static tree
omp_copy_decl_1 (tree var, omp_context *ctx)
{
  return omp_copy_decl_2 (var, DECL_NAME (var), TREE_TYPE (var), ctx);
}
/* Build COMPONENT_REF and set TREE_THIS_VOLATILE and TREE_READONLY on it
   as appropriate.  */

static tree
omp_build_component_ref (tree obj, tree field)
{
  tree ret = build3 (COMPONENT_REF, TREE_TYPE (field), obj, field, NULL);
  if (TREE_THIS_VOLATILE (field))
    TREE_THIS_VOLATILE (ret) |= 1;
  if (TREE_READONLY (field))
    TREE_READONLY (ret) |= 1;
  return ret;
}
/* Build tree nodes to access the field for VAR on the receiver side.  */

static tree
build_receiver_ref (tree var, bool by_ref, omp_context *ctx)
{
  tree x, field = lookup_field (var, ctx);

  /* If the receiver record type was remapped in the child function,
     remap the field into the new record type.  */
  x = maybe_lookup_field (field, ctx);
  if (x != NULL)
    field = x;

  x = build_simple_mem_ref (ctx->receiver_decl);
  TREE_THIS_NOTRAP (x) = 1;
  x = omp_build_component_ref (x, field);
  if (by_ref)
    {
      x = build_simple_mem_ref (x);
      TREE_THIS_NOTRAP (x) = 1;
    }

  return x;
}
/* Build tree nodes to access VAR in the scope outer to CTX.  In the case
   of a parallel, this is a component reference; for workshare constructs
   this is some variable.  */

static tree
build_outer_var_ref (tree var, omp_context *ctx,
		     enum omp_clause_code code = OMP_CLAUSE_ERROR)
{
  tree x;

  if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
    x = var;
  else if (is_variable_sized (var))
    {
      x = TREE_OPERAND (DECL_VALUE_EXPR (var), 0);
      x = build_outer_var_ref (x, ctx, code);
      x = build_simple_mem_ref (x);
    }
  else if (is_taskreg_ctx (ctx))
    {
      bool by_ref = use_pointer_for_field (var, NULL);
      x = build_receiver_ref (var, by_ref, ctx);
    }
  else if ((gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
	    && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
	   || (code == OMP_CLAUSE_PRIVATE
	       && (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
		   || gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS
		   || gimple_code (ctx->stmt) == GIMPLE_OMP_SINGLE)))
    {
      /* #pragma omp simd isn't a worksharing construct, and can reference
	 even private vars in its linear etc. clauses.
	 Similarly for OMP_CLAUSE_PRIVATE with outer ref, that can refer
	 to private vars in all worksharing constructs.  */
      x = NULL_TREE;
      if (ctx->outer && is_taskreg_ctx (ctx))
	x = lookup_decl (var, ctx->outer);
      else if (ctx->outer)
	x = maybe_lookup_decl_in_outer_ctx (var, ctx);
      if (x == NULL_TREE)
	x = var;
    }
  else if (code == OMP_CLAUSE_LASTPRIVATE && is_taskloop_ctx (ctx))
    {
      gcc_assert (ctx->outer);
      splay_tree_node n
	= splay_tree_lookup (ctx->outer->field_map,
			     (splay_tree_key) &DECL_UID (var));
      if (n == NULL)
	{
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx->outer)))
	    x = var;
	  else
	    x = lookup_decl (var, ctx->outer);
	}
      else
	{
	  tree field = (tree) n->value;
	  /* If the receiver record type was remapped in the child function,
	     remap the field into the new record type.  */
	  x = maybe_lookup_field (field, ctx->outer);
	  if (x != NULL)
	    field = x;

	  x = build_simple_mem_ref (ctx->outer->receiver_decl);
	  x = omp_build_component_ref (x, field);
	  if (use_pointer_for_field (var, ctx->outer))
	    x = build_simple_mem_ref (x);
	}
    }
  else if (ctx->outer)
    {
      omp_context *outer = ctx->outer;
      if (gimple_code (outer->stmt) == GIMPLE_OMP_GRID_BODY)
	{
	  outer = outer->outer;
	  gcc_assert (outer
		      && gimple_code (outer->stmt) != GIMPLE_OMP_GRID_BODY);
	}
      x = lookup_decl (var, outer);
    }
  else if (omp_is_reference (var))
    /* This can happen with orphaned constructs.  If var is reference, it is
       possible it is shared and as such valid.  */
    x = var;
  else if (omp_member_access_dummy_var (var))
    x = var;
  else
    gcc_unreachable ();

  if (x == var)
    {
      tree t = omp_member_access_dummy_var (var);
      if (t)
	{
	  x = DECL_VALUE_EXPR (var);
	  tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
	  if (o != t)
	    x = unshare_and_remap (x, t, o);
	  else
	    x = unshare_expr (x);
	}
    }

  if (omp_is_reference (var))
    x = build_simple_mem_ref (x);

  return x;
}
/* Build tree nodes to access the field for VAR on the sender side.  */

static tree
build_sender_ref (splay_tree_key key, omp_context *ctx)
{
  tree field = lookup_sfield (key, ctx);
  return omp_build_component_ref (ctx->sender_decl, field);
}

static tree
build_sender_ref (tree var, omp_context *ctx)
{
  return build_sender_ref ((splay_tree_key) var, ctx);
}
/* Add a new field for VAR inside the structure CTX->SENDER_DECL.  If
   BASE_POINTERS_RESTRICT, declare the field with restrict.  */

static void
install_var_field (tree var, bool by_ref, int mask, omp_context *ctx,
		   bool base_pointers_restrict = false)
{
  tree field, type, sfield = NULL_TREE;
  splay_tree_key key = (splay_tree_key) var;

  if ((mask & 8) != 0)
    {
      key = (splay_tree_key) &DECL_UID (var);
      gcc_checking_assert (key != (splay_tree_key) var);
    }
  gcc_assert ((mask & 1) == 0
	      || !splay_tree_lookup (ctx->field_map, key));
  gcc_assert ((mask & 2) == 0 || !ctx->sfield_map
	      || !splay_tree_lookup (ctx->sfield_map, key));
  gcc_assert ((mask & 3) == 3
	      || !is_gimple_omp_oacc (ctx->stmt));

  type = TREE_TYPE (var);
  /* Prevent redeclaring the var in the split-off function with a restrict
     pointer type.  Note that we only clear type itself, restrict qualifiers in
     the pointed-to type will be ignored by points-to analysis.  */
  if (POINTER_TYPE_P (type)
      && TYPE_RESTRICT (type))
    type = build_qualified_type (type, TYPE_QUALS (type) & ~TYPE_QUAL_RESTRICT);

  if (mask & 4)
    {
      gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
      type = build_pointer_type (build_pointer_type (type));
    }
  else if (by_ref)
    {
      type = build_pointer_type (type);
      if (base_pointers_restrict)
	type = build_qualified_type (type, TYPE_QUAL_RESTRICT);
    }
  else if ((mask & 3) == 1 && omp_is_reference (var))
    type = TREE_TYPE (type);

  field = build_decl (DECL_SOURCE_LOCATION (var),
		      FIELD_DECL, DECL_NAME (var), type);

  /* Remember what variable this field was created for.  This does have a
     side effect of making dwarf2out ignore this member, so for helpful
     debugging we clear it later in delete_omp_context.  */
  DECL_ABSTRACT_ORIGIN (field) = var;
  if (type == TREE_TYPE (var))
    {
      SET_DECL_ALIGN (field, DECL_ALIGN (var));
      DECL_USER_ALIGN (field) = DECL_USER_ALIGN (var);
      TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (var);
    }
  else
    SET_DECL_ALIGN (field, TYPE_ALIGN (type));

  if ((mask & 3) == 3)
    {
      insert_field_into_struct (ctx->record_type, field);
      if (ctx->srecord_type)
	{
	  sfield = build_decl (DECL_SOURCE_LOCATION (var),
			       FIELD_DECL, DECL_NAME (var), type);
	  DECL_ABSTRACT_ORIGIN (sfield) = var;
	  SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
	  DECL_USER_ALIGN (sfield) = DECL_USER_ALIGN (field);
	  TREE_THIS_VOLATILE (sfield) = TREE_THIS_VOLATILE (field);
	  insert_field_into_struct (ctx->srecord_type, sfield);
	}
    }
  else
    {
      if (ctx->srecord_type == NULL_TREE)
	{
	  tree t;

	  ctx->srecord_type = lang_hooks.types.make_type (RECORD_TYPE);
	  ctx->sfield_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
	  for (t = TYPE_FIELDS (ctx->record_type); t; t = TREE_CHAIN (t))
	    {
	      sfield = build_decl (DECL_SOURCE_LOCATION (t),
				   FIELD_DECL, DECL_NAME (t), TREE_TYPE (t));
	      DECL_ABSTRACT_ORIGIN (sfield) = DECL_ABSTRACT_ORIGIN (t);
	      insert_field_into_struct (ctx->srecord_type, sfield);
	      splay_tree_insert (ctx->sfield_map,
				 (splay_tree_key) DECL_ABSTRACT_ORIGIN (t),
				 (splay_tree_value) sfield);
	    }
	}
      sfield = field;
      insert_field_into_struct ((mask & 1) ? ctx->record_type
				: ctx->srecord_type, field);
    }

  if (mask & 1)
    splay_tree_insert (ctx->field_map, key, (splay_tree_value) field);
  if ((mask & 2) && ctx->sfield_map)
    splay_tree_insert (ctx->sfield_map, key, (splay_tree_value) sfield);
}
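/* MASK above is a small bit set (a summary inferred from the callers
   below, not a spec): bit 0 installs the field into field_map/record_type,
   bit 1 into sfield_map/srecord_type, bit 2 (value 4) forces a
   pointer-to-pointer field for array sections, and bit 3 (value 8) keys
   the maps by &DECL_UID (var) rather than by the var itself, as
   build_outer_var_ref expects for taskloop lastprivate.  Typical values
   in this file are 1, 2, 3, 7 and 11.  */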
static tree
install_var_local (tree var, omp_context *ctx)
{
  tree new_var = omp_copy_decl_1 (var, ctx);
  insert_decl_map (&ctx->cb, var, new_var);
  return new_var;
}
/* Adjust the replacement for DECL in CTX for the new context.  This means
   copying the DECL_VALUE_EXPR, and fixing up the type.  */

static void
fixup_remapped_decl (tree decl, omp_context *ctx, bool private_debug)
{
  tree new_decl, size;

  new_decl = lookup_decl (decl, ctx);

  TREE_TYPE (new_decl) = remap_type (TREE_TYPE (decl), &ctx->cb);

  if ((!TREE_CONSTANT (DECL_SIZE (new_decl)) || private_debug)
      && DECL_HAS_VALUE_EXPR_P (decl))
    {
      tree ve = DECL_VALUE_EXPR (decl);
      walk_tree (&ve, copy_tree_body_r, &ctx->cb, NULL);
      SET_DECL_VALUE_EXPR (new_decl, ve);
      DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
    }

  if (!TREE_CONSTANT (DECL_SIZE (new_decl)))
    {
      size = remap_decl (DECL_SIZE (decl), &ctx->cb);
      if (size == error_mark_node)
	size = TYPE_SIZE (TREE_TYPE (new_decl));
      DECL_SIZE (new_decl) = size;

      size = remap_decl (DECL_SIZE_UNIT (decl), &ctx->cb);
      if (size == error_mark_node)
	size = TYPE_SIZE_UNIT (TREE_TYPE (new_decl));
      DECL_SIZE_UNIT (new_decl) = size;
    }
}
/* The callback for remap_decl.  Search all containing contexts for a
   mapping of the variable; this avoids having to duplicate the splay
   tree ahead of time.  We know a mapping doesn't already exist in the
   given context.  Create new mappings to implement default semantics.  */

static tree
omp_copy_decl (tree var, copy_body_data *cb)
{
  omp_context *ctx = (omp_context *) cb;
  tree new_var;

  if (TREE_CODE (var) == LABEL_DECL)
    {
      new_var = create_artificial_label (DECL_SOURCE_LOCATION (var));
      DECL_CONTEXT (new_var) = current_function_decl;
      insert_decl_map (&ctx->cb, var, new_var);
      return new_var;
    }

  while (!is_taskreg_ctx (ctx))
    {
      ctx = ctx->outer;
      if (ctx == NULL)
	return var;
      new_var = maybe_lookup_decl (var, ctx);
      if (new_var)
	return new_var;
    }

  if (is_global_var (var) || decl_function_context (var) != ctx->cb.src_fn)
    return var;

  return error_mark_node;
}
/* Create a new context, with OUTER_CTX being the surrounding context.  */

static omp_context *
new_omp_context (gimple *stmt, omp_context *outer_ctx)
{
  omp_context *ctx = XCNEW (omp_context);

  splay_tree_insert (all_contexts, (splay_tree_key) stmt,
		     (splay_tree_value) ctx);
  ctx->stmt = stmt;

  if (outer_ctx)
    {
      ctx->outer = outer_ctx;
      ctx->cb = outer_ctx->cb;
      ctx->cb.block = NULL;
      ctx->depth = outer_ctx->depth + 1;
    }
  else
    {
      ctx->cb.src_fn = current_function_decl;
      ctx->cb.dst_fn = current_function_decl;
      ctx->cb.src_node = cgraph_node::get (current_function_decl);
      gcc_checking_assert (ctx->cb.src_node);
      ctx->cb.dst_node = ctx->cb.src_node;
      ctx->cb.src_cfun = cfun;
      ctx->cb.copy_decl = omp_copy_decl;
      ctx->cb.eh_lp_nr = 0;
      ctx->cb.transform_call_graph_edges = CB_CGE_MOVE;
      ctx->depth = 1;
    }

  ctx->cb.decl_map = new hash_map<tree, tree>;

  return ctx;
}
static gimple_seq maybe_catch_exception (gimple_seq);

/* Finalize task copyfn.  */

static void
finalize_task_copyfn (gomp_task *task_stmt)
{
  struct function *child_cfun;
  tree child_fn;
  gimple_seq seq = NULL, new_seq;
  gbind *bind;

  child_fn = gimple_omp_task_copy_fn (task_stmt);
  if (child_fn == NULL_TREE)
    return;

  child_cfun = DECL_STRUCT_FUNCTION (child_fn);
  DECL_STRUCT_FUNCTION (child_fn)->curr_properties = cfun->curr_properties;

  push_cfun (child_cfun);
  bind = gimplify_body (child_fn, false);
  gimple_seq_add_stmt (&seq, bind);
  new_seq = maybe_catch_exception (seq);
  if (new_seq != seq)
    {
      bind = gimple_build_bind (NULL, new_seq, NULL);
      seq = NULL;
      gimple_seq_add_stmt (&seq, bind);
    }
  gimple_set_body (child_fn, seq);
  pop_cfun ();

  /* Inform the callgraph about the new function.  */
  cgraph_node *node = cgraph_node::get_create (child_fn);
  node->parallelized_function = 1;
  cgraph_node::add_new_function (child_fn, false);
}
/* Destroy an omp_context data structure.  Called through the splay tree
   value delete callback.  */

static void
delete_omp_context (splay_tree_value value)
{
  omp_context *ctx = (omp_context *) value;

  delete ctx->cb.decl_map;

  if (ctx->field_map)
    splay_tree_delete (ctx->field_map);
  if (ctx->sfield_map)
    splay_tree_delete (ctx->sfield_map);

  /* We hijacked DECL_ABSTRACT_ORIGIN earlier.  We need to clear it before
     it produces corrupt debug information.  */
  if (ctx->record_type)
    {
      tree t;
      for (t = TYPE_FIELDS (ctx->record_type); t; t = DECL_CHAIN (t))
	DECL_ABSTRACT_ORIGIN (t) = NULL;
    }
  if (ctx->srecord_type)
    {
      tree t;
      for (t = TYPE_FIELDS (ctx->srecord_type); t; t = DECL_CHAIN (t))
	DECL_ABSTRACT_ORIGIN (t) = NULL;
    }

  if (is_task_ctx (ctx))
    finalize_task_copyfn (as_a <gomp_task *> (ctx->stmt));

  XDELETE (ctx);
}
/* Fix up RECEIVER_DECL with a type that has been remapped to the child
   context.  */

static void
fixup_child_record_type (omp_context *ctx)
{
  tree f, type = ctx->record_type;

  if (!ctx->receiver_decl)
    return;
  /* ??? It isn't sufficient to just call remap_type here, because
     variably_modified_type_p doesn't work the way we expect for
     record types.  Testing each field for whether it needs remapping
     and creating a new record by hand works, however.  */
  for (f = TYPE_FIELDS (type); f; f = DECL_CHAIN (f))
    if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
      break;
  if (f)
    {
      tree name, new_fields = NULL;

      type = lang_hooks.types.make_type (RECORD_TYPE);
      name = DECL_NAME (TYPE_NAME (ctx->record_type));
      name = build_decl (DECL_SOURCE_LOCATION (ctx->receiver_decl),
			 TYPE_DECL, name, type);
      TYPE_NAME (type) = name;

      for (f = TYPE_FIELDS (ctx->record_type); f; f = DECL_CHAIN (f))
	{
	  tree new_f = copy_node (f);
	  DECL_CONTEXT (new_f) = type;
	  TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &ctx->cb);
	  DECL_CHAIN (new_f) = new_fields;
	  walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &ctx->cb, NULL);
	  walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r,
		     &ctx->cb, NULL);
	  walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
		     &ctx->cb, NULL);
	  new_fields = new_f;

	  /* Arrange to be able to look up the receiver field
	     given the sender field.  */
	  splay_tree_insert (ctx->field_map, (splay_tree_key) f,
			     (splay_tree_value) new_f);
	}
      TYPE_FIELDS (type) = nreverse (new_fields);
      layout_type (type);
    }

  /* In a target region we never modify any of the pointers in *.omp_data_i,
     so attempt to help the optimizers.  */
  if (is_gimple_omp_offloaded (ctx->stmt))
    type = build_qualified_type (type, TYPE_QUAL_CONST);

  TREE_TYPE (ctx->receiver_decl)
    = build_qualified_type (build_reference_type (type), TYPE_QUAL_RESTRICT);
}
/* Instantiate decls as necessary in CTX to satisfy the data sharing
   specified by CLAUSES.  If BASE_POINTERS_RESTRICT, install var field with
   restrict.  */

static void
scan_sharing_clauses (tree clauses, omp_context *ctx,
		      bool base_pointers_restrict = false)
{
  tree c, decl;
  bool scan_array_reductions = false;

  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      bool by_ref;

      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_PRIVATE:
	  decl = OMP_CLAUSE_DECL (c);
	  if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
	    goto do_private;
	  else if (!is_variable_sized (decl))
	    install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_SHARED:
	  decl = OMP_CLAUSE_DECL (c);
	  /* Ignore shared directives in teams construct.  */
	  if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
	    {
	      /* Global variables don't need to be copied,
		 the receiver side will use them directly.  */
	      tree odecl = maybe_lookup_decl_in_outer_ctx (decl, ctx);
	      if (is_global_var (odecl))
		break;
	      insert_decl_map (&ctx->cb, decl, odecl);
	      break;
	    }
	  gcc_assert (is_taskreg_ctx (ctx));
	  gcc_assert (!COMPLETE_TYPE_P (TREE_TYPE (decl))
		      || !is_variable_sized (decl));
	  /* Global variables don't need to be copied,
	     the receiver side will use them directly.  */
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
	    break;
	  if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	    {
	      use_pointer_for_field (decl, ctx);
	      break;
	    }
	  by_ref = use_pointer_for_field (decl, NULL);
	  if ((! TREE_READONLY (decl) && !OMP_CLAUSE_SHARED_READONLY (c))
	      || TREE_ADDRESSABLE (decl)
	      || by_ref
	      || omp_is_reference (decl))
	    {
	      by_ref = use_pointer_for_field (decl, ctx);
	      install_var_field (decl, by_ref, 3, ctx);
	      install_var_local (decl, ctx);
	      break;
	    }
	  /* We don't need to copy const scalar vars back.  */
	  OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_FIRSTPRIVATE);
	  goto do_private;

	case OMP_CLAUSE_REDUCTION:
	  decl = OMP_CLAUSE_DECL (c);
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	      && TREE_CODE (decl) == MEM_REF)
	    {
	      tree t = TREE_OPERAND (decl, 0);
	      if (TREE_CODE (t) == POINTER_PLUS_EXPR)
		t = TREE_OPERAND (t, 0);
	      if (TREE_CODE (t) == INDIRECT_REF
		  || TREE_CODE (t) == ADDR_EXPR)
		t = TREE_OPERAND (t, 0);
	      install_var_local (t, ctx);
	      if (is_taskreg_ctx (ctx)
		  && !is_global_var (maybe_lookup_decl_in_outer_ctx (t, ctx))
		  && !is_variable_sized (t))
		{
		  by_ref = use_pointer_for_field (t, ctx);
		  install_var_field (t, by_ref, 3, ctx);
		}
	      break;
	    }
	  goto do_private;

	case OMP_CLAUSE_LASTPRIVATE:
	  /* Let the corresponding firstprivate clause create
	     the variable.  */
	  if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
	    break;
	  /* FALLTHRU */

	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_LINEAR:
	  decl = OMP_CLAUSE_DECL (c);
	do_private:
	  if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
	       || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
	      && is_gimple_omp_offloaded (ctx->stmt))
	    {
	      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
		install_var_field (decl, !omp_is_reference (decl), 3, ctx);
	      else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
		install_var_field (decl, true, 3, ctx);
	      else
		install_var_field (decl, false, 3, ctx);
	    }
	  if (is_variable_sized (decl))
	    {
	      if (is_task_ctx (ctx))
		install_var_field (decl, false, 1, ctx);
	      break;
	    }
	  else if (is_taskreg_ctx (ctx))
	    {
	      bool global
		= is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx));
	      by_ref = use_pointer_for_field (decl, NULL);

	      if (is_task_ctx (ctx)
		  && (global || by_ref || omp_is_reference (decl)))
		{
		  install_var_field (decl, false, 1, ctx);
		  if (!global)
		    install_var_field (decl, by_ref, 2, ctx);
		}
	      else if (!global)
		install_var_field (decl, by_ref, 3, ctx);
	    }
	  install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_USE_DEVICE_PTR:
	  decl = OMP_CLAUSE_DECL (c);
	  if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	    install_var_field (decl, true, 3, ctx);
	  else
	    install_var_field (decl, false, 3, ctx);
	  if (DECL_SIZE (decl)
	      && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
	    {
	      tree decl2 = DECL_VALUE_EXPR (decl);
	      gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
	      decl2 = TREE_OPERAND (decl2, 0);
	      gcc_assert (DECL_P (decl2));
	      install_var_local (decl2, ctx);
	    }
	  install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_IS_DEVICE_PTR:
	  decl = OMP_CLAUSE_DECL (c);
	  goto do_private;

	case OMP_CLAUSE__LOOPTEMP_:
	  gcc_assert (is_taskreg_ctx (ctx));
	  decl = OMP_CLAUSE_DECL (c);
	  install_var_field (decl, false, 3, ctx);
	  install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_COPYIN:
	  decl = OMP_CLAUSE_DECL (c);
	  by_ref = use_pointer_for_field (decl, NULL);
	  install_var_field (decl, by_ref, 3, ctx);
	  break;

	case OMP_CLAUSE_DEFAULT:
	  ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
	  break;

	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_DIST_SCHEDULE:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE_PRIORITY:
	case OMP_CLAUSE_GRAINSIZE:
	case OMP_CLAUSE_NUM_TASKS:
	case OMP_CLAUSE__CILK_FOR_COUNT_:
	case OMP_CLAUSE_NUM_GANGS:
	case OMP_CLAUSE_NUM_WORKERS:
	case OMP_CLAUSE_VECTOR_LENGTH:
	  if (ctx->outer)
	    scan_omp_op (&OMP_CLAUSE_OPERAND (c, 0), ctx->outer);
	  break;

	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	case OMP_CLAUSE_MAP:
	  if (ctx->outer)
	    scan_omp_op (&OMP_CLAUSE_SIZE (c), ctx->outer);
	  decl = OMP_CLAUSE_DECL (c);
	  /* Global variables with "omp declare target" attribute
	     don't need to be copied, the receiver side will use them
	     directly.  However, global variables with "omp declare target link"
	     attribute need to be copied.  */
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && DECL_P (decl)
	      && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
		   && (OMP_CLAUSE_MAP_KIND (c)
		       != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
		  || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	      && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
	      && varpool_node::get_create (decl)->offloadable
	      && !lookup_attribute ("omp declare target link",
				    DECL_ATTRIBUTES (decl)))
	    break;
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER)
	    {
	      /* Ignore GOMP_MAP_POINTER kind for arrays in regions that are
		 not offloaded; there is nothing to map for those.  */
	      if (!is_gimple_omp_offloaded (ctx->stmt)
		  && !POINTER_TYPE_P (TREE_TYPE (decl))
		  && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
		break;
	    }
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
		  || (OMP_CLAUSE_MAP_KIND (c)
		      == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
	    {
	      if (TREE_CODE (decl) == COMPONENT_REF
		  || (TREE_CODE (decl) == INDIRECT_REF
		      && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
		      && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
			  == REFERENCE_TYPE)))
		break;
	      if (DECL_SIZE (decl)
		  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  install_var_local (decl2, ctx);
		}
	      install_var_local (decl, ctx);
	      break;
	    }
	  if (DECL_P (decl))
	    {
	      if (DECL_SIZE (decl)
		  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  install_var_field (decl2, true, 3, ctx);
		  install_var_local (decl2, ctx);
		  install_var_local (decl, ctx);
		}
	      else
		{
		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
		      && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
		      && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
		      && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
		    install_var_field (decl, true, 7, ctx);
		  else
		    install_var_field (decl, true, 3, ctx,
				       base_pointers_restrict);
		  if (is_gimple_omp_offloaded (ctx->stmt)
		      && !OMP_CLAUSE_MAP_IN_REDUCTION (c))
		    install_var_local (decl, ctx);
		}
	    }
	  else
	    {
	      tree base = get_base_address (decl);
	      tree nc = OMP_CLAUSE_CHAIN (c);
	      if (DECL_P (base)
		  && nc != NULL_TREE
		  && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
		  && OMP_CLAUSE_DECL (nc) == base
		  && OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_POINTER
		  && integer_zerop (OMP_CLAUSE_SIZE (nc)))
		{
		  OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c) = 1;
		  OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (nc) = 1;
		}
	      else
		{
		  if (ctx->outer)
		    {
		      scan_omp_op (&OMP_CLAUSE_DECL (c), ctx->outer);
		      decl = OMP_CLAUSE_DECL (c);
		    }
		  gcc_assert (!splay_tree_lookup (ctx->field_map,
						  (splay_tree_key) decl));
		  tree field
		    = build_decl (OMP_CLAUSE_LOCATION (c),
				  FIELD_DECL, NULL_TREE, ptr_type_node);
		  SET_DECL_ALIGN (field, TYPE_ALIGN (ptr_type_node));
		  insert_field_into_struct (ctx->record_type, field);
		  splay_tree_insert (ctx->field_map, (splay_tree_key) decl,
				     (splay_tree_value) field);
		}
	    }
	  break;

	case OMP_CLAUSE__GRIDDIM_:
	  if (ctx->outer)
	    {
	      scan_omp_op (&OMP_CLAUSE__GRIDDIM__SIZE (c), ctx->outer);
	      scan_omp_op (&OMP_CLAUSE__GRIDDIM__GROUP (c), ctx->outer);
	    }
	  break;

	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE_SIMDLEN:
	case OMP_CLAUSE_THREADS:
	case OMP_CLAUSE_SIMD:
	case OMP_CLAUSE_NOGROUP:
	case OMP_CLAUSE_DEFAULTMAP:
	case OMP_CLAUSE_ASYNC:
	case OMP_CLAUSE_WAIT:
	case OMP_CLAUSE_GANG:
	case OMP_CLAUSE_WORKER:
	case OMP_CLAUSE_VECTOR:
	case OMP_CLAUSE_INDEPENDENT:
	case OMP_CLAUSE_AUTO:
	case OMP_CLAUSE_SEQ:
	case OMP_CLAUSE_TILE:
	case OMP_CLAUSE__SIMT_:
	  break;

	case OMP_CLAUSE_ALIGNED:
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_global_var (decl)
	      && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	    install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE__CACHE_:
	default:
	  gcc_unreachable ();
	}
    }

  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_LASTPRIVATE:
	  /* Let the corresponding firstprivate clause create
	     the variable.  */
	  if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
	    scan_array_reductions = true;
	  if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
	    break;
	  /* FALLTHRU */

	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_LINEAR:
	case OMP_CLAUSE_IS_DEVICE_PTR:
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_variable_sized (decl))
	    {
	      if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
		   || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
		  && is_gimple_omp_offloaded (ctx->stmt))
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  install_var_local (decl2, ctx);
		  fixup_remapped_decl (decl2, ctx, false);
		}
	      install_var_local (decl, ctx);
	    }
	  fixup_remapped_decl (decl, ctx,
			       OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
			       && OMP_CLAUSE_PRIVATE_DEBUG (c));
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
	      && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
	    scan_array_reductions = true;
	  break;

	case OMP_CLAUSE_REDUCTION:
	  decl = OMP_CLAUSE_DECL (c);
	  if (TREE_CODE (decl) != MEM_REF)
	    {
	      if (is_variable_sized (decl))
		install_var_local (decl, ctx);
	      fixup_remapped_decl (decl, ctx, false);
	    }
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	    scan_array_reductions = true;
	  break;

	case OMP_CLAUSE_SHARED:
	  /* Ignore shared directives in teams construct.  */
	  if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
	    break;
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
	    break;
	  if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	    {
	      if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl,
								 ctx->outer)))
		break;
	      bool by_ref = use_pointer_for_field (decl, ctx);
	      install_var_field (decl, by_ref, 11, ctx);
	      break;
	    }
	  fixup_remapped_decl (decl, ctx, false);
	  break;

	case OMP_CLAUSE_MAP:
	  if (!is_gimple_omp_offloaded (ctx->stmt))
	    break;
	  decl = OMP_CLAUSE_DECL (c);
	  if (DECL_P (decl)
	      && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
		   && (OMP_CLAUSE_MAP_KIND (c)
		       != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
		  || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	      && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
	      && varpool_node::get_create (decl)->offloadable)
	    break;
	  if (DECL_P (decl))
	    {
	      if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
		   || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
		  && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
		  && !COMPLETE_TYPE_P (TREE_TYPE (decl)))
		{
		  tree new_decl = lookup_decl (decl, ctx);
		  TREE_TYPE (new_decl)
		    = remap_type (TREE_TYPE (decl), &ctx->cb);
		}
	      else if (DECL_SIZE (decl)
		       && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  fixup_remapped_decl (decl2, ctx, false);
		  fixup_remapped_decl (decl, ctx, true);
		}
	      else
		fixup_remapped_decl (decl, ctx, false);
	    }
	  break;

	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_DIST_SCHEDULE:
	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE_SIMDLEN:
	case OMP_CLAUSE_ALIGNED:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE__LOOPTEMP_:
	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	case OMP_CLAUSE_PRIORITY:
	case OMP_CLAUSE_GRAINSIZE:
	case OMP_CLAUSE_NUM_TASKS:
	case OMP_CLAUSE_THREADS:
	case OMP_CLAUSE_SIMD:
	case OMP_CLAUSE_NOGROUP:
	case OMP_CLAUSE_DEFAULTMAP:
	case OMP_CLAUSE_USE_DEVICE_PTR:
	case OMP_CLAUSE__CILK_FOR_COUNT_:
	case OMP_CLAUSE_ASYNC:
	case OMP_CLAUSE_WAIT:
	case OMP_CLAUSE_NUM_GANGS:
	case OMP_CLAUSE_NUM_WORKERS:
	case OMP_CLAUSE_VECTOR_LENGTH:
	case OMP_CLAUSE_GANG:
	case OMP_CLAUSE_WORKER:
	case OMP_CLAUSE_VECTOR:
	case OMP_CLAUSE_INDEPENDENT:
	case OMP_CLAUSE_AUTO:
	case OMP_CLAUSE_SEQ:
	case OMP_CLAUSE_TILE:
	case OMP_CLAUSE__GRIDDIM_:
	case OMP_CLAUSE__SIMT_:
	  break;

	case OMP_CLAUSE__CACHE_:
	default:
	  gcc_unreachable ();
	}
    }

  gcc_checking_assert (!scan_array_reductions
		       || !is_gimple_omp_oacc (ctx->stmt));
  if (scan_array_reductions)
    {
      for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	    && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	  {
	    scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c), ctx);
	    scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
	  }
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
		 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
	  scan_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
		 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
	  scan_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
    }
}
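/* In short (a rough summary, not a spec): the first loop above creates
   record fields and context-local copies for each data-sharing clause,
   the second loop fixes up the remapped decls once all fields exist,
   and any reduction/lastprivate/linear GIMPLE sequences are queued for
   a final scan_omp walk.  For example, on "#pragma omp task
   firstprivate(x) shared(y)" with a scalar x and an addressable y, x
   typically gets fields in both record_type and srecord_type while y
   gets a pointer field.  */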
/* Create a new name for an omp child function.  Returns an identifier.
   If IS_CILK_FOR is true then the suffix for the child function is
   "_cilk_for_fn".  */

static tree
create_omp_child_function_name (bool task_copy, bool is_cilk_for)
{
  if (is_cilk_for)
    return clone_function_name (current_function_decl, "_cilk_for_fn");
  return clone_function_name (current_function_decl,
			      task_copy ? "_omp_cpyfn" : "_omp_fn");
}

/* Returns the type of the induction variable for the child function for
   _Cilk_for and the types for the _high and _low variables based on TYPE.  */

static tree
cilk_for_check_loop_diff_type (tree type)
{
  if (TYPE_PRECISION (type) <= TYPE_PRECISION (uint32_type_node))
    {
      if (TYPE_UNSIGNED (type))
	return uint32_type_node;
      else
	return integer_type_node;
    }
  else
    {
      if (TYPE_UNSIGNED (type))
	return uint64_type_node;
      else
	return long_long_integer_type_node;
    }
}
/* Return true if CTX may belong to offloaded code: either if current function
   is offloaded, or any enclosing context corresponds to a target region.  */

static bool
omp_maybe_offloaded_ctx (omp_context *ctx)
{
  if (cgraph_node::get (current_function_decl)->offloadable)
    return true;
  for (; ctx; ctx = ctx->outer)
    if (is_gimple_omp_offloaded (ctx->stmt))
      return true;
  return false;
}
/* Build a decl for the omp child function.  It'll not contain a body
   yet, just the bare decl.  */

static void
create_omp_child_function (omp_context *ctx, bool task_copy)
{
  tree decl, type, name, t;

  tree cilk_for_count
    = (flag_cilkplus && gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL)
      ? omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
			 OMP_CLAUSE__CILK_FOR_COUNT_) : NULL_TREE;
  tree cilk_var_type = NULL_TREE;

  name = create_omp_child_function_name (task_copy,
					 cilk_for_count != NULL_TREE);
  if (task_copy)
    type = build_function_type_list (void_type_node, ptr_type_node,
				     ptr_type_node, NULL_TREE);
  else if (cilk_for_count)
    {
      type = TREE_TYPE (OMP_CLAUSE_OPERAND (cilk_for_count, 0));
      cilk_var_type = cilk_for_check_loop_diff_type (type);
      type = build_function_type_list (void_type_node, ptr_type_node,
				       cilk_var_type, cilk_var_type, NULL_TREE);
    }
  else
    type = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);

  decl = build_decl (gimple_location (ctx->stmt), FUNCTION_DECL, name, type);

  gcc_checking_assert (!is_gimple_omp_oacc (ctx->stmt)
		       || !task_copy);
  if (!task_copy)
    ctx->cb.dst_fn = decl;
  else
    gimple_omp_task_set_copy_fn (ctx->stmt, decl);

  TREE_STATIC (decl) = 1;
  TREE_USED (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  DECL_IGNORED_P (decl) = 0;
  TREE_PUBLIC (decl) = 0;
  DECL_UNINLINABLE (decl) = 1;
  DECL_EXTERNAL (decl) = 0;
  DECL_CONTEXT (decl) = NULL_TREE;
  DECL_INITIAL (decl) = make_node (BLOCK);
  BLOCK_SUPERCONTEXT (DECL_INITIAL (decl)) = decl;
  if (omp_maybe_offloaded_ctx (ctx))
    {
      cgraph_node::get_create (decl)->offloadable = 1;
      if (ENABLE_OFFLOADING)
	g->have_offload = true;
    }

  if (cgraph_node::get_create (decl)->offloadable
      && !lookup_attribute ("omp declare target",
			    DECL_ATTRIBUTES (current_function_decl)))
    {
      const char *target_attr = (is_gimple_omp_offloaded (ctx->stmt)
				 ? "omp target entrypoint"
				 : "omp declare target");
      DECL_ATTRIBUTES (decl)
	= tree_cons (get_identifier (target_attr),
		     NULL_TREE, DECL_ATTRIBUTES (decl));
    }

  t = build_decl (DECL_SOURCE_LOCATION (decl),
		  RESULT_DECL, NULL_TREE, void_type_node);
  DECL_ARTIFICIAL (t) = 1;
  DECL_IGNORED_P (t) = 1;
  DECL_CONTEXT (t) = decl;
  DECL_RESULT (decl) = t;

  /* _Cilk_for's child function requires two extra parameters called
     __low and __high that are set by the Cilk runtime when it calls this
     function.  */
  if (cilk_for_count)
    {
      t = build_decl (DECL_SOURCE_LOCATION (decl),
		      PARM_DECL, get_identifier ("__high"), cilk_var_type);
      DECL_ARTIFICIAL (t) = 1;
      DECL_NAMELESS (t) = 1;
      DECL_ARG_TYPE (t) = ptr_type_node;
      DECL_CONTEXT (t) = current_function_decl;
      TREE_USED (t) = 1;
      DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
      DECL_ARGUMENTS (decl) = t;

      t = build_decl (DECL_SOURCE_LOCATION (decl),
		      PARM_DECL, get_identifier ("__low"), cilk_var_type);
      DECL_ARTIFICIAL (t) = 1;
      DECL_NAMELESS (t) = 1;
      DECL_ARG_TYPE (t) = ptr_type_node;
      DECL_CONTEXT (t) = current_function_decl;
      TREE_USED (t) = 1;
      DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
      DECL_ARGUMENTS (decl) = t;
    }

  tree data_name = get_identifier (".omp_data_i");
  t = build_decl (DECL_SOURCE_LOCATION (decl), PARM_DECL, data_name,
		  ptr_type_node);
  DECL_ARTIFICIAL (t) = 1;
  DECL_NAMELESS (t) = 1;
  DECL_ARG_TYPE (t) = ptr_type_node;
  DECL_CONTEXT (t) = current_function_decl;
  TREE_USED (t) = 1;
  TREE_READONLY (t) = 1;
  if (cilk_for_count)
    DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
  DECL_ARGUMENTS (decl) = t;
  if (!task_copy)
    ctx->receiver_decl = t;
  else
    {
      t = build_decl (DECL_SOURCE_LOCATION (decl),
		      PARM_DECL, get_identifier (".omp_data_o"),
		      ptr_type_node);
      DECL_ARTIFICIAL (t) = 1;
      DECL_NAMELESS (t) = 1;
      DECL_ARG_TYPE (t) = ptr_type_node;
      DECL_CONTEXT (t) = current_function_decl;
      TREE_USED (t) = 1;
      TREE_ADDRESSABLE (t) = 1;
      DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
      DECL_ARGUMENTS (decl) = t;
    }

  /* Allocate memory for the function structure.  The call to
     allocate_struct_function clobbers CFUN, so we need to restore
     it afterward.  */
  push_struct_function (decl);
  cfun->function_end_locus = gimple_location (ctx->stmt);
  init_tree_ssa (cfun);
  pop_cfun ();
}
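/* Sketch of the decls this produces for a host function foo (names
   assumed, following the "_omp_fn"/"_omp_cpyfn" suffixes and
   ".omp_data_i"/".omp_data_o" conventions above; the numeric suffix
   comes from clone_function_name):

       static void foo._omp_fn.0 (void *.omp_data_i);
       static void foo._omp_cpyfn.1 (void *.omp_data_o, void *.omp_data_i);

   with, for _Cilk_for, the extra __low/__high parameters prepended.
   The receiver type is later narrowed by fixup_child_record_type.  */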
/* Callback for walk_gimple_seq.  Check if a combined parallel
   contains a gimple_omp_for_combined_into_p OMP_FOR.  */

tree
omp_find_combined_for (gimple_stmt_iterator *gsi_p,
		       bool *handled_ops_p,
		       struct walk_stmt_info *wi)
{
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;
  switch (gimple_code (stmt))
    {
    WALK_SUBSTMTS;

    case GIMPLE_OMP_FOR:
      if (gimple_omp_for_combined_into_p (stmt)
	  && gimple_omp_for_kind (stmt)
	     == *(const enum gf_mask *) (wi->info))
	{
	  wi->info = stmt;
	  return integer_zero_node;
	}
      break;
    default:
      break;
    }
  return NULL;
}
/* Add _LOOPTEMP_ clauses on an OpenMP parallel or task.  */

static void
add_taskreg_looptemp_clauses (enum gf_mask msk, gimple *stmt,
			      omp_context *outer_ctx)
{
  struct walk_stmt_info wi;

  memset (&wi, 0, sizeof (wi));
  wi.val_only = true;
  wi.info = (void *) &msk;
  walk_gimple_seq (gimple_omp_body (stmt), omp_find_combined_for, NULL, &wi);
  if (wi.info != (void *) &msk)
    {
      gomp_for *for_stmt = as_a <gomp_for *> ((gimple *) wi.info);
      struct omp_for_data fd;
      omp_extract_for_data (for_stmt, &fd, NULL);
      /* We need two temporaries with fd.loop.v type (istart/iend)
	 and then (fd.collapse - 1) temporaries with the same
	 type for count2 ... countN-1 vars if not constant.  */
      size_t count = 2, i;
      tree type = fd.iter_type;
      if (fd.collapse > 1
	  && TREE_CODE (fd.loop.n2) != INTEGER_CST)
	{
	  count += fd.collapse - 1;
	  /* If there are lastprivate clauses on the inner
	     GIMPLE_OMP_FOR, add one more temporary for the total number
	     of iterations (product of count1 ... countN-1).  */
	  if (omp_find_clause (gimple_omp_for_clauses (for_stmt),
			       OMP_CLAUSE_LASTPRIVATE))
	    count++;
	  else if (msk == GF_OMP_FOR_KIND_FOR
		   && omp_find_clause (gimple_omp_parallel_clauses (stmt),
				       OMP_CLAUSE_LASTPRIVATE))
	    count++;
	}
      for (i = 0; i < count; i++)
	{
	  tree temp = create_tmp_var (type);
	  tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
	  insert_decl_map (&outer_ctx->cb, temp, temp);
	  OMP_CLAUSE_DECL (c) = temp;
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
	  gimple_omp_taskreg_set_clauses (stmt, c);
	}
    }
}
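/* Worked example (assumed): for a combined "parallel for" with
   collapse(2) and a non-constant inner bound, COUNT = 2 + (2 - 1) = 3,
   so three _LOOPTEMP_ clauses are added (istart, iend, count2), plus
   one more if a lastprivate clause is present on the inner loop or the
   parallel.  */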
/* Scan an OpenMP parallel directive.  */

static void
scan_omp_parallel (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
{
  omp_context *ctx;
  tree name;
  gomp_parallel *stmt = as_a <gomp_parallel *> (gsi_stmt (*gsi));

  /* Ignore parallel directives with empty bodies, unless there
     are copyin clauses.  */
  if (optimize > 0
      && empty_body_p (gimple_omp_body (stmt))
      && omp_find_clause (gimple_omp_parallel_clauses (stmt),
			  OMP_CLAUSE_COPYIN) == NULL)
    {
      gsi_replace (gsi, gimple_build_nop (), false);
      return;
    }

  if (gimple_omp_parallel_combined_p (stmt))
    add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_FOR, stmt, outer_ctx);

  ctx = new_omp_context (stmt, outer_ctx);
  taskreg_contexts.safe_push (ctx);
  if (taskreg_nesting_level > 1)
    ctx->is_nested = true;
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  ctx->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  name = create_tmp_var_name (".omp_data_s");
  name = build_decl (gimple_location (stmt),
		     TYPE_DECL, name, ctx->record_type);
  DECL_ARTIFICIAL (name) = 1;
  DECL_NAMELESS (name) = 1;
  TYPE_NAME (ctx->record_type) = name;
  TYPE_ARTIFICIAL (ctx->record_type) = 1;
  if (!gimple_omp_parallel_grid_phony (stmt))
    {
      create_omp_child_function (ctx, false);
      gimple_omp_parallel_set_child_fn (stmt, ctx->cb.dst_fn);
    }

  scan_sharing_clauses (gimple_omp_parallel_clauses (stmt), ctx);
  scan_omp (gimple_omp_body_ptr (stmt), ctx);

  if (TYPE_FIELDS (ctx->record_type) == NULL)
    ctx->record_type = ctx->receiver_decl = NULL;
}
1851 /* Scan an OpenMP task directive. */
1853 static void
1854 scan_omp_task (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
1856 omp_context *ctx;
1857 tree name, t;
1858 gomp_task *stmt = as_a <gomp_task *> (gsi_stmt (*gsi));
1860 /* Ignore task directives with empty bodies. */
1861 if (optimize > 0
1862 && empty_body_p (gimple_omp_body (stmt)))
1864 gsi_replace (gsi, gimple_build_nop (), false);
1865 return;
1868 if (gimple_omp_task_taskloop_p (stmt))
1869 add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_TASKLOOP, stmt, outer_ctx);
1871 ctx = new_omp_context (stmt, outer_ctx);
1872 taskreg_contexts.safe_push (ctx);
1873 if (taskreg_nesting_level > 1)
1874 ctx->is_nested = true;
1875 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
1876 ctx->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
1877 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
1878 name = create_tmp_var_name (".omp_data_s");
1879 name = build_decl (gimple_location (stmt),
1880 TYPE_DECL, name, ctx->record_type);
1881 DECL_ARTIFICIAL (name) = 1;
1882 DECL_NAMELESS (name) = 1;
1883 TYPE_NAME (ctx->record_type) = name;
1884 TYPE_ARTIFICIAL (ctx->record_type) = 1;
1885 create_omp_child_function (ctx, false);
1886 gimple_omp_task_set_child_fn (stmt, ctx->cb.dst_fn);
1888 scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);
1890 if (ctx->srecord_type)
1892 name = create_tmp_var_name (".omp_data_a");
1893 name = build_decl (gimple_location (stmt),
1894 TYPE_DECL, name, ctx->srecord_type);
1895 DECL_ARTIFICIAL (name) = 1;
1896 DECL_NAMELESS (name) = 1;
1897 TYPE_NAME (ctx->srecord_type) = name;
1898 TYPE_ARTIFICIAL (ctx->srecord_type) = 1;
1899 create_omp_child_function (ctx, true);
1902 scan_omp (gimple_omp_body_ptr (stmt), ctx);
1904 if (TYPE_FIELDS (ctx->record_type) == NULL)
1906 ctx->record_type = ctx->receiver_decl = NULL;
1907 t = build_int_cst (long_integer_type_node, 0);
1908 gimple_omp_task_set_arg_size (stmt, t);
1909 t = build_int_cst (long_integer_type_node, 1);
1910 gimple_omp_task_set_arg_align (stmt, t);
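/* Illustrative sketch (assuming the usual libgomp entry point): when the
   record is dropped above because the task references no shared data,
   the eventual runtime call degenerates to roughly

     GOMP_task (child_fn, NULL, NULL, /*arg_size=*/0, /*arg_align=*/1, ...);

   which is why arg_size and arg_align are forced to 0 and 1 here.  */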
1915 /* If any decls have been made addressable during scan_omp,
1916 adjust their fields if needed, and lay out the record types
1917 of parallel/task constructs. */
1919 static void
1920 finish_taskreg_scan (omp_context *ctx)
1922 if (ctx->record_type == NULL_TREE)
1923 return;
1925 /* If any task_shared_vars were needed, verify for all
1926 OMP_CLAUSE_SHARED clauses on GIMPLE_OMP_{PARALLEL,TASK}
1927 statements whether use_pointer_for_field has changed
1928 because of that; if it did, update the field types now. */
1929 if (task_shared_vars)
1931 tree c;
1933 for (c = gimple_omp_taskreg_clauses (ctx->stmt);
1934 c; c = OMP_CLAUSE_CHAIN (c))
1935 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
1936 && !OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
1938 tree decl = OMP_CLAUSE_DECL (c);
1940 /* Global variables don't need to be copied;
1941 the receiver side will use them directly. */
1942 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1943 continue;
1944 if (!bitmap_bit_p (task_shared_vars, DECL_UID (decl))
1945 || !use_pointer_for_field (decl, ctx))
1946 continue;
1947 tree field = lookup_field (decl, ctx);
1948 if (TREE_CODE (TREE_TYPE (field)) == POINTER_TYPE
1949 && TREE_TYPE (TREE_TYPE (field)) == TREE_TYPE (decl))
1950 continue;
1951 TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
1952 TREE_THIS_VOLATILE (field) = 0;
1953 DECL_USER_ALIGN (field) = 0;
1954 SET_DECL_ALIGN (field, TYPE_ALIGN (TREE_TYPE (field)));
1955 if (TYPE_ALIGN (ctx->record_type) < DECL_ALIGN (field))
1956 SET_TYPE_ALIGN (ctx->record_type, DECL_ALIGN (field));
1957 if (ctx->srecord_type)
1959 tree sfield = lookup_sfield (decl, ctx);
1960 TREE_TYPE (sfield) = TREE_TYPE (field);
1961 TREE_THIS_VOLATILE (sfield) = 0;
1962 DECL_USER_ALIGN (sfield) = 0;
1963 SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
1964 if (TYPE_ALIGN (ctx->srecord_type) < DECL_ALIGN (sfield))
1965 SET_TYPE_ALIGN (ctx->srecord_type, DECL_ALIGN (sfield));
1970 if (gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL)
1972 layout_type (ctx->record_type);
1973 fixup_child_record_type (ctx);
1975 else
1977 location_t loc = gimple_location (ctx->stmt);
1978 tree *p, vla_fields = NULL_TREE, *q = &vla_fields;
1979 /* Move VLA fields to the end. */
1980 p = &TYPE_FIELDS (ctx->record_type);
1981 while (*p)
1982 if (!TYPE_SIZE_UNIT (TREE_TYPE (*p))
1983 || ! TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (*p))))
1985 *q = *p;
1986 *p = TREE_CHAIN (*p);
1987 TREE_CHAIN (*q) = NULL_TREE;
1988 q = &TREE_CHAIN (*q);
1990 else
1991 p = &DECL_CHAIN (*p);
1992 *p = vla_fields;
1993 if (gimple_omp_task_taskloop_p (ctx->stmt))
1995 /* Move the fields corresponding to the first and second _looptemp_
1996 clauses to the front. They are filled in by GOMP_taskloop
1997 and thus need to be at specific positions. */
1998 tree c1 = gimple_omp_task_clauses (ctx->stmt);
1999 c1 = omp_find_clause (c1, OMP_CLAUSE__LOOPTEMP_);
2000 tree c2 = omp_find_clause (OMP_CLAUSE_CHAIN (c1),
2001 OMP_CLAUSE__LOOPTEMP_);
2002 tree f1 = lookup_field (OMP_CLAUSE_DECL (c1), ctx);
2003 tree f2 = lookup_field (OMP_CLAUSE_DECL (c2), ctx);
2004 p = &TYPE_FIELDS (ctx->record_type);
2005 while (*p)
2006 if (*p == f1 || *p == f2)
2007 *p = DECL_CHAIN (*p);
2008 else
2009 p = &DECL_CHAIN (*p);
2010 DECL_CHAIN (f1) = f2;
2011 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->record_type);
2012 TYPE_FIELDS (ctx->record_type) = f1;
2013 if (ctx->srecord_type)
2015 f1 = lookup_sfield (OMP_CLAUSE_DECL (c1), ctx);
2016 f2 = lookup_sfield (OMP_CLAUSE_DECL (c2), ctx);
2017 p = &TYPE_FIELDS (ctx->srecord_type);
2018 while (*p)
2019 if (*p == f1 || *p == f2)
2020 *p = DECL_CHAIN (*p);
2021 else
2022 p = &DECL_CHAIN (*p);
2023 DECL_CHAIN (f1) = f2;
2024 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
2025 TYPE_FIELDS (ctx->srecord_type) = f1;
2028 layout_type (ctx->record_type);
2029 fixup_child_record_type (ctx);
2030 if (ctx->srecord_type)
2031 layout_type (ctx->srecord_type);
2032 tree t = fold_convert_loc (loc, long_integer_type_node,
2033 TYPE_SIZE_UNIT (ctx->record_type));
2034 gimple_omp_task_set_arg_size (ctx->stmt, t);
2035 t = build_int_cst (long_integer_type_node,
2036 TYPE_ALIGN_UNIT (ctx->record_type));
2037 gimple_omp_task_set_arg_align (ctx->stmt, t);
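/* Illustration (hypothetical layout): for a taskloop, the reordering
   above leaves the record starting with the two _looptemp_ fields that
   GOMP_taskloop fills in, e.g.

     struct .omp_data_s { long _looptemp_1; long _looptemp_2; ...; };

   while fields of variable-sized type have been moved to the end,
   presumably so the constant-sized prefix keeps constant offsets.  */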
2041 /* Find the innermost enclosing offload (target) context, or NULL if there is none. */
2043 static omp_context *
2044 enclosing_target_ctx (omp_context *ctx)
2046 for (; ctx; ctx = ctx->outer)
2047 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET)
2048 break;
2050 return ctx;
2053 /* Return true if CTX is part of an OpenACC kernels region. */
2055 static bool
2056 ctx_in_oacc_kernels_region (omp_context *ctx)
2058 for (;ctx != NULL; ctx = ctx->outer)
2060 gimple *stmt = ctx->stmt;
2061 if (gimple_code (stmt) == GIMPLE_OMP_TARGET
2062 && gimple_omp_target_kind (stmt) == GF_OMP_TARGET_KIND_OACC_KERNELS)
2063 return true;
2066 return false;
2069 /* Check the parallelism clauses inside a kernels region.
2070 Until kernels handling moves to use the same loop indirection
2071 scheme as parallel, we need to do this checking early. */
2073 static unsigned
2074 check_oacc_kernel_gwv (gomp_for *stmt, omp_context *ctx)
2076 bool checking = true;
2077 unsigned outer_mask = 0;
2078 unsigned this_mask = 0;
2079 bool has_seq = false, has_auto = false;
2081 if (ctx->outer)
2082 outer_mask = check_oacc_kernel_gwv (NULL, ctx->outer);
2083 if (!stmt)
2085 checking = false;
2086 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR)
2087 return outer_mask;
2088 stmt = as_a <gomp_for *> (ctx->stmt);
2091 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2093 switch (OMP_CLAUSE_CODE (c))
2095 case OMP_CLAUSE_GANG:
2096 this_mask |= GOMP_DIM_MASK (GOMP_DIM_GANG);
2097 break;
2098 case OMP_CLAUSE_WORKER:
2099 this_mask |= GOMP_DIM_MASK (GOMP_DIM_WORKER);
2100 break;
2101 case OMP_CLAUSE_VECTOR:
2102 this_mask |= GOMP_DIM_MASK (GOMP_DIM_VECTOR);
2103 break;
2104 case OMP_CLAUSE_SEQ:
2105 has_seq = true;
2106 break;
2107 case OMP_CLAUSE_AUTO:
2108 has_auto = true;
2109 break;
2110 default:
2111 break;
2115 if (checking)
2117 if (has_seq && (this_mask || has_auto))
2118 error_at (gimple_location (stmt), "%<seq%> overrides other"
2119 " OpenACC loop specifiers");
2120 else if (has_auto && this_mask)
2121 error_at (gimple_location (stmt), "%<auto%> conflicts with other"
2122 " OpenACC loop specifiers");
2124 if (this_mask & outer_mask)
2125 error_at (gimple_location (stmt), "inner loop uses same"
2126 " OpenACC parallelism as containing loop");
2129 return outer_mask | this_mask;
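/* Example of a diagnosed case (illustration only):

     #pragma acc loop gang
     for (i = 0; i < n; i++)
       #pragma acc loop gang
       for (j = 0; j < m; j++) ...

   For the inner loop, the recursive call above returns an outer_mask
   with the GOMP_DIM_GANG bit set, this_mask sets the same bit, and the
   "inner loop uses same OpenACC parallelism" error is emitted.  */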
2132 /* Scan a GIMPLE_OMP_FOR. */
2134 static omp_context *
2135 scan_omp_for (gomp_for *stmt, omp_context *outer_ctx)
2137 omp_context *ctx;
2138 size_t i;
2139 tree clauses = gimple_omp_for_clauses (stmt);
2141 ctx = new_omp_context (stmt, outer_ctx);
2143 if (is_gimple_omp_oacc (stmt))
2145 omp_context *tgt = enclosing_target_ctx (outer_ctx);
2147 if (!tgt || is_oacc_parallel (tgt))
2148 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
2150 char const *check = NULL;
2152 switch (OMP_CLAUSE_CODE (c))
2154 case OMP_CLAUSE_GANG:
2155 check = "gang";
2156 break;
2158 case OMP_CLAUSE_WORKER:
2159 check = "worker";
2160 break;
2162 case OMP_CLAUSE_VECTOR:
2163 check = "vector";
2164 break;
2166 default:
2167 break;
2170 if (check && OMP_CLAUSE_OPERAND (c, 0))
2171 error_at (gimple_location (stmt),
2172 "argument not permitted on %qs clause in"
2173 " OpenACC %<parallel%>", check);
2176 if (tgt && is_oacc_kernels (tgt))
2178 /* Strip out reductions, as they are not handled yet. */
2179 tree *prev_ptr = &clauses;
2181 while (tree probe = *prev_ptr)
2183 tree *next_ptr = &OMP_CLAUSE_CHAIN (probe);
2185 if (OMP_CLAUSE_CODE (probe) == OMP_CLAUSE_REDUCTION)
2186 *prev_ptr = *next_ptr;
2187 else
2188 prev_ptr = next_ptr;
2191 gimple_omp_for_set_clauses (stmt, clauses);
2192 check_oacc_kernel_gwv (stmt, ctx);
2196 scan_sharing_clauses (clauses, ctx);
2198 scan_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
2199 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
2201 scan_omp_op (gimple_omp_for_index_ptr (stmt, i), ctx);
2202 scan_omp_op (gimple_omp_for_initial_ptr (stmt, i), ctx);
2203 scan_omp_op (gimple_omp_for_final_ptr (stmt, i), ctx);
2204 scan_omp_op (gimple_omp_for_incr_ptr (stmt, i), ctx);
2206 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2207 return ctx;
2210 /* Duplicate #pragma omp simd: create one copy for SIMT and another for SIMD. */
2212 static void
2213 scan_omp_simd (gimple_stmt_iterator *gsi, gomp_for *stmt,
2214 omp_context *outer_ctx)
2216 gbind *bind = gimple_build_bind (NULL, NULL, NULL);
2217 gsi_replace (gsi, bind, false);
2218 gimple_seq seq = NULL;
2219 gimple *g = gimple_build_call_internal (IFN_GOMP_USE_SIMT, 0);
2220 tree cond = create_tmp_var_raw (integer_type_node);
2221 DECL_CONTEXT (cond) = current_function_decl;
2222 DECL_SEEN_IN_BIND_EXPR_P (cond) = 1;
2223 gimple_bind_set_vars (bind, cond);
2224 gimple_call_set_lhs (g, cond);
2225 gimple_seq_add_stmt (&seq, g);
2226 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
2227 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
2228 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
2229 g = gimple_build_cond (NE_EXPR, cond, integer_zero_node, lab1, lab2);
2230 gimple_seq_add_stmt (&seq, g);
2231 g = gimple_build_label (lab1);
2232 gimple_seq_add_stmt (&seq, g);
2233 gimple_seq new_seq = copy_gimple_seq_and_replace_locals (stmt);
2234 gomp_for *new_stmt = as_a <gomp_for *> (new_seq);
2235 tree clause = build_omp_clause (gimple_location (stmt), OMP_CLAUSE__SIMT_);
2236 OMP_CLAUSE_CHAIN (clause) = gimple_omp_for_clauses (new_stmt);
2237 gimple_omp_for_set_clauses (new_stmt, clause);
2238 gimple_seq_add_stmt (&seq, new_stmt);
2239 g = gimple_build_goto (lab3);
2240 gimple_seq_add_stmt (&seq, g);
2241 g = gimple_build_label (lab2);
2242 gimple_seq_add_stmt (&seq, g);
2243 gimple_seq_add_stmt (&seq, stmt);
2244 g = gimple_build_label (lab3);
2245 gimple_seq_add_stmt (&seq, g);
2246 gimple_bind_set_body (bind, seq);
2247 update_stmt (bind);
2248 scan_omp_for (new_stmt, outer_ctx);
2249 scan_omp_for (stmt, outer_ctx)->simt_stmt = new_stmt;
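/* The bind built above therefore has roughly this shape (sketch):

     cond = IFN_GOMP_USE_SIMT ();
     if (cond != 0) goto lab1; else goto lab2;
     lab1: <copy of the loop with an added _simt_ clause>; goto lab3;
     lab2: <original loop>;
     lab3:

   Later passes are expected to fold the IFN_GOMP_USE_SIMT call to a
   constant for each compilation target, so only one copy survives.  */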
2252 /* Scan an OpenMP sections directive. */
2254 static void
2255 scan_omp_sections (gomp_sections *stmt, omp_context *outer_ctx)
2257 omp_context *ctx;
2259 ctx = new_omp_context (stmt, outer_ctx);
2260 scan_sharing_clauses (gimple_omp_sections_clauses (stmt), ctx);
2261 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2264 /* Scan an OpenMP single directive. */
2266 static void
2267 scan_omp_single (gomp_single *stmt, omp_context *outer_ctx)
2269 omp_context *ctx;
2270 tree name;
2272 ctx = new_omp_context (stmt, outer_ctx);
2273 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2274 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2275 name = create_tmp_var_name (".omp_copy_s");
2276 name = build_decl (gimple_location (stmt),
2277 TYPE_DECL, name, ctx->record_type);
2278 TYPE_NAME (ctx->record_type) = name;
2280 scan_sharing_clauses (gimple_omp_single_clauses (stmt), ctx);
2281 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2283 if (TYPE_FIELDS (ctx->record_type) == NULL)
2284 ctx->record_type = NULL;
2285 else
2286 layout_type (ctx->record_type);
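/* Illustration (not from the sources): the .omp_copy_s record built
   above only survives when a clause adds fields to it, e.g.

     #pragma omp single copyprivate(x)

   needs a record through which the executing thread broadcasts x to the
   rest of the team; for a plain "single", record_type is reset to NULL
   above.  */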
2289 /* Return true if the CLAUSES of an omp target guarantee that the base pointers
2290 used in the corresponding offloaded function are restrict. */
2292 static bool
2293 omp_target_base_pointers_restrict_p (tree clauses)
2295 /* The analysis relies on the GOMP_MAP_FORCE_* mapping kinds, which are only
2296 used by OpenACC. */
2297 if (flag_openacc == 0)
2298 return false;
2300 /* I. Basic example:
2302 void foo (void)
2304 unsigned int a[2], b[2];
2306 #pragma acc kernels \
2307 copyout (a) \
2308 copyout (b)
2310 a[0] = 0;
2311 b[0] = 1;
2315 After gimplification, we have:
2317 #pragma omp target oacc_kernels \
2318 map(force_from:a [len: 8]) \
2319 map(force_from:b [len: 8])
2321 a[0] = 0;
2322 b[0] = 1;
2325 Because both mappings have the force prefix, we know that they will be
2326 allocated when calling the corresponding offloaded function, which means we
2327 can mark the base pointers for a and b in the offloaded function as
2328 restrict. */
2330 tree c;
2331 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
2333 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP)
2334 return false;
2336 switch (OMP_CLAUSE_MAP_KIND (c))
2338 case GOMP_MAP_FORCE_ALLOC:
2339 case GOMP_MAP_FORCE_TO:
2340 case GOMP_MAP_FORCE_FROM:
2341 case GOMP_MAP_FORCE_TOFROM:
2342 break;
2343 default:
2344 return false;
2348 return true;
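/* Counterexample (illustration only): a clause that gimplifies to a
   non-force mapping, e.g. OpenACC "present_or_copy (a)" becoming
   map(tofrom:a ...), may reuse a pointer already mapped on the device,
   so no restrict guarantee can be given and the loop above returns
   false.  */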
2351 /* Scan a GIMPLE_OMP_TARGET. */
2353 static void
2354 scan_omp_target (gomp_target *stmt, omp_context *outer_ctx)
2356 omp_context *ctx;
2357 tree name;
2358 bool offloaded = is_gimple_omp_offloaded (stmt);
2359 tree clauses = gimple_omp_target_clauses (stmt);
2361 ctx = new_omp_context (stmt, outer_ctx);
2362 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2363 ctx->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
2364 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2365 name = create_tmp_var_name (".omp_data_t");
2366 name = build_decl (gimple_location (stmt),
2367 TYPE_DECL, name, ctx->record_type);
2368 DECL_ARTIFICIAL (name) = 1;
2369 DECL_NAMELESS (name) = 1;
2370 TYPE_NAME (ctx->record_type) = name;
2371 TYPE_ARTIFICIAL (ctx->record_type) = 1;
2373 bool base_pointers_restrict = false;
2374 if (offloaded)
2376 create_omp_child_function (ctx, false);
2377 gimple_omp_target_set_child_fn (stmt, ctx->cb.dst_fn);
2379 base_pointers_restrict = omp_target_base_pointers_restrict_p (clauses);
2380 if (base_pointers_restrict
2381 && dump_file && (dump_flags & TDF_DETAILS))
2382 fprintf (dump_file,
2383 "Base pointers in offloaded function are restrict\n");
2386 scan_sharing_clauses (clauses, ctx, base_pointers_restrict);
2387 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2389 if (TYPE_FIELDS (ctx->record_type) == NULL)
2390 ctx->record_type = ctx->receiver_decl = NULL;
2391 else
2393 TYPE_FIELDS (ctx->record_type)
2394 = nreverse (TYPE_FIELDS (ctx->record_type));
2395 if (flag_checking)
2397 unsigned int align = DECL_ALIGN (TYPE_FIELDS (ctx->record_type));
2398 for (tree field = TYPE_FIELDS (ctx->record_type);
2399 field;
2400 field = DECL_CHAIN (field))
2401 gcc_assert (DECL_ALIGN (field) == align);
2403 layout_type (ctx->record_type);
2404 if (offloaded)
2405 fixup_child_record_type (ctx);
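/* Illustration (hypothetical record): for

     #pragma omp target map(tofrom:a)

   the scan builds a receiver record along the lines of

     struct .omp_data_t { int *a; };

   The nreverse above restores clause order, since fields are prepended
   to TYPE_FIELDS as the clauses are scanned.  */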
2409 /* Scan an OpenMP teams directive. */
2411 static void
2412 scan_omp_teams (gomp_teams *stmt, omp_context *outer_ctx)
2414 omp_context *ctx = new_omp_context (stmt, outer_ctx);
2415 scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
2416 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2419 /* Check nesting restrictions. */
2420 static bool
2421 check_omp_nesting_restrictions (gimple *stmt, omp_context *ctx)
2423 tree c;
2425 if (ctx && gimple_code (ctx->stmt) == GIMPLE_OMP_GRID_BODY)
2426 /* GRID_BODY is an artificial construct; nesting rules will be checked in
2427 the original copy of its contents. */
2428 return true;
2430 /* No nesting of non-OpenACC STMT (that is, an OpenMP one, or a GOMP builtin)
2431 inside an OpenACC CTX. */
2432 if (!(is_gimple_omp (stmt)
2433 && is_gimple_omp_oacc (stmt))
2434 /* Except for atomic codes that we share with OpenMP. */
2435 && !(gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
2436 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE))
2438 if (oacc_get_fn_attrib (cfun->decl) != NULL)
2440 error_at (gimple_location (stmt),
2441 "non-OpenACC construct inside of OpenACC routine");
2442 return false;
2444 else
2445 for (omp_context *octx = ctx; octx != NULL; octx = octx->outer)
2446 if (is_gimple_omp (octx->stmt)
2447 && is_gimple_omp_oacc (octx->stmt))
2449 error_at (gimple_location (stmt),
2450 "non-OpenACC construct inside of OpenACC region");
2451 return false;
2455 if (ctx != NULL)
2457 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
2458 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
2460 c = NULL_TREE;
2461 if (gimple_code (stmt) == GIMPLE_OMP_ORDERED)
2463 c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
2464 if (omp_find_clause (c, OMP_CLAUSE_SIMD))
2466 if (omp_find_clause (c, OMP_CLAUSE_THREADS)
2467 && (ctx->outer == NULL
2468 || !gimple_omp_for_combined_into_p (ctx->stmt)
2469 || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR
2470 || (gimple_omp_for_kind (ctx->outer->stmt)
2471 != GF_OMP_FOR_KIND_FOR)
2472 || !gimple_omp_for_combined_p (ctx->outer->stmt)))
2474 error_at (gimple_location (stmt),
2475 "%<ordered simd threads%> must be closely "
2476 "nested inside of %<for simd%> region");
2477 return false;
2479 return true;
2482 error_at (gimple_location (stmt),
2483 "OpenMP constructs other than %<#pragma omp ordered simd%>"
2484 " may not be nested inside %<simd%> region");
2485 return false;
2487 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
2489 if ((gimple_code (stmt) != GIMPLE_OMP_FOR
2490 || ((gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_DISTRIBUTE)
2491 && (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_GRID_LOOP)))
2492 && gimple_code (stmt) != GIMPLE_OMP_PARALLEL)
2494 error_at (gimple_location (stmt),
2495 "only %<distribute%> or %<parallel%> regions are "
2496 "allowed to be strictly nested inside %<teams%> "
2497 "region");
2498 return false;
2502 switch (gimple_code (stmt))
2504 case GIMPLE_OMP_FOR:
2505 if (gimple_omp_for_kind (stmt) & GF_OMP_FOR_SIMD)
2506 return true;
2507 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_DISTRIBUTE)
2509 if (ctx != NULL && gimple_code (ctx->stmt) != GIMPLE_OMP_TEAMS)
2511 error_at (gimple_location (stmt),
2512 "%<distribute%> region must be strictly nested "
2513 "inside %<teams%> construct");
2514 return false;
2516 return true;
2518 /* We split a taskloop into a task with a nested taskloop inside it. */
2519 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP)
2520 return true;
2521 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_OACC_LOOP)
2523 bool ok = false;
2525 if (ctx)
2526 switch (gimple_code (ctx->stmt))
2528 case GIMPLE_OMP_FOR:
2529 ok = (gimple_omp_for_kind (ctx->stmt)
2530 == GF_OMP_FOR_KIND_OACC_LOOP);
2531 break;
2533 case GIMPLE_OMP_TARGET:
2534 switch (gimple_omp_target_kind (ctx->stmt))
2536 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
2537 case GF_OMP_TARGET_KIND_OACC_KERNELS:
2538 ok = true;
2539 break;
2541 default:
2542 break;
2545 default:
2546 break;
2548 else if (oacc_get_fn_attrib (current_function_decl))
2549 ok = true;
2550 if (!ok)
2552 error_at (gimple_location (stmt),
2553 "OpenACC loop directive must be associated with"
2554 " an OpenACC compute region");
2555 return false;
2558 /* FALLTHRU */
2559 case GIMPLE_CALL:
2560 if (is_gimple_call (stmt)
2561 && (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2562 == BUILT_IN_GOMP_CANCEL
2563 || DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2564 == BUILT_IN_GOMP_CANCELLATION_POINT))
2566 const char *bad = NULL;
2567 const char *kind = NULL;
2568 const char *construct
2569 = (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2570 == BUILT_IN_GOMP_CANCEL)
2571 ? "#pragma omp cancel"
2572 : "#pragma omp cancellation point";
2573 if (ctx == NULL)
2575 error_at (gimple_location (stmt), "orphaned %qs construct",
2576 construct);
2577 return false;
2579 switch (tree_fits_shwi_p (gimple_call_arg (stmt, 0))
2580 ? tree_to_shwi (gimple_call_arg (stmt, 0))
2581 : 0)
2583 case 1:
2584 if (gimple_code (ctx->stmt) != GIMPLE_OMP_PARALLEL)
2585 bad = "#pragma omp parallel";
2586 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2587 == BUILT_IN_GOMP_CANCEL
2588 && !integer_zerop (gimple_call_arg (stmt, 1)))
2589 ctx->cancellable = true;
2590 kind = "parallel";
2591 break;
2592 case 2:
2593 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
2594 || gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR)
2595 bad = "#pragma omp for";
2596 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2597 == BUILT_IN_GOMP_CANCEL
2598 && !integer_zerop (gimple_call_arg (stmt, 1)))
2600 ctx->cancellable = true;
2601 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2602 OMP_CLAUSE_NOWAIT))
2603 warning_at (gimple_location (stmt), 0,
2604 "%<#pragma omp cancel for%> inside "
2605 "%<nowait%> for construct");
2606 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2607 OMP_CLAUSE_ORDERED))
2608 warning_at (gimple_location (stmt), 0,
2609 "%<#pragma omp cancel for%> inside "
2610 "%<ordered%> for construct");
2612 kind = "for";
2613 break;
2614 case 4:
2615 if (gimple_code (ctx->stmt) != GIMPLE_OMP_SECTIONS
2616 && gimple_code (ctx->stmt) != GIMPLE_OMP_SECTION)
2617 bad = "#pragma omp sections";
2618 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2619 == BUILT_IN_GOMP_CANCEL
2620 && !integer_zerop (gimple_call_arg (stmt, 1)))
2622 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
2624 ctx->cancellable = true;
2625 if (omp_find_clause (gimple_omp_sections_clauses
2626 (ctx->stmt),
2627 OMP_CLAUSE_NOWAIT))
2628 warning_at (gimple_location (stmt), 0,
2629 "%<#pragma omp cancel sections%> inside "
2630 "%<nowait%> sections construct");
2632 else
2634 gcc_assert (ctx->outer
2635 && gimple_code (ctx->outer->stmt)
2636 == GIMPLE_OMP_SECTIONS);
2637 ctx->outer->cancellable = true;
2638 if (omp_find_clause (gimple_omp_sections_clauses
2639 (ctx->outer->stmt),
2640 OMP_CLAUSE_NOWAIT))
2641 warning_at (gimple_location (stmt), 0,
2642 "%<#pragma omp cancel sections%> inside "
2643 "%<nowait%> sections construct");
2646 kind = "sections";
2647 break;
2648 case 8:
2649 if (gimple_code (ctx->stmt) != GIMPLE_OMP_TASK)
2650 bad = "#pragma omp task";
2651 else
2653 for (omp_context *octx = ctx->outer;
2654 octx; octx = octx->outer)
2656 switch (gimple_code (octx->stmt))
2658 case GIMPLE_OMP_TASKGROUP:
2659 break;
2660 case GIMPLE_OMP_TARGET:
2661 if (gimple_omp_target_kind (octx->stmt)
2662 != GF_OMP_TARGET_KIND_REGION)
2663 continue;
2664 /* FALLTHRU */
2665 case GIMPLE_OMP_PARALLEL:
2666 case GIMPLE_OMP_TEAMS:
2667 error_at (gimple_location (stmt),
2668 "%<%s taskgroup%> construct not closely "
2669 "nested inside of %<taskgroup%> region",
2670 construct);
2671 return false;
2672 default:
2673 continue;
2675 break;
2677 ctx->cancellable = true;
2679 kind = "taskgroup";
2680 break;
2681 default:
2682 error_at (gimple_location (stmt), "invalid arguments");
2683 return false;
2685 if (bad)
2687 error_at (gimple_location (stmt),
2688 "%<%s %s%> construct not closely nested inside of %qs",
2689 construct, kind, bad);
2690 return false;
2693 /* FALLTHRU */
2694 case GIMPLE_OMP_SECTIONS:
2695 case GIMPLE_OMP_SINGLE:
2696 for (; ctx != NULL; ctx = ctx->outer)
2697 switch (gimple_code (ctx->stmt))
2699 case GIMPLE_OMP_FOR:
2700 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
2701 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
2702 break;
2703 /* FALLTHRU */
2704 case GIMPLE_OMP_SECTIONS:
2705 case GIMPLE_OMP_SINGLE:
2706 case GIMPLE_OMP_ORDERED:
2707 case GIMPLE_OMP_MASTER:
2708 case GIMPLE_OMP_TASK:
2709 case GIMPLE_OMP_CRITICAL:
2710 if (is_gimple_call (stmt))
2712 if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2713 != BUILT_IN_GOMP_BARRIER)
2714 return true;
2715 error_at (gimple_location (stmt),
2716 "barrier region may not be closely nested inside "
2717 "of work-sharing, %<critical%>, %<ordered%>, "
2718 "%<master%>, explicit %<task%> or %<taskloop%> "
2719 "region");
2720 return false;
2722 error_at (gimple_location (stmt),
2723 "work-sharing region may not be closely nested inside "
2724 "of work-sharing, %<critical%>, %<ordered%>, "
2725 "%<master%>, explicit %<task%> or %<taskloop%> region");
2726 return false;
2727 case GIMPLE_OMP_PARALLEL:
2728 case GIMPLE_OMP_TEAMS:
2729 return true;
2730 case GIMPLE_OMP_TARGET:
2731 if (gimple_omp_target_kind (ctx->stmt)
2732 == GF_OMP_TARGET_KIND_REGION)
2733 return true;
2734 break;
2735 default:
2736 break;
2738 break;
2739 case GIMPLE_OMP_MASTER:
2740 for (; ctx != NULL; ctx = ctx->outer)
2741 switch (gimple_code (ctx->stmt))
2743 case GIMPLE_OMP_FOR:
2744 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
2745 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
2746 break;
2747 /* FALLTHRU */
2748 case GIMPLE_OMP_SECTIONS:
2749 case GIMPLE_OMP_SINGLE:
2750 case GIMPLE_OMP_TASK:
2751 error_at (gimple_location (stmt),
2752 "%<master%> region may not be closely nested inside "
2753 "of work-sharing, explicit %<task%> or %<taskloop%> "
2754 "region");
2755 return false;
2756 case GIMPLE_OMP_PARALLEL:
2757 case GIMPLE_OMP_TEAMS:
2758 return true;
2759 case GIMPLE_OMP_TARGET:
2760 if (gimple_omp_target_kind (ctx->stmt)
2761 == GF_OMP_TARGET_KIND_REGION)
2762 return true;
2763 break;
2764 default:
2765 break;
2767 break;
2768 case GIMPLE_OMP_TASK:
2769 for (c = gimple_omp_task_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2770 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
2771 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
2772 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
2774 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
2775 error_at (OMP_CLAUSE_LOCATION (c),
2776 "%<depend(%s)%> is only allowed in %<omp ordered%>",
2777 kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
2778 return false;
2780 break;
2781 case GIMPLE_OMP_ORDERED:
2782 for (c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
2783 c; c = OMP_CLAUSE_CHAIN (c))
2785 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
2787 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREADS
2788 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SIMD);
2789 continue;
2791 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
2792 if (kind == OMP_CLAUSE_DEPEND_SOURCE
2793 || kind == OMP_CLAUSE_DEPEND_SINK)
2795 tree oclause;
2796 /* Look for containing ordered(N) loop. */
2797 if (ctx == NULL
2798 || gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
2799 || (oclause
2800 = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2801 OMP_CLAUSE_ORDERED)) == NULL_TREE)
2803 error_at (OMP_CLAUSE_LOCATION (c),
2804 "%<ordered%> construct with %<depend%> clause "
2805 "must be closely nested inside an %<ordered%> "
2806 "loop");
2807 return false;
2809 else if (OMP_CLAUSE_ORDERED_EXPR (oclause) == NULL_TREE)
2811 error_at (OMP_CLAUSE_LOCATION (c),
2812 "%<ordered%> construct with %<depend%> clause "
2813 "must be closely nested inside a loop with "
2814 "%<ordered%> clause with a parameter");
2815 return false;
2818 else
2820 error_at (OMP_CLAUSE_LOCATION (c),
2821 "invalid depend kind in omp %<ordered%> %<depend%>");
2822 return false;
2825 c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
2826 if (omp_find_clause (c, OMP_CLAUSE_SIMD))
2828 /* An ordered simd construct must be closely nested inside a simd
2829 region, and a simd region must not contain constructs other than
2830 ordered simd; therefore an ordered simd construct is either
2831 orphaned, or ctx->stmt must be a simd. The latter case has
2832 already been handled earlier. */
2833 if (ctx != NULL)
2835 error_at (gimple_location (stmt),
2836 "%<ordered%> %<simd%> must be closely nested inside "
2837 "%<simd%> region");
2838 return false;
2841 for (; ctx != NULL; ctx = ctx->outer)
2842 switch (gimple_code (ctx->stmt))
2844 case GIMPLE_OMP_CRITICAL:
2845 case GIMPLE_OMP_TASK:
2846 case GIMPLE_OMP_ORDERED:
2847 ordered_in_taskloop:
2848 error_at (gimple_location (stmt),
2849 "%<ordered%> region may not be closely nested inside "
2850 "of %<critical%>, %<ordered%>, explicit %<task%> or "
2851 "%<taskloop%> region");
2852 return false;
2853 case GIMPLE_OMP_FOR:
2854 if (gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP)
2855 goto ordered_in_taskloop;
2856 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2857 OMP_CLAUSE_ORDERED) == NULL)
2859 error_at (gimple_location (stmt),
2860 "%<ordered%> region must be closely nested inside "
2861 "a loop region with an %<ordered%> clause");
2862 return false;
2864 return true;
2865 case GIMPLE_OMP_TARGET:
2866 if (gimple_omp_target_kind (ctx->stmt)
2867 != GF_OMP_TARGET_KIND_REGION)
2868 break;
2869 /* FALLTHRU */
2870 case GIMPLE_OMP_PARALLEL:
2871 case GIMPLE_OMP_TEAMS:
2872 error_at (gimple_location (stmt),
2873 "%<ordered%> region must be closely nested inside "
2874 "a loop region with an %<ordered%> clause");
2875 return false;
2876 default:
2877 break;
2879 break;
2880 case GIMPLE_OMP_CRITICAL:
2882 tree this_stmt_name
2883 = gimple_omp_critical_name (as_a <gomp_critical *> (stmt));
2884 for (; ctx != NULL; ctx = ctx->outer)
2885 if (gomp_critical *other_crit
2886 = dyn_cast <gomp_critical *> (ctx->stmt))
2887 if (this_stmt_name == gimple_omp_critical_name (other_crit))
2889 error_at (gimple_location (stmt),
2890 "%<critical%> region may not be nested inside "
2891 "a %<critical%> region with the same name");
2892 return false;
2895 break;
2896 case GIMPLE_OMP_TEAMS:
2897 if (ctx == NULL
2898 || gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET
2899 || gimple_omp_target_kind (ctx->stmt) != GF_OMP_TARGET_KIND_REGION)
2901 error_at (gimple_location (stmt),
2902 "%<teams%> construct not closely nested inside of "
2903 "%<target%> construct");
2904 return false;
2906 break;
2907 case GIMPLE_OMP_TARGET:
2908 for (c = gimple_omp_target_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2909 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
2910 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
2911 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
2913 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
2914 error_at (OMP_CLAUSE_LOCATION (c),
2915 "%<depend(%s)%> is only allowed in %<omp ordered%>",
2916 kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
2917 return false;
2919 if (is_gimple_omp_offloaded (stmt)
2920 && oacc_get_fn_attrib (cfun->decl) != NULL)
2922 error_at (gimple_location (stmt),
2923 "OpenACC region inside of OpenACC routine, nested "
2924 "parallelism not supported yet");
2925 return false;
2927 for (; ctx != NULL; ctx = ctx->outer)
2929 if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET)
2931 if (is_gimple_omp (stmt)
2932 && is_gimple_omp_oacc (stmt)
2933 && is_gimple_omp (ctx->stmt))
2935 error_at (gimple_location (stmt),
2936 "OpenACC construct inside of non-OpenACC region");
2937 return false;
2939 continue;
2942 const char *stmt_name, *ctx_stmt_name;
2943 switch (gimple_omp_target_kind (stmt))
2945 case GF_OMP_TARGET_KIND_REGION: stmt_name = "target"; break;
2946 case GF_OMP_TARGET_KIND_DATA: stmt_name = "target data"; break;
2947 case GF_OMP_TARGET_KIND_UPDATE: stmt_name = "target update"; break;
2948 case GF_OMP_TARGET_KIND_ENTER_DATA:
2949 stmt_name = "target enter data"; break;
2950 case GF_OMP_TARGET_KIND_EXIT_DATA:
2951 stmt_name = "target exit data"; break;
2952 case GF_OMP_TARGET_KIND_OACC_PARALLEL: stmt_name = "parallel"; break;
2953 case GF_OMP_TARGET_KIND_OACC_KERNELS: stmt_name = "kernels"; break;
2954 case GF_OMP_TARGET_KIND_OACC_DATA: stmt_name = "data"; break;
2955 case GF_OMP_TARGET_KIND_OACC_UPDATE: stmt_name = "update"; break;
2956 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
2957 stmt_name = "enter/exit data"; break;
2958 case GF_OMP_TARGET_KIND_OACC_HOST_DATA: stmt_name = "host_data";
2959 break;
2960 default: gcc_unreachable ();
2962 switch (gimple_omp_target_kind (ctx->stmt))
2964 case GF_OMP_TARGET_KIND_REGION: ctx_stmt_name = "target"; break;
2965 case GF_OMP_TARGET_KIND_DATA: ctx_stmt_name = "target data"; break;
2966 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
2967 ctx_stmt_name = "parallel"; break;
2968 case GF_OMP_TARGET_KIND_OACC_KERNELS:
2969 ctx_stmt_name = "kernels"; break;
2970 case GF_OMP_TARGET_KIND_OACC_DATA: ctx_stmt_name = "data"; break;
2971 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
2972 ctx_stmt_name = "host_data"; break;
2973 default: gcc_unreachable ();
2976 /* OpenACC/OpenMP mismatch? */
2977 if (is_gimple_omp_oacc (stmt)
2978 != is_gimple_omp_oacc (ctx->stmt))
2980 error_at (gimple_location (stmt),
2981 "%s %qs construct inside of %s %qs region",
2982 (is_gimple_omp_oacc (stmt)
2983 ? "OpenACC" : "OpenMP"), stmt_name,
2984 (is_gimple_omp_oacc (ctx->stmt)
2985 ? "OpenACC" : "OpenMP"), ctx_stmt_name);
2986 return false;
2988 if (is_gimple_omp_offloaded (ctx->stmt))
2990 /* No GIMPLE_OMP_TARGET inside offloaded OpenACC CTX. */
2991 if (is_gimple_omp_oacc (ctx->stmt))
2993 error_at (gimple_location (stmt),
2994 "%qs construct inside of %qs region",
2995 stmt_name, ctx_stmt_name);
2996 return false;
2998 else
3000 warning_at (gimple_location (stmt), 0,
3001 "%qs construct inside of %qs region",
3002 stmt_name, ctx_stmt_name);
3006 break;
3007 default:
3008 break;
3010 return true;
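/* Example of a violation caught above (illustration only):

     #pragma omp parallel
     #pragma omp teams
     ;

   is rejected by the GIMPLE_OMP_TEAMS case ("not closely nested inside
   of target construct"), whereas a teams construct immediately inside
   "#pragma omp target" is accepted.  */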
3014 /* Helper function for scan_omp.
3016 Callback for walk_tree or operators in walk_gimple_stmt used to
3017 scan for OMP directives in TP. */
3019 static tree
3020 scan_omp_1_op (tree *tp, int *walk_subtrees, void *data)
3022 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
3023 omp_context *ctx = (omp_context *) wi->info;
3024 tree t = *tp;
3026 switch (TREE_CODE (t))
3028 case VAR_DECL:
3029 case PARM_DECL:
3030 case LABEL_DECL:
3031 case RESULT_DECL:
3032 if (ctx)
3034 tree repl = remap_decl (t, &ctx->cb);
3035 gcc_checking_assert (TREE_CODE (repl) != ERROR_MARK);
3036 *tp = repl;
3038 break;
3040 default:
3041 if (ctx && TYPE_P (t))
3042 *tp = remap_type (t, &ctx->cb);
3043 else if (!DECL_P (t))
3045 *walk_subtrees = 1;
3046 if (ctx)
3048 tree tem = remap_type (TREE_TYPE (t), &ctx->cb);
3049 if (tem != TREE_TYPE (t))
3051 if (TREE_CODE (t) == INTEGER_CST)
3052 *tp = wide_int_to_tree (tem, t);
3053 else
3054 TREE_TYPE (t) = tem;
3058 break;
3061 return NULL_TREE;
3064 /* Return true if FNDECL is a setjmp or a longjmp. */
3066 static bool
3067 setjmp_or_longjmp_p (const_tree fndecl)
3069 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
3070 && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_SETJMP
3071 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_LONGJMP))
3072 return true;
3074 tree declname = DECL_NAME (fndecl);
3075 if (!declname)
3076 return false;
3077 const char *name = IDENTIFIER_POINTER (declname);
3078 return !strcmp (name, "setjmp") || !strcmp (name, "longjmp");
3082 /* Helper function for scan_omp.
3084 Callback for walk_gimple_stmt used to scan for OMP directives in
3085 the current statement in GSI. */
3087 static tree
3088 scan_omp_1_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
3089 struct walk_stmt_info *wi)
3091 gimple *stmt = gsi_stmt (*gsi);
3092 omp_context *ctx = (omp_context *) wi->info;
3094 if (gimple_has_location (stmt))
3095 input_location = gimple_location (stmt);
3097 /* Check the nesting restrictions. */
3098 bool remove = false;
3099 if (is_gimple_omp (stmt))
3100 remove = !check_omp_nesting_restrictions (stmt, ctx);
3101 else if (is_gimple_call (stmt))
3103 tree fndecl = gimple_call_fndecl (stmt);
3104 if (fndecl)
3106 if (setjmp_or_longjmp_p (fndecl)
3107 && ctx
3108 && gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
3109 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
3111 remove = true;
3112 error_at (gimple_location (stmt),
3113 "setjmp/longjmp inside simd construct");
3115 else if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3116 switch (DECL_FUNCTION_CODE (fndecl))
3118 case BUILT_IN_GOMP_BARRIER:
3119 case BUILT_IN_GOMP_CANCEL:
3120 case BUILT_IN_GOMP_CANCELLATION_POINT:
3121 case BUILT_IN_GOMP_TASKYIELD:
3122 case BUILT_IN_GOMP_TASKWAIT:
3123 case BUILT_IN_GOMP_TASKGROUP_START:
3124 case BUILT_IN_GOMP_TASKGROUP_END:
3125 remove = !check_omp_nesting_restrictions (stmt, ctx);
3126 break;
3127 default:
3128 break;
3132 if (remove)
3134 stmt = gimple_build_nop ();
3135 gsi_replace (gsi, stmt, false);
3138 *handled_ops_p = true;
3140 switch (gimple_code (stmt))
3142 case GIMPLE_OMP_PARALLEL:
3143 taskreg_nesting_level++;
3144 scan_omp_parallel (gsi, ctx);
3145 taskreg_nesting_level--;
3146 break;
3148 case GIMPLE_OMP_TASK:
3149 taskreg_nesting_level++;
3150 scan_omp_task (gsi, ctx);
3151 taskreg_nesting_level--;
3152 break;
3154 case GIMPLE_OMP_FOR:
3155 if (((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
3156 & GF_OMP_FOR_KIND_MASK) == GF_OMP_FOR_KIND_SIMD)
3157 && omp_maybe_offloaded_ctx (ctx)
3158 && omp_max_simt_vf ())
3159 scan_omp_simd (gsi, as_a <gomp_for *> (stmt), ctx);
3160 else
3161 scan_omp_for (as_a <gomp_for *> (stmt), ctx);
3162 break;
3164 case GIMPLE_OMP_SECTIONS:
3165 scan_omp_sections (as_a <gomp_sections *> (stmt), ctx);
3166 break;
3168 case GIMPLE_OMP_SINGLE:
3169 scan_omp_single (as_a <gomp_single *> (stmt), ctx);
3170 break;
3172 case GIMPLE_OMP_SECTION:
3173 case GIMPLE_OMP_MASTER:
3174 case GIMPLE_OMP_TASKGROUP:
3175 case GIMPLE_OMP_ORDERED:
3176 case GIMPLE_OMP_CRITICAL:
3177 case GIMPLE_OMP_GRID_BODY:
3178 ctx = new_omp_context (stmt, ctx);
3179 scan_omp (gimple_omp_body_ptr (stmt), ctx);
3180 break;
3182 case GIMPLE_OMP_TARGET:
3183 scan_omp_target (as_a <gomp_target *> (stmt), ctx);
3184 break;
3186 case GIMPLE_OMP_TEAMS:
3187 scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
3188 break;
3190 case GIMPLE_BIND:
3192 tree var;
3194 *handled_ops_p = false;
3195 if (ctx)
3196 for (var = gimple_bind_vars (as_a <gbind *> (stmt));
3197 var ;
3198 var = DECL_CHAIN (var))
3199 insert_decl_map (&ctx->cb, var, var);
3201 break;
3202 default:
3203 *handled_ops_p = false;
3204 break;
3207 return NULL_TREE;
3211 /* Scan all the statements starting at the current statement. CTX
3212 contains context information about the OMP directives and
3213 clauses found during the scan. */
3215 static void
3216 scan_omp (gimple_seq *body_p, omp_context *ctx)
3218 location_t saved_location;
3219 struct walk_stmt_info wi;
3221 memset (&wi, 0, sizeof (wi));
3222 wi.info = ctx;
3223 wi.want_locations = true;
3225 saved_location = input_location;
3226 walk_gimple_seq_mod (body_p, scan_omp_1_stmt, scan_omp_1_op, &wi);
3227 input_location = saved_location;
3230 /* Re-gimplification and code generation routines. */
3232 /* If a context was created for STMT when it was scanned, return it. */
3234 static omp_context *
3235 maybe_lookup_ctx (gimple *stmt)
3237 splay_tree_node n;
3238 n = splay_tree_lookup (all_contexts, (splay_tree_key) stmt);
3239 return n ? (omp_context *) n->value : NULL;
3243 /* Find the mapping for DECL in CTX or the immediately enclosing
3244 context that has a mapping for DECL.
3246 If CTX is a nested parallel directive, we may have to use the decl
3247 mappings created in CTX's parent context. Suppose that we have the
3248 following parallel nesting (variable UIDs shown for clarity):
3250 iD.1562 = 0;
3251 #omp parallel shared(iD.1562) -> outer parallel
3252 iD.1562 = iD.1562 + 1;
3254 #omp parallel shared (iD.1562) -> inner parallel
3255 iD.1562 = iD.1562 - 1;
3257 Each parallel structure will create a distinct .omp_data_s structure
3258 for copying iD.1562 in/out of the directive:
3260 outer parallel .omp_data_s.1.i -> iD.1562
3261 inner parallel .omp_data_s.2.i -> iD.1562
3263 A shared variable mapping will produce a copy-out operation before
3264 the parallel directive and a copy-in operation after it. So, in
3265 this case we would have:
3267 iD.1562 = 0;
3268 .omp_data_o.1.i = iD.1562;
3269 #omp parallel shared(iD.1562) -> outer parallel
3270 .omp_data_i.1 = &.omp_data_o.1
3271 .omp_data_i.1->i = .omp_data_i.1->i + 1;
3273 .omp_data_o.2.i = iD.1562; -> **
3274 #omp parallel shared(iD.1562) -> inner parallel
3275 .omp_data_i.2 = &.omp_data_o.2
3276 .omp_data_i.2->i = .omp_data_i.2->i - 1;
3279 ** This is a problem. The symbol iD.1562 cannot be referenced
3280 inside the body of the outer parallel region. But since we are
3281 emitting this copy operation while expanding the inner parallel
3282 directive, we need to access the CTX structure of the outer
3283 parallel directive to get the correct mapping:
3285 .omp_data_o.2.i = .omp_data_i.1->i
3287 Since there may be other workshare or parallel directives enclosing
3288 the parallel directive, it may be necessary to walk up the context
3289 parent chain. This is not a problem in general because nested
3290 parallelism happens only rarely. */
3292 static tree
3293 lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
3295 tree t;
3296 omp_context *up;
3298 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
3299 t = maybe_lookup_decl (decl, up);
3301 gcc_assert (!ctx->is_nested || t || is_global_var (decl));
3303 return t ? t : decl;
3307 /* Similar to lookup_decl_in_outer_ctx, but return DECL if not found
3308 in outer contexts. */
3310 static tree
3311 maybe_lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
3313 tree t = NULL;
3314 omp_context *up;
3316 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
3317 t = maybe_lookup_decl (decl, up);
3319 return t ? t : decl;
3323 /* Construct the initialization value for reduction operation OP. */
3325 tree
3326 omp_reduction_init_op (location_t loc, enum tree_code op, tree type)
3328 switch (op)
3330 case PLUS_EXPR:
3331 case MINUS_EXPR:
3332 case BIT_IOR_EXPR:
3333 case BIT_XOR_EXPR:
3334 case TRUTH_OR_EXPR:
3335 case TRUTH_ORIF_EXPR:
3336 case TRUTH_XOR_EXPR:
3337 case NE_EXPR:
3338 return build_zero_cst (type);
3340 case MULT_EXPR:
3341 case TRUTH_AND_EXPR:
3342 case TRUTH_ANDIF_EXPR:
3343 case EQ_EXPR:
3344 return fold_convert_loc (loc, type, integer_one_node);
3346 case BIT_AND_EXPR:
3347 return fold_convert_loc (loc, type, integer_minus_one_node);
3349 case MAX_EXPR:
3350 if (SCALAR_FLOAT_TYPE_P (type))
3352 REAL_VALUE_TYPE max, min;
3353 if (HONOR_INFINITIES (type))
3355 real_inf (&max);
3356 real_arithmetic (&min, NEGATE_EXPR, &max, NULL);
3358 else
3359 real_maxval (&min, 1, TYPE_MODE (type));
3360 return build_real (type, min);
3362 else if (POINTER_TYPE_P (type))
3364 wide_int min
3365 = wi::min_value (TYPE_PRECISION (type), TYPE_SIGN (type));
3366 return wide_int_to_tree (type, min);
3368 else
3370 gcc_assert (INTEGRAL_TYPE_P (type));
3371 return TYPE_MIN_VALUE (type);
3374 case MIN_EXPR:
3375 if (SCALAR_FLOAT_TYPE_P (type))
3377 REAL_VALUE_TYPE max;
3378 if (HONOR_INFINITIES (type))
3379 real_inf (&max);
3380 else
3381 real_maxval (&max, 0, TYPE_MODE (type));
3382 return build_real (type, max);
3384 else if (POINTER_TYPE_P (type))
3386 wide_int max
3387 = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
3388 return wide_int_to_tree (type, max);
3390 else
3392 gcc_assert (INTEGRAL_TYPE_P (type));
3393 return TYPE_MAX_VALUE (type);
3396 default:
3397 gcc_unreachable ();
3401 /* Construct the initialization value for reduction CLAUSE. */
3403 tree
3404 omp_reduction_init (tree clause, tree type)
3406 return omp_reduction_init_op (OMP_CLAUSE_LOCATION (clause),
3407 OMP_CLAUSE_REDUCTION_CODE (clause), type);
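/* For instance (summarizing the cases above, not additional rules):

     reduction(+:x)   -> x is initialized to 0
     reduction(*:x)   -> 1
     reduction(&:x)   -> all bits set (-1)
     reduction(max:x) -> -Inf if the float type honors infinities,
                         otherwise the most negative representable value

   so each thread's private copy starts at the identity element of the
   reduction operation.  */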
3410 /* Return alignment to be assumed for var in CLAUSE, which should be
3411 OMP_CLAUSE_ALIGNED. */
3413 static tree
3414 omp_clause_aligned_alignment (tree clause)
3416 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
3417 return OMP_CLAUSE_ALIGNED_ALIGNMENT (clause);
3419 /* Otherwise return the implementation-defined alignment. */
3420 unsigned int al = 1;
3421 machine_mode mode, vmode;
3422 int vs = targetm.vectorize.autovectorize_vector_sizes ();
3423 if (vs)
3424 vs = 1 << floor_log2 (vs);
3425 static enum mode_class classes[]
3426 = { MODE_INT, MODE_VECTOR_INT, MODE_FLOAT, MODE_VECTOR_FLOAT };
3427 for (int i = 0; i < 4; i += 2)
3428 for (mode = GET_CLASS_NARROWEST_MODE (classes[i]);
3429 mode != VOIDmode;
3430 mode = GET_MODE_WIDER_MODE (mode))
3432 vmode = targetm.vectorize.preferred_simd_mode (mode);
3433 if (GET_MODE_CLASS (vmode) != classes[i + 1])
3434 continue;
3435 while (vs
3436 && GET_MODE_SIZE (vmode) < vs
3437 && GET_MODE_2XWIDER_MODE (vmode) != VOIDmode)
3438 vmode = GET_MODE_2XWIDER_MODE (vmode);
3440 tree type = lang_hooks.types.type_for_mode (mode, 1);
3441 if (type == NULL_TREE || TYPE_MODE (type) != mode)
3442 continue;
3443 type = build_vector_type (type, GET_MODE_SIZE (vmode)
3444 / GET_MODE_SIZE (mode));
3445 if (TYPE_MODE (type) != vmode)
3446 continue;
3447 if (TYPE_ALIGN_UNIT (type) > al)
3448 al = TYPE_ALIGN_UNIT (type);
3450 return build_int_cst (integer_type_node, al);
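/* E.g. (illustrative): "aligned(p : 32)" carries an explicit alignment,
   which is returned directly; for a bare "aligned(p)" the loop above
   probes the target's preferred SIMD modes and returns the unit
   alignment of the widest suitable vector type, so the fallback value
   is target-dependent.  */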
3454 /* This structure is part of the interface between lower_rec_simd_input_clauses
3455 and lower_rec_input_clauses. */
3457 struct omplow_simd_context {
3458 tree idx;
3459 tree lane;
3460 vec<tree, va_heap> simt_eargs;
3461 gimple_seq simt_dlist;
3462 int max_vf;
3463 bool is_simt;
3466 /* Helper function of lower_rec_input_clauses, used for #pragma omp simd
3467 privatization. */
3469 static bool
3470 lower_rec_simd_input_clauses (tree new_var, omp_context *ctx,
3471 omplow_simd_context *sctx, tree &ivar, tree &lvar)
3473 if (sctx->max_vf == 0)
3475 sctx->max_vf = sctx->is_simt ? omp_max_simt_vf () : omp_max_vf ();
3476 if (sctx->max_vf > 1)
3478 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3479 OMP_CLAUSE_SAFELEN);
3480 if (c
3481 && (TREE_CODE (OMP_CLAUSE_SAFELEN_EXPR (c)) != INTEGER_CST
3482 || tree_int_cst_sgn (OMP_CLAUSE_SAFELEN_EXPR (c)) != 1))
3483 sctx->max_vf = 1;
3484 else if (c && compare_tree_int (OMP_CLAUSE_SAFELEN_EXPR (c),
3485 sctx->max_vf) == -1)
3486 sctx->max_vf = tree_to_shwi (OMP_CLAUSE_SAFELEN_EXPR (c));
3488 if (sctx->max_vf > 1)
3490 sctx->idx = create_tmp_var (unsigned_type_node);
3491 sctx->lane = create_tmp_var (unsigned_type_node);
3494 if (sctx->max_vf == 1)
3495 return false;
3497 if (sctx->is_simt)
3499 if (is_gimple_reg (new_var))
3501 ivar = lvar = new_var;
3502 return true;
3504 tree type = TREE_TYPE (new_var), ptype = build_pointer_type (type);
3505 ivar = lvar = create_tmp_var (type);
3506 TREE_ADDRESSABLE (ivar) = 1;
3507 DECL_ATTRIBUTES (ivar) = tree_cons (get_identifier ("omp simt private"),
3508 NULL, DECL_ATTRIBUTES (ivar));
3509 sctx->simt_eargs.safe_push (build1 (ADDR_EXPR, ptype, ivar));
3510 tree clobber = build_constructor (type, NULL);
3511 TREE_THIS_VOLATILE (clobber) = 1;
3512 gimple *g = gimple_build_assign (ivar, clobber);
3513 gimple_seq_add_stmt (&sctx->simt_dlist, g);
3515 else
3517 tree atype = build_array_type_nelts (TREE_TYPE (new_var), sctx->max_vf);
3518 tree avar = create_tmp_var_raw (atype);
3519 if (TREE_ADDRESSABLE (new_var))
3520 TREE_ADDRESSABLE (avar) = 1;
3521 DECL_ATTRIBUTES (avar)
3522 = tree_cons (get_identifier ("omp simd array"), NULL,
3523 DECL_ATTRIBUTES (avar));
3524 gimple_add_tmp_var (avar);
3525 ivar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, sctx->idx,
3526 NULL_TREE, NULL_TREE);
3527 lvar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, sctx->lane,
3528 NULL_TREE, NULL_TREE);
3530 if (DECL_P (new_var))
3532 SET_DECL_VALUE_EXPR (new_var, lvar);
3533 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
3535 return true;
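/* Illustration (hypothetical names): for "#pragma omp simd private(t)"
   on the non-SIMT path, t is backed by an "omp simd array"

     <type of t> D.avar[max_vf];

   IVAR becomes D.avar[idx] (the per-iteration element) and LVAR becomes
   D.avar[lane], which is installed as t's DECL_VALUE_EXPR, so that the
   vectorizer can later keep one element per SIMD lane.  */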
3538 /* Helper function of lower_rec_input_clauses. For a reference
3539 in a simd reduction, add an underlying variable that it will reference. */
3541 static void
3542 handle_simd_reference (location_t loc, tree new_vard, gimple_seq *ilist)
3544 tree z = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_vard)));
3545 if (TREE_CONSTANT (z))
3547 z = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_vard)),
3548 get_name (new_vard));
3549 gimple_add_tmp_var (z);
3550 TREE_ADDRESSABLE (z) = 1;
3551 z = build_fold_addr_expr_loc (loc, z);
3552 gimplify_assign (new_vard, z, ilist);
3556 /* Generate code to implement the input clauses, FIRSTPRIVATE and COPYIN,
3557 from the receiver (aka child) side and initializers for REFERENCE_TYPE
3558 private variables. Initialization statements go in ILIST, while calls
3559 to destructors go in DLIST. */
3561 static void
3562 lower_rec_input_clauses (tree clauses, gimple_seq *ilist, gimple_seq *dlist,
3563 omp_context *ctx, struct omp_for_data *fd)
3565 tree c, dtor, copyin_seq, x, ptr;
3566 bool copyin_by_ref = false;
3567 bool lastprivate_firstprivate = false;
3568 bool reduction_omp_orig_ref = false;
3569 int pass;
3570 bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
3571 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD);
3572 omplow_simd_context sctx = omplow_simd_context ();
3573 tree simt_lane = NULL_TREE, simtrec = NULL_TREE;
3574 tree ivar = NULL_TREE, lvar = NULL_TREE, uid = NULL_TREE;
3575 gimple_seq llist[3] = { };
3577 copyin_seq = NULL;
3578 sctx.is_simt = is_simd && omp_find_clause (clauses, OMP_CLAUSE__SIMT_);
3580 /* Set max_vf=1 (which will later enforce safelen=1) in simd loops
3581 with data-sharing clauses referencing variable-sized vars. That
3582 is unnecessarily hard to support and very unlikely to result in
3583 vectorized code anyway. */
3584 if (is_simd)
3585 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
3586 switch (OMP_CLAUSE_CODE (c))
3588 case OMP_CLAUSE_LINEAR:
3589 if (OMP_CLAUSE_LINEAR_ARRAY (c))
3590 sctx.max_vf = 1;
3591 /* FALLTHRU */
3592 case OMP_CLAUSE_PRIVATE:
3593 case OMP_CLAUSE_FIRSTPRIVATE:
3594 case OMP_CLAUSE_LASTPRIVATE:
3595 if (is_variable_sized (OMP_CLAUSE_DECL (c)))
3596 sctx.max_vf = 1;
3597 break;
3598 case OMP_CLAUSE_REDUCTION:
3599 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
3600 || is_variable_sized (OMP_CLAUSE_DECL (c)))
3601 sctx.max_vf = 1;
3602 break;
3603 default:
3604 continue;
3607 /* Add a placeholder for simduid. */
3608 if (sctx.is_simt && sctx.max_vf != 1)
3609 sctx.simt_eargs.safe_push (NULL_TREE);
3611 /* Do all the fixed-sized types in the first pass, and the variable-sized
3612 types in the second pass. This makes sure that the scalar arguments to
3613 the variable-sized types are processed before we use them in the
3614 variable-sized operations. */
3615 for (pass = 0; pass < 2; ++pass)
3617 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
3619 enum omp_clause_code c_kind = OMP_CLAUSE_CODE (c);
3620 tree var, new_var;
3621 bool by_ref;
3622 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
3624 switch (c_kind)
3626 case OMP_CLAUSE_PRIVATE:
3627 if (OMP_CLAUSE_PRIVATE_DEBUG (c))
3628 continue;
3629 break;
3630 case OMP_CLAUSE_SHARED:
3631 /* Ignore shared directives in a teams construct. */
3632 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
3633 continue;
3634 if (maybe_lookup_decl (OMP_CLAUSE_DECL (c), ctx) == NULL)
3636 gcc_assert (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c)
3637 || is_global_var (OMP_CLAUSE_DECL (c)));
3638 continue;
3640 case OMP_CLAUSE_FIRSTPRIVATE:
3641 case OMP_CLAUSE_COPYIN:
3642 break;
3643 case OMP_CLAUSE_LINEAR:
3644 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
3645 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
3646 lastprivate_firstprivate = true;
3647 break;
3648 case OMP_CLAUSE_REDUCTION:
3649 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
3650 reduction_omp_orig_ref = true;
3651 break;
3652 case OMP_CLAUSE__LOOPTEMP_:
3653 /* Handle _looptemp_ clauses only on parallel/task. */
3654 if (fd)
3655 continue;
3656 break;
3657 case OMP_CLAUSE_LASTPRIVATE:
3658 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
3660 lastprivate_firstprivate = true;
3661 if (pass != 0 || is_taskloop_ctx (ctx))
3662 continue;
3664 /* Even without a corresponding firstprivate, if the
3665 decl is a Fortran allocatable, it needs an outer var
3666 reference. */
3667 else if (pass == 0
3668 && lang_hooks.decls.omp_private_outer_ref
3669 (OMP_CLAUSE_DECL (c)))
3670 lastprivate_firstprivate = true;
3671 break;
3672 case OMP_CLAUSE_ALIGNED:
3673 if (pass == 0)
3674 continue;
3675 var = OMP_CLAUSE_DECL (c);
3676 if (TREE_CODE (TREE_TYPE (var)) == POINTER_TYPE
3677 && !is_global_var (var))
3679 new_var = maybe_lookup_decl (var, ctx);
3680 if (new_var == NULL_TREE)
3681 new_var = maybe_lookup_decl_in_outer_ctx (var, ctx);
3682 x = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
3683 tree alarg = omp_clause_aligned_alignment (c);
3684 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
3685 x = build_call_expr_loc (clause_loc, x, 2, new_var, alarg);
3686 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
3687 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
3688 gimplify_and_add (x, ilist);
3690 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
3691 && is_global_var (var))
3693 tree ptype = build_pointer_type (TREE_TYPE (var)), t, t2;
3694 new_var = lookup_decl (var, ctx);
3695 t = maybe_lookup_decl_in_outer_ctx (var, ctx);
3696 t = build_fold_addr_expr_loc (clause_loc, t);
3697 t2 = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
3698 tree alarg = omp_clause_aligned_alignment (c);
3699 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
3700 t = build_call_expr_loc (clause_loc, t2, 2, t, alarg);
3701 t = fold_convert_loc (clause_loc, ptype, t);
3702 x = create_tmp_var (ptype);
3703 t = build2 (MODIFY_EXPR, ptype, x, t);
3704 gimplify_and_add (t, ilist);
3705 t = build_simple_mem_ref_loc (clause_loc, x);
3706 SET_DECL_VALUE_EXPR (new_var, t);
3707 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
3709 continue;
3710 default:
3711 continue;
3714 new_var = var = OMP_CLAUSE_DECL (c);
3715 if (c_kind == OMP_CLAUSE_REDUCTION && TREE_CODE (var) == MEM_REF)
3717 var = TREE_OPERAND (var, 0);
3718 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
3719 var = TREE_OPERAND (var, 0);
3720 if (TREE_CODE (var) == INDIRECT_REF
3721 || TREE_CODE (var) == ADDR_EXPR)
3722 var = TREE_OPERAND (var, 0);
3723 if (is_variable_sized (var))
3725 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
3726 var = DECL_VALUE_EXPR (var);
3727 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
3728 var = TREE_OPERAND (var, 0);
3729 gcc_assert (DECL_P (var));
3731 new_var = var;
3733 if (c_kind != OMP_CLAUSE_COPYIN)
3734 new_var = lookup_decl (var, ctx);
3736 if (c_kind == OMP_CLAUSE_SHARED || c_kind == OMP_CLAUSE_COPYIN)
3738 if (pass != 0)
3739 continue;
3741 /* C/C++ array section reductions. */
3742 else if (c_kind == OMP_CLAUSE_REDUCTION
3743 && var != OMP_CLAUSE_DECL (c))
3745 if (pass == 0)
3746 continue;
3748 tree bias = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
3749 tree orig_var = TREE_OPERAND (OMP_CLAUSE_DECL (c), 0);
3750 if (TREE_CODE (orig_var) == POINTER_PLUS_EXPR)
3752 tree b = TREE_OPERAND (orig_var, 1);
3753 b = maybe_lookup_decl (b, ctx);
3754 if (b == NULL)
3756 b = TREE_OPERAND (orig_var, 1);
3757 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
3759 if (integer_zerop (bias))
3760 bias = b;
3761 else
3763 bias = fold_convert_loc (clause_loc,
3764 TREE_TYPE (b), bias);
3765 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
3766 TREE_TYPE (b), b, bias);
3768 orig_var = TREE_OPERAND (orig_var, 0);
3770 if (TREE_CODE (orig_var) == INDIRECT_REF
3771 || TREE_CODE (orig_var) == ADDR_EXPR)
3772 orig_var = TREE_OPERAND (orig_var, 0);
3773 tree d = OMP_CLAUSE_DECL (c);
3774 tree type = TREE_TYPE (d);
3775 gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
3776 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
3777 const char *name = get_name (orig_var);
3778 if (TREE_CONSTANT (v))
3780 x = create_tmp_var_raw (type, name);
3781 gimple_add_tmp_var (x);
3782 TREE_ADDRESSABLE (x) = 1;
3783 x = build_fold_addr_expr_loc (clause_loc, x);
3785 else
3787 tree atmp
3788 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
3789 tree t = maybe_lookup_decl (v, ctx);
3790 if (t)
3791 v = t;
3792 else
3793 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
3794 gimplify_expr (&v, ilist, NULL, is_gimple_val, fb_rvalue);
3795 t = fold_build2_loc (clause_loc, PLUS_EXPR,
3796 TREE_TYPE (v), v,
3797 build_int_cst (TREE_TYPE (v), 1));
3798 t = fold_build2_loc (clause_loc, MULT_EXPR,
3799 TREE_TYPE (v), t,
3800 TYPE_SIZE_UNIT (TREE_TYPE (type)));
3801 tree al = size_int (TYPE_ALIGN (TREE_TYPE (type)));
3802 x = build_call_expr_loc (clause_loc, atmp, 2, t, al);
3805 tree ptype = build_pointer_type (TREE_TYPE (type));
3806 x = fold_convert_loc (clause_loc, ptype, x);
3807 tree y = create_tmp_var (ptype, name);
3808 gimplify_assign (y, x, ilist);
3809 x = y;
3810 tree yb = y;
3812 if (!integer_zerop (bias))
3814 bias = fold_convert_loc (clause_loc, pointer_sized_int_node,
3815 bias);
3816 yb = fold_convert_loc (clause_loc, pointer_sized_int_node,
3817 x);
3818 yb = fold_build2_loc (clause_loc, MINUS_EXPR,
3819 pointer_sized_int_node, yb, bias);
3820 x = fold_convert_loc (clause_loc, TREE_TYPE (x), yb);
3821 yb = create_tmp_var (ptype, name);
3822 gimplify_assign (yb, x, ilist);
3823 x = yb;
3826 d = TREE_OPERAND (d, 0);
3827 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
3828 d = TREE_OPERAND (d, 0);
3829 if (TREE_CODE (d) == ADDR_EXPR)
3831 if (orig_var != var)
3833 gcc_assert (is_variable_sized (orig_var));
3834 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var),
3835 x);
3836 gimplify_assign (new_var, x, ilist);
3837 tree new_orig_var = lookup_decl (orig_var, ctx);
3838 tree t = build_fold_indirect_ref (new_var);
3839 DECL_IGNORED_P (new_var) = 0;
3840 TREE_THIS_NOTRAP (t) = 1;
3841 SET_DECL_VALUE_EXPR (new_orig_var, t);
3842 DECL_HAS_VALUE_EXPR_P (new_orig_var) = 1;
3844 else
3846 x = build2 (MEM_REF, TREE_TYPE (new_var), x,
3847 build_int_cst (ptype, 0));
3848 SET_DECL_VALUE_EXPR (new_var, x);
3849 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
3852 else
3854 gcc_assert (orig_var == var);
3855 if (TREE_CODE (d) == INDIRECT_REF)
3857 x = create_tmp_var (ptype, name);
3858 TREE_ADDRESSABLE (x) = 1;
3859 gimplify_assign (x, yb, ilist);
3860 x = build_fold_addr_expr_loc (clause_loc, x);
3862 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
3863 gimplify_assign (new_var, x, ilist);
3865 tree y1 = create_tmp_var (ptype, NULL);
3866 gimplify_assign (y1, y, ilist);
3867 tree i2 = NULL_TREE, y2 = NULL_TREE;
3868 tree body2 = NULL_TREE, end2 = NULL_TREE;
3869 tree y3 = NULL_TREE, y4 = NULL_TREE;
3870 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) || is_simd)
3872 y2 = create_tmp_var (ptype, NULL);
3873 gimplify_assign (y2, y, ilist);
3874 tree ref = build_outer_var_ref (var, ctx);
3875 /* For references, build_outer_var_ref already performs the dereference. */
3876 if (TREE_CODE (d) == INDIRECT_REF)
3877 gcc_assert (omp_is_reference (var));
3878 else if (TREE_CODE (d) == ADDR_EXPR)
3879 ref = build_fold_addr_expr (ref);
3880 else if (omp_is_reference (var))
3881 ref = build_fold_addr_expr (ref);
3882 ref = fold_convert_loc (clause_loc, ptype, ref);
3883 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
3884 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
3886 y3 = create_tmp_var (ptype, NULL);
3887 gimplify_assign (y3, unshare_expr (ref), ilist);
3889 if (is_simd)
3891 y4 = create_tmp_var (ptype, NULL);
3892 gimplify_assign (y4, ref, dlist);
3895 tree i = create_tmp_var (TREE_TYPE (v), NULL);
3896 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), ilist);
3897 tree body = create_artificial_label (UNKNOWN_LOCATION);
3898 tree end = create_artificial_label (UNKNOWN_LOCATION);
3899 gimple_seq_add_stmt (ilist, gimple_build_label (body));
3900 if (y2)
3902 i2 = create_tmp_var (TREE_TYPE (v), NULL);
3903 gimplify_assign (i2, build_int_cst (TREE_TYPE (v), 0), dlist);
3904 body2 = create_artificial_label (UNKNOWN_LOCATION);
3905 end2 = create_artificial_label (UNKNOWN_LOCATION);
3906 gimple_seq_add_stmt (dlist, gimple_build_label (body2));
3908 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
3910 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
3911 tree decl_placeholder
3912 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
3913 SET_DECL_VALUE_EXPR (decl_placeholder,
3914 build_simple_mem_ref (y1));
3915 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
3916 SET_DECL_VALUE_EXPR (placeholder,
3917 y3 ? build_simple_mem_ref (y3)
3918 : error_mark_node);
3919 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
3920 x = lang_hooks.decls.omp_clause_default_ctor
3921 (c, build_simple_mem_ref (y1),
3922 y3 ? build_simple_mem_ref (y3) : NULL_TREE);
3923 if (x)
3924 gimplify_and_add (x, ilist);
3925 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
3927 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
3928 lower_omp (&tseq, ctx);
3929 gimple_seq_add_seq (ilist, tseq);
3931 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
3932 if (is_simd)
3934 SET_DECL_VALUE_EXPR (decl_placeholder,
3935 build_simple_mem_ref (y2));
3936 SET_DECL_VALUE_EXPR (placeholder,
3937 build_simple_mem_ref (y4));
3938 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
3939 lower_omp (&tseq, ctx);
3940 gimple_seq_add_seq (dlist, tseq);
3941 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
3943 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
3944 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 0;
3945 x = lang_hooks.decls.omp_clause_dtor
3946 (c, build_simple_mem_ref (y2));
3947 if (x)
3949 gimple_seq tseq = NULL;
3950 dtor = x;
3951 gimplify_stmt (&dtor, &tseq);
3952 gimple_seq_add_seq (dlist, tseq);
3955 else
3957 x = omp_reduction_init (c, TREE_TYPE (type));
3958 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
3960 /* reduction(-:var) sums up the partial results, so it
3961 acts identically to reduction(+:var). */
3962 if (code == MINUS_EXPR)
3963 code = PLUS_EXPR;
3965 gimplify_assign (build_simple_mem_ref (y1), x, ilist);
3966 if (is_simd)
3968 x = build2 (code, TREE_TYPE (type),
3969 build_simple_mem_ref (y4),
3970 build_simple_mem_ref (y2));
3971 gimplify_assign (build_simple_mem_ref (y4), x, dlist);
3974 gimple *g
3975 = gimple_build_assign (y1, POINTER_PLUS_EXPR, y1,
3976 TYPE_SIZE_UNIT (TREE_TYPE (type)));
3977 gimple_seq_add_stmt (ilist, g);
3978 if (y3)
3980 g = gimple_build_assign (y3, POINTER_PLUS_EXPR, y3,
3981 TYPE_SIZE_UNIT (TREE_TYPE (type)));
3982 gimple_seq_add_stmt (ilist, g);
3984 g = gimple_build_assign (i, PLUS_EXPR, i,
3985 build_int_cst (TREE_TYPE (i), 1));
3986 gimple_seq_add_stmt (ilist, g);
3987 g = gimple_build_cond (LE_EXPR, i, v, body, end);
3988 gimple_seq_add_stmt (ilist, g);
3989 gimple_seq_add_stmt (ilist, gimple_build_label (end));
3990 if (y2)
3992 g = gimple_build_assign (y2, POINTER_PLUS_EXPR, y2,
3993 TYPE_SIZE_UNIT (TREE_TYPE (type)));
3994 gimple_seq_add_stmt (dlist, g);
3995 if (y4)
3997 g = gimple_build_assign
3998 (y4, POINTER_PLUS_EXPR, y4,
3999 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4000 gimple_seq_add_stmt (dlist, g);
4002 g = gimple_build_assign (i2, PLUS_EXPR, i2,
4003 build_int_cst (TREE_TYPE (i2), 1));
4004 gimple_seq_add_stmt (dlist, g);
4005 g = gimple_build_cond (LE_EXPR, i2, v, body2, end2);
4006 gimple_seq_add_stmt (dlist, g);
4007 gimple_seq_add_stmt (dlist, gimple_build_label (end2));
4009 continue;
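/* Annotation (illustrative sketch, not upstream code): a C/C++ array
   section reduction such as

     int a[64];
     #pragma omp parallel for reduction(+:a[0:64])
     for (int i = 0; i < n; i++)
       a[i % 64] += i;

   is handled by the block above roughly as

     T *y1 = <private copy of the section>;
     for (i = 0; i <= <max index>; i++, y1++)
       *y1 = <reduction identity, e.g. 0 for plus, via omp_reduction_init>;

   with a matching element-wise merge loop (and, for UDRs, the lowered
   placeholder init/merge sequences) appended to DLIST.  */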
4011 else if (is_variable_sized (var))
4013 /* For variable sized types, we need to allocate the
4014 actual storage here. Call alloca and store the
4015 result in the pointer decl that we created elsewhere. */
4016 if (pass == 0)
4017 continue;
4019 if (c_kind != OMP_CLAUSE_FIRSTPRIVATE || !is_task_ctx (ctx))
4021 gcall *stmt;
4022 tree tmp, atmp;
4024 ptr = DECL_VALUE_EXPR (new_var);
4025 gcc_assert (TREE_CODE (ptr) == INDIRECT_REF);
4026 ptr = TREE_OPERAND (ptr, 0);
4027 gcc_assert (DECL_P (ptr));
4028 x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
4030 /* void *tmp = __builtin_alloca_with_align (size, align) */
4031 atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
4032 stmt = gimple_build_call (atmp, 2, x,
4033 size_int (DECL_ALIGN (var)));
4034 tmp = create_tmp_var_raw (ptr_type_node);
4035 gimple_add_tmp_var (tmp);
4036 gimple_call_set_lhs (stmt, tmp);
4038 gimple_seq_add_stmt (ilist, stmt);
4040 x = fold_convert_loc (clause_loc, TREE_TYPE (ptr), tmp);
4041 gimplify_assign (ptr, x, ilist);
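/* Annotation (sketch): for a variable-sized private VAR the sequence
   emitted above is roughly

     void *tmp = __builtin_alloca_with_align (<size of VAR>,
                                              DECL_ALIGN (VAR));
     <pointer decl> = (T *) tmp;

   where the pointer decl was created earlier and VAR's
   DECL_VALUE_EXPR dereferences it.  */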
4044 else if (omp_is_reference (var))
4046 /* For references that are being privatized for Fortran,
4047 allocate new backing storage for the new pointer
4048 variable. This allows us to avoid changing all the
4049 code that expects a pointer to something that expects
4050 a direct variable. */
4051 if (pass == 0)
4052 continue;
4054 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
4055 if (c_kind == OMP_CLAUSE_FIRSTPRIVATE && is_task_ctx (ctx))
4057 x = build_receiver_ref (var, false, ctx);
4058 x = build_fold_addr_expr_loc (clause_loc, x);
4060 else if (TREE_CONSTANT (x))
4062 /* For a reduction in a SIMD loop, defer adding the
4063 initialization of the reference, because if we decide
4064 to use a SIMD array for it, the initialization could cause
4065 an expansion ICE. */
4066 if (c_kind == OMP_CLAUSE_REDUCTION && is_simd)
4067 x = NULL_TREE;
4068 else
4070 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
4071 get_name (var));
4072 gimple_add_tmp_var (x);
4073 TREE_ADDRESSABLE (x) = 1;
4074 x = build_fold_addr_expr_loc (clause_loc, x);
4077 else
4079 tree atmp
4080 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
4081 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
4082 tree al = size_int (TYPE_ALIGN (rtype));
4083 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
4086 if (x)
4088 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
4089 gimplify_assign (new_var, x, ilist);
4092 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
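/* Annotation (sketch): for a privatized reference (e.g. a Fortran
   dummy argument) the branch above emits roughly

     new_var = &<fresh backing storage>            for constant size, or
     new_var = __builtin_alloca_with_align (size, align)   otherwise,

   and subsequent accesses go through *new_var, so code that expects a
   pointer keeps working unchanged.  */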
4094 else if (c_kind == OMP_CLAUSE_REDUCTION
4095 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4097 if (pass == 0)
4098 continue;
4100 else if (pass != 0)
4101 continue;
4103 switch (OMP_CLAUSE_CODE (c))
4105 case OMP_CLAUSE_SHARED:
4106 /* Ignore shared directives in teams construct. */
4107 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
4108 continue;
4109 /* Shared global vars are just accessed directly. */
4110 if (is_global_var (new_var))
4111 break;
4112 /* For taskloop firstprivate/lastprivate, represented
4113 as firstprivate and shared clause on the task, new_var
4114 is the firstprivate var. */
4115 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
4116 break;
4117 /* Set up the DECL_VALUE_EXPR for shared variables now. This
4118 needs to be delayed until after fixup_child_record_type so
4119 that we get the correct type during the dereference. */
4120 by_ref = use_pointer_for_field (var, ctx);
4121 x = build_receiver_ref (var, by_ref, ctx);
4122 SET_DECL_VALUE_EXPR (new_var, x);
4123 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4125 /* ??? If VAR is not passed by reference, and the variable
4126 hasn't been initialized yet, then we'll get a warning for
4127 the store into the omp_data_s structure. Ideally, we'd be
4128 able to notice this and not store anything at all, but
4129 we're generating code too early. Suppress the warning. */
4130 if (!by_ref)
4131 TREE_NO_WARNING (var) = 1;
4132 break;
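/* Annotation (sketch): for shared(x) on a parallel, the child
   function ends up accessing x through the receiver structure,
   e.g. as *.omp_data_i->x when passed by reference, by way of the
   DECL_VALUE_EXPR installed above.  */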
4134 case OMP_CLAUSE_LASTPRIVATE:
4135 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
4136 break;
4137 /* FALLTHRU */
4139 case OMP_CLAUSE_PRIVATE:
4140 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE)
4141 x = build_outer_var_ref (var, ctx);
4142 else if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
4144 if (is_task_ctx (ctx))
4145 x = build_receiver_ref (var, false, ctx);
4146 else
4147 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_PRIVATE);
4149 else
4150 x = NULL;
4151 do_private:
4152 tree nx;
4153 nx = lang_hooks.decls.omp_clause_default_ctor
4154 (c, unshare_expr (new_var), x);
4155 if (is_simd)
4157 tree y = lang_hooks.decls.omp_clause_dtor (c, new_var);
4158 if ((TREE_ADDRESSABLE (new_var) || nx || y
4159 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
4160 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4161 ivar, lvar))
4163 if (nx)
4164 x = lang_hooks.decls.omp_clause_default_ctor
4165 (c, unshare_expr (ivar), x);
4166 if (nx && x)
4167 gimplify_and_add (x, &llist[0]);
4168 if (y)
4170 y = lang_hooks.decls.omp_clause_dtor (c, ivar);
4171 if (y)
4173 gimple_seq tseq = NULL;
4175 dtor = y;
4176 gimplify_stmt (&dtor, &tseq);
4177 gimple_seq_add_seq (&llist[1], tseq);
4180 break;
4183 if (nx)
4184 gimplify_and_add (nx, ilist);
4185 /* FALLTHRU */
4187 do_dtor:
4188 x = lang_hooks.decls.omp_clause_dtor (c, new_var);
4189 if (x)
4191 gimple_seq tseq = NULL;
4193 dtor = x;
4194 gimplify_stmt (&dtor, &tseq);
4195 gimple_seq_add_seq (dlist, tseq);
4197 break;
4199 case OMP_CLAUSE_LINEAR:
4200 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
4201 goto do_firstprivate;
4202 if (OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
4203 x = NULL;
4204 else
4205 x = build_outer_var_ref (var, ctx);
4206 goto do_private;
4208 case OMP_CLAUSE_FIRSTPRIVATE:
4209 if (is_task_ctx (ctx))
4211 if (omp_is_reference (var) || is_variable_sized (var))
4212 goto do_dtor;
4213 else if (is_global_var (maybe_lookup_decl_in_outer_ctx (var,
4214 ctx))
4215 || use_pointer_for_field (var, NULL))
4217 x = build_receiver_ref (var, false, ctx);
4218 SET_DECL_VALUE_EXPR (new_var, x);
4219 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4220 goto do_dtor;
4223 do_firstprivate:
4224 x = build_outer_var_ref (var, ctx);
4225 if (is_simd)
4227 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
4228 && gimple_omp_for_combined_into_p (ctx->stmt))
4230 tree t = OMP_CLAUSE_LINEAR_STEP (c);
4231 tree stept = TREE_TYPE (t);
4232 tree ct = omp_find_clause (clauses,
4233 OMP_CLAUSE__LOOPTEMP_);
4234 gcc_assert (ct);
4235 tree l = OMP_CLAUSE_DECL (ct);
4236 tree n1 = fd->loop.n1;
4237 tree step = fd->loop.step;
4238 tree itype = TREE_TYPE (l);
4239 if (POINTER_TYPE_P (itype))
4240 itype = signed_type_for (itype);
4241 l = fold_build2 (MINUS_EXPR, itype, l, n1);
4242 if (TYPE_UNSIGNED (itype)
4243 && fd->loop.cond_code == GT_EXPR)
4244 l = fold_build2 (TRUNC_DIV_EXPR, itype,
4245 fold_build1 (NEGATE_EXPR, itype, l),
4246 fold_build1 (NEGATE_EXPR,
4247 itype, step));
4248 else
4249 l = fold_build2 (TRUNC_DIV_EXPR, itype, l, step);
4250 t = fold_build2 (MULT_EXPR, stept,
4251 fold_convert (stept, l), t);
4253 if (OMP_CLAUSE_LINEAR_ARRAY (c))
4255 x = lang_hooks.decls.omp_clause_linear_ctor
4256 (c, new_var, x, t);
4257 gimplify_and_add (x, ilist);
4258 goto do_dtor;
4261 if (POINTER_TYPE_P (TREE_TYPE (x)))
4262 x = fold_build2 (POINTER_PLUS_EXPR,
4263 TREE_TYPE (x), x, t);
4264 else
4265 x = fold_build2 (PLUS_EXPR, TREE_TYPE (x), x, t);
4268 if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LINEAR
4269 || TREE_ADDRESSABLE (new_var))
4270 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4271 ivar, lvar))
4273 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR)
4275 tree iv = create_tmp_var (TREE_TYPE (new_var));
4276 x = lang_hooks.decls.omp_clause_copy_ctor (c, iv, x);
4277 gimplify_and_add (x, ilist);
4278 gimple_stmt_iterator gsi
4279 = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
4280 gassign *g
4281 = gimple_build_assign (unshare_expr (lvar), iv);
4282 gsi_insert_before_without_update (&gsi, g,
4283 GSI_SAME_STMT);
4284 tree t = OMP_CLAUSE_LINEAR_STEP (c);
4285 enum tree_code code = PLUS_EXPR;
4286 if (POINTER_TYPE_P (TREE_TYPE (new_var)))
4287 code = POINTER_PLUS_EXPR;
4288 g = gimple_build_assign (iv, code, iv, t);
4289 gsi_insert_before_without_update (&gsi, g,
4290 GSI_SAME_STMT);
4291 break;
4293 x = lang_hooks.decls.omp_clause_copy_ctor
4294 (c, unshare_expr (ivar), x);
4295 gimplify_and_add (x, &llist[0]);
4296 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
4297 if (x)
4299 gimple_seq tseq = NULL;
4301 dtor = x;
4302 gimplify_stmt (&dtor, &tseq);
4303 gimple_seq_add_seq (&llist[1], tseq);
4305 break;
4308 x = lang_hooks.decls.omp_clause_copy_ctor
4309 (c, unshare_expr (new_var), x);
4310 gimplify_and_add (x, ilist);
4311 goto do_dtor;
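/* Annotation (sketch): for linear(i:step) on a simd combined into an
   outer worksharing loop, the adjustment above computes roughly

     x = <outer x> + ((<looptemp> - n1) / loop_step) * linear_step;

   before the privatized copy (or per-lane SIMD array element) is
   constructed from it.  */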
4313 case OMP_CLAUSE__LOOPTEMP_:
4314 gcc_assert (is_taskreg_ctx (ctx));
4315 x = build_outer_var_ref (var, ctx);
4316 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
4317 gimplify_and_add (x, ilist);
4318 break;
4320 case OMP_CLAUSE_COPYIN:
4321 by_ref = use_pointer_for_field (var, NULL);
4322 x = build_receiver_ref (var, by_ref, ctx);
4323 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, x);
4324 append_to_statement_list (x, &copyin_seq);
4325 copyin_by_ref |= by_ref;
4326 break;
4328 case OMP_CLAUSE_REDUCTION:
4329 /* OpenACC reductions are initialized using the
4330 GOACC_REDUCTION internal function. */
4331 if (is_gimple_omp_oacc (ctx->stmt))
4332 break;
4333 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4335 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
4336 gimple *tseq;
4337 x = build_outer_var_ref (var, ctx);
4339 if (omp_is_reference (var)
4340 && !useless_type_conversion_p (TREE_TYPE (placeholder),
4341 TREE_TYPE (x)))
4342 x = build_fold_addr_expr_loc (clause_loc, x);
4343 SET_DECL_VALUE_EXPR (placeholder, x);
4344 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
4345 tree new_vard = new_var;
4346 if (omp_is_reference (var))
4348 gcc_assert (TREE_CODE (new_var) == MEM_REF);
4349 new_vard = TREE_OPERAND (new_var, 0);
4350 gcc_assert (DECL_P (new_vard));
4352 if (is_simd
4353 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4354 ivar, lvar))
4356 if (new_vard == new_var)
4358 gcc_assert (DECL_VALUE_EXPR (new_var) == lvar);
4359 SET_DECL_VALUE_EXPR (new_var, ivar);
4361 else
4363 SET_DECL_VALUE_EXPR (new_vard,
4364 build_fold_addr_expr (ivar));
4365 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
4367 x = lang_hooks.decls.omp_clause_default_ctor
4368 (c, unshare_expr (ivar),
4369 build_outer_var_ref (var, ctx));
4370 if (x)
4371 gimplify_and_add (x, &llist[0]);
4372 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
4374 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
4375 lower_omp (&tseq, ctx);
4376 gimple_seq_add_seq (&llist[0], tseq);
4378 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
4379 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
4380 lower_omp (&tseq, ctx);
4381 gimple_seq_add_seq (&llist[1], tseq);
4382 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
4383 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
4384 if (new_vard == new_var)
4385 SET_DECL_VALUE_EXPR (new_var, lvar);
4386 else
4387 SET_DECL_VALUE_EXPR (new_vard,
4388 build_fold_addr_expr (lvar));
4389 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
4390 if (x)
4392 tseq = NULL;
4393 dtor = x;
4394 gimplify_stmt (&dtor, &tseq);
4395 gimple_seq_add_seq (&llist[1], tseq);
4397 break;
4399 /* If this is a reference to a constant-size reduction var
4400 with a placeholder, we haven't emitted the initializer
4401 for it because it is undesirable if SIMD arrays are used.
4402 But if they aren't used, we need to emit the deferred
4403 initialization now. */
4404 else if (omp_is_reference (var) && is_simd)
4405 handle_simd_reference (clause_loc, new_vard, ilist);
4406 x = lang_hooks.decls.omp_clause_default_ctor
4407 (c, unshare_expr (new_var),
4408 build_outer_var_ref (var, ctx));
4409 if (x)
4410 gimplify_and_add (x, ilist);
4411 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
4413 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
4414 lower_omp (&tseq, ctx);
4415 gimple_seq_add_seq (ilist, tseq);
4417 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
4418 if (is_simd)
4420 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
4421 lower_omp (&tseq, ctx);
4422 gimple_seq_add_seq (dlist, tseq);
4423 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
4425 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
4426 goto do_dtor;
4428 else
4430 x = omp_reduction_init (c, TREE_TYPE (new_var));
4431 gcc_assert (TREE_CODE (TREE_TYPE (new_var)) != ARRAY_TYPE);
4432 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
4434 /* reduction(-:var) sums up the partial results, so it
4435 acts identically to reduction(+:var). */
4436 if (code == MINUS_EXPR)
4437 code = PLUS_EXPR;
4439 tree new_vard = new_var;
4440 if (is_simd && omp_is_reference (var))
4442 gcc_assert (TREE_CODE (new_var) == MEM_REF);
4443 new_vard = TREE_OPERAND (new_var, 0);
4444 gcc_assert (DECL_P (new_vard));
4446 if (is_simd
4447 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4448 ivar, lvar))
4450 tree ref = build_outer_var_ref (var, ctx);
4452 gimplify_assign (unshare_expr (ivar), x, &llist[0]);
4454 if (sctx.is_simt)
4456 if (!simt_lane)
4457 simt_lane = create_tmp_var (unsigned_type_node);
4458 x = build_call_expr_internal_loc
4459 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_BFLY,
4460 TREE_TYPE (ivar), 2, ivar, simt_lane);
4461 x = build2 (code, TREE_TYPE (ivar), ivar, x);
4462 gimplify_assign (ivar, x, &llist[2]);
4464 x = build2 (code, TREE_TYPE (ref), ref, ivar);
4465 ref = build_outer_var_ref (var, ctx);
4466 gimplify_assign (ref, x, &llist[1]);
4468 if (new_vard != new_var)
4470 SET_DECL_VALUE_EXPR (new_vard,
4471 build_fold_addr_expr (lvar));
4472 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
4475 else
4477 if (omp_is_reference (var) && is_simd)
4478 handle_simd_reference (clause_loc, new_vard, ilist);
4479 gimplify_assign (new_var, x, ilist);
4480 if (is_simd)
4482 tree ref = build_outer_var_ref (var, ctx);
4484 x = build2 (code, TREE_TYPE (ref), ref, new_var);
4485 ref = build_outer_var_ref (var, ctx);
4486 gimplify_assign (ref, x, dlist);
4490 break;
4492 default:
4493 gcc_unreachable ();
4498 if (sctx.max_vf == 1)
4499 sctx.is_simt = false;
4501 if (sctx.lane || sctx.is_simt)
4503 uid = create_tmp_var (ptr_type_node, "simduid");
4504 /* Don't want uninit warnings on simduid; it is always uninitialized,
4505 as we use it only for its DECL_UID, not for its value. */
4506 TREE_NO_WARNING (uid) = 1;
4507 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SIMDUID_);
4508 OMP_CLAUSE__SIMDUID__DECL (c) = uid;
4509 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
4510 gimple_omp_for_set_clauses (ctx->stmt, c);
4512 /* Emit calls denoting privatized variables, and initialize a pointer to
4513 a structure that holds private variables as fields; expanded after the ompdevlow pass. */
4514 if (sctx.is_simt)
4516 sctx.simt_eargs[0] = uid;
4517 gimple *g
4518 = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, sctx.simt_eargs);
4519 gimple_call_set_lhs (g, uid);
4520 gimple_seq_add_stmt (ilist, g);
4521 sctx.simt_eargs.release ();
4523 simtrec = create_tmp_var (ptr_type_node, ".omp_simt");
4524 g = gimple_build_call_internal (IFN_GOMP_SIMT_ENTER_ALLOC, 1, uid);
4525 gimple_call_set_lhs (g, simtrec);
4526 gimple_seq_add_stmt (ilist, g);
4528 if (sctx.lane)
4530 gimple *g
4531 = gimple_build_call_internal (IFN_GOMP_SIMD_LANE, 1, uid);
4532 gimple_call_set_lhs (g, sctx.lane);
4533 gimple_stmt_iterator gsi = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
4534 gsi_insert_before_without_update (&gsi, g, GSI_SAME_STMT);
4535 g = gimple_build_assign (sctx.lane, INTEGER_CST,
4536 build_int_cst (unsigned_type_node, 0));
4537 gimple_seq_add_stmt (ilist, g);
4538 /* Emit reductions across SIMT lanes in log_2(simt_vf) steps. */
4539 if (llist[2])
4541 tree simt_vf = create_tmp_var (unsigned_type_node);
4542 g = gimple_build_call_internal (IFN_GOMP_SIMT_VF, 0);
4543 gimple_call_set_lhs (g, simt_vf);
4544 gimple_seq_add_stmt (dlist, g);
4546 tree t = build_int_cst (unsigned_type_node, 1);
4547 g = gimple_build_assign (simt_lane, INTEGER_CST, t);
4548 gimple_seq_add_stmt (dlist, g);
4550 t = build_int_cst (unsigned_type_node, 0);
4551 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
4552 gimple_seq_add_stmt (dlist, g);
4554 tree body = create_artificial_label (UNKNOWN_LOCATION);
4555 tree header = create_artificial_label (UNKNOWN_LOCATION);
4556 tree end = create_artificial_label (UNKNOWN_LOCATION);
4557 gimple_seq_add_stmt (dlist, gimple_build_goto (header));
4558 gimple_seq_add_stmt (dlist, gimple_build_label (body));
4560 gimple_seq_add_seq (dlist, llist[2]);
4562 g = gimple_build_assign (simt_lane, LSHIFT_EXPR, simt_lane, integer_one_node);
4563 gimple_seq_add_stmt (dlist, g);
4565 gimple_seq_add_stmt (dlist, gimple_build_label (header));
4566 g = gimple_build_cond (LT_EXPR, simt_lane, simt_vf, body, end);
4567 gimple_seq_add_stmt (dlist, g);
4569 gimple_seq_add_stmt (dlist, gimple_build_label (end));
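/* Annotation (sketch): the loop built above performs a butterfly
   reduction across the SIMT lanes,

     for (simt_lane = 1; simt_lane < simt_vf; simt_lane <<= 1)
       r = r <op> .GOMP_SIMT_XCHG_BFLY (r, simt_lane);

   completing in log2(simt_vf) steps.  */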
4571 for (int i = 0; i < 2; i++)
4572 if (llist[i])
4574 tree vf = create_tmp_var (unsigned_type_node);
4575 g = gimple_build_call_internal (IFN_GOMP_SIMD_VF, 1, uid);
4576 gimple_call_set_lhs (g, vf);
4577 gimple_seq *seq = i == 0 ? ilist : dlist;
4578 gimple_seq_add_stmt (seq, g);
4579 tree t = build_int_cst (unsigned_type_node, 0);
4580 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
4581 gimple_seq_add_stmt (seq, g);
4582 tree body = create_artificial_label (UNKNOWN_LOCATION);
4583 tree header = create_artificial_label (UNKNOWN_LOCATION);
4584 tree end = create_artificial_label (UNKNOWN_LOCATION);
4585 gimple_seq_add_stmt (seq, gimple_build_goto (header));
4586 gimple_seq_add_stmt (seq, gimple_build_label (body));
4587 gimple_seq_add_seq (seq, llist[i]);
4588 t = build_int_cst (unsigned_type_node, 1);
4589 g = gimple_build_assign (sctx.idx, PLUS_EXPR, sctx.idx, t);
4590 gimple_seq_add_stmt (seq, g);
4591 gimple_seq_add_stmt (seq, gimple_build_label (header));
4592 g = gimple_build_cond (LT_EXPR, sctx.idx, vf, body, end);
4593 gimple_seq_add_stmt (seq, g);
4594 gimple_seq_add_stmt (seq, gimple_build_label (end));
4597 if (sctx.is_simt)
4599 gimple_seq_add_seq (dlist, sctx.simt_dlist);
4600 gimple *g
4601 = gimple_build_call_internal (IFN_GOMP_SIMT_EXIT, 1, simtrec);
4602 gimple_seq_add_stmt (dlist, g);
4605 /* The copyin sequence is not to be executed by the main thread, since
4606 that would result in self-copies. Perhaps not visible to scalars,
4607 but it certainly is to C++ operator=. */
4608 if (copyin_seq)
4610 x = build_call_expr (builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM),
4611 0);
4612 x = build2 (NE_EXPR, boolean_type_node, x,
4613 build_int_cst (TREE_TYPE (x), 0));
4614 x = build3 (COND_EXPR, void_type_node, x, copyin_seq, NULL);
4615 gimplify_and_add (x, ilist);
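/* Annotation (sketch): the guard generated above is

     if (omp_get_thread_num () != 0)
       <copyin_seq: threadprivate copy = master copy; ...>

   so only the non-master threads execute the copyin assignments.  */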
4618 /* If any copyin variable is passed by reference, we must ensure the
4619 master thread doesn't modify it before it is copied over in all
4620 threads. Similarly for variables in both firstprivate and
4621 lastprivate clauses we need to ensure the lastprivate copying
4622 happens after firstprivate copying in all threads. And similarly
4623 for UDRs if the initializer expression refers to omp_orig. */
4624 if (copyin_by_ref || lastprivate_firstprivate || reduction_omp_orig_ref)
4626 /* Don't add any barrier for #pragma omp simd or
4627 #pragma omp distribute. */
4628 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
4629 || gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_FOR)
4630 gimple_seq_add_stmt (ilist, omp_build_barrier (NULL_TREE));
4633 /* If max_vf is non-zero, then we can use only a vectorization factor
4634 up to the max_vf we chose. So stick it into the safelen clause. */
4635 if (sctx.max_vf)
4637 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
4638 OMP_CLAUSE_SAFELEN);
4639 if (c == NULL_TREE
4640 || (TREE_CODE (OMP_CLAUSE_SAFELEN_EXPR (c)) == INTEGER_CST
4641 && compare_tree_int (OMP_CLAUSE_SAFELEN_EXPR (c),
4642 sctx.max_vf) == 1))
4644 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN);
4645 OMP_CLAUSE_SAFELEN_EXPR (c) = build_int_cst (integer_type_node,
4646 sctx.max_vf);
4647 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
4648 gimple_omp_for_set_clauses (ctx->stmt, c);
4654 /* Generate code to implement the LASTPRIVATE clauses. This is used for
4655 both parallel and workshare constructs. PREDICATE may be NULL if it's
4656 always true. */
4658 static void
4659 lower_lastprivate_clauses (tree clauses, tree predicate, gimple_seq *stmt_list,
4660 omp_context *ctx)
4662 tree x, c, label = NULL, orig_clauses = clauses;
4663 bool par_clauses = false;
4664 tree simduid = NULL, lastlane = NULL, simtcond = NULL, simtlast = NULL;
4666 /* Early exit if there are no lastprivate or linear clauses. */
4667 for (; clauses ; clauses = OMP_CLAUSE_CHAIN (clauses))
4668 if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LASTPRIVATE
4669 || (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LINEAR
4670 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (clauses)))
4671 break;
4672 if (clauses == NULL)
4674 /* If this was a workshare clause, see if it had been combined
4675 with its parallel. In that case, look for the clauses on the
4676 parallel statement itself. */
4677 if (is_parallel_ctx (ctx))
4678 return;
4680 ctx = ctx->outer;
4681 if (ctx == NULL || !is_parallel_ctx (ctx))
4682 return;
4684 clauses = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
4685 OMP_CLAUSE_LASTPRIVATE);
4686 if (clauses == NULL)
4687 return;
4688 par_clauses = true;
4691 bool maybe_simt = false;
4692 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
4693 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
4695 maybe_simt = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMT_);
4696 simduid = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMDUID_);
4697 if (simduid)
4698 simduid = OMP_CLAUSE__SIMDUID__DECL (simduid);
4701 if (predicate)
4703 gcond *stmt;
4704 tree label_true, arm1, arm2;
4705 enum tree_code pred_code = TREE_CODE (predicate);
4707 label = create_artificial_label (UNKNOWN_LOCATION);
4708 label_true = create_artificial_label (UNKNOWN_LOCATION);
4709 if (TREE_CODE_CLASS (pred_code) == tcc_comparison)
4711 arm1 = TREE_OPERAND (predicate, 0);
4712 arm2 = TREE_OPERAND (predicate, 1);
4713 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
4714 gimplify_expr (&arm2, stmt_list, NULL, is_gimple_val, fb_rvalue);
4716 else
4718 arm1 = predicate;
4719 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
4720 arm2 = boolean_false_node;
4721 pred_code = NE_EXPR;
4723 if (maybe_simt)
4725 c = build2 (pred_code, boolean_type_node, arm1, arm2);
4726 c = fold_convert (integer_type_node, c);
4727 simtcond = create_tmp_var (integer_type_node);
4728 gimplify_assign (simtcond, c, stmt_list);
4729 gcall *g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY,
4730 1, simtcond);
4731 c = create_tmp_var (integer_type_node);
4732 gimple_call_set_lhs (g, c);
4733 gimple_seq_add_stmt (stmt_list, g);
4734 stmt = gimple_build_cond (NE_EXPR, c, integer_zero_node,
4735 label_true, label);
4737 else
4738 stmt = gimple_build_cond (pred_code, arm1, arm2, label_true, label);
4739 gimple_seq_add_stmt (stmt_list, stmt);
4740 gimple_seq_add_stmt (stmt_list, gimple_build_label (label_true));
4743 for (c = clauses; c ;)
4745 tree var, new_var;
4746 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
4748 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4749 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
4750 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
4752 var = OMP_CLAUSE_DECL (c);
4753 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4754 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
4755 && is_taskloop_ctx (ctx))
4757 gcc_checking_assert (ctx->outer && is_task_ctx (ctx->outer));
4758 new_var = lookup_decl (var, ctx->outer);
4760 else
4762 new_var = lookup_decl (var, ctx);
4763 /* Avoid uninitialized warnings for lastprivate and
4764 for linear iterators. */
4765 if (predicate
4766 && (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4767 || OMP_CLAUSE_LINEAR_NO_COPYIN (c)))
4768 TREE_NO_WARNING (new_var) = 1;
4771 if (simduid && DECL_HAS_VALUE_EXPR_P (new_var))
4773 tree val = DECL_VALUE_EXPR (new_var);
4774 if (!maybe_simt
4775 && TREE_CODE (val) == ARRAY_REF
4776 && VAR_P (TREE_OPERAND (val, 0))
4777 && lookup_attribute ("omp simd array",
4778 DECL_ATTRIBUTES (TREE_OPERAND (val,
4779 0))))
4781 if (lastlane == NULL)
4783 lastlane = create_tmp_var (unsigned_type_node);
4784 gcall *g
4785 = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
4786 2, simduid,
4787 TREE_OPERAND (val, 1));
4788 gimple_call_set_lhs (g, lastlane);
4789 gimple_seq_add_stmt (stmt_list, g);
4791 new_var = build4 (ARRAY_REF, TREE_TYPE (val),
4792 TREE_OPERAND (val, 0), lastlane,
4793 NULL_TREE, NULL_TREE);
4795 else if (maybe_simt
4796 && VAR_P (val)
4797 && lookup_attribute ("omp simt private",
4798 DECL_ATTRIBUTES (val)))
4800 if (simtlast == NULL)
4802 simtlast = create_tmp_var (unsigned_type_node);
4803 gcall *g = gimple_build_call_internal
4804 (IFN_GOMP_SIMT_LAST_LANE, 1, simtcond);
4805 gimple_call_set_lhs (g, simtlast);
4806 gimple_seq_add_stmt (stmt_list, g);
4808 x = build_call_expr_internal_loc
4809 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_IDX,
4810 TREE_TYPE (val), 2, val, simtlast);
4811 new_var = unshare_expr (new_var);
4812 gimplify_assign (new_var, x, stmt_list);
4813 new_var = unshare_expr (new_var);
4817 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4818 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
4820 lower_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
4821 gimple_seq_add_seq (stmt_list,
4822 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
4823 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) = NULL;
4825 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
4826 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
4828 lower_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
4829 gimple_seq_add_seq (stmt_list,
4830 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
4831 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) = NULL;
4834 x = NULL_TREE;
4835 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4836 && OMP_CLAUSE_LASTPRIVATE_TASKLOOP_IV (c))
4838 gcc_checking_assert (is_taskloop_ctx (ctx));
4839 tree ovar = maybe_lookup_decl_in_outer_ctx (var,
4840 ctx->outer->outer);
4841 if (is_global_var (ovar))
4842 x = ovar;
4844 if (!x)
4845 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_LASTPRIVATE);
4846 if (omp_is_reference (var))
4847 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
4848 x = lang_hooks.decls.omp_clause_assign_op (c, x, new_var);
4849 gimplify_and_add (x, stmt_list);
4851 c = OMP_CLAUSE_CHAIN (c);
4852 if (c == NULL && !par_clauses)
4854 /* If this was a workshare clause, see if it had been combined
4855 with its parallel. In that case, continue looking for the
4856 clauses also on the parallel statement itself. */
4857 if (is_parallel_ctx (ctx))
4858 break;
4860 ctx = ctx->outer;
4861 if (ctx == NULL || !is_parallel_ctx (ctx))
4862 break;
4864 c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
4865 OMP_CLAUSE_LASTPRIVATE);
4866 par_clauses = true;
4870 if (label)
4871 gimple_seq_add_stmt (stmt_list, gimple_build_label (label));
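/* Annotation (sketch): for #pragma omp for lastprivate(x) the
   function above generates roughly

     if (<this thread executed the sequentially last iteration>)
       {
         <lastprivate gimple seq, if any>
         x_outer = x_private;
       }

   with the predicate tested once in front of all the copy-back
   assignments.  */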
4874 /* Lower the OpenACC reductions of CLAUSES for compute axis LEVEL
4875 (which might be a placeholder). INNER is true if this is an inner
4876 axis of a multi-axis loop. FORK and JOIN are (optional) fork and
4877 join markers. Generate the before-loop forking sequence in
4878 FORK_SEQ and the after-loop joining sequence in JOIN_SEQ. The
4879 general form of these sequences is
4881 GOACC_REDUCTION_SETUP
4882 GOACC_FORK
4883 GOACC_REDUCTION_INIT
4885 GOACC_REDUCTION_FINI
4886 GOACC_JOIN
4887 GOACC_REDUCTION_TEARDOWN. */
4889 static void
4890 lower_oacc_reductions (location_t loc, tree clauses, tree level, bool inner,
4891 gcall *fork, gcall *join, gimple_seq *fork_seq,
4892 gimple_seq *join_seq, omp_context *ctx)
4894 gimple_seq before_fork = NULL;
4895 gimple_seq after_fork = NULL;
4896 gimple_seq before_join = NULL;
4897 gimple_seq after_join = NULL;
4898 tree init_code = NULL_TREE, fini_code = NULL_TREE,
4899 setup_code = NULL_TREE, teardown_code = NULL_TREE;
4900 unsigned offset = 0;
4902 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
4903 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
4905 tree orig = OMP_CLAUSE_DECL (c);
4906 tree var = maybe_lookup_decl (orig, ctx);
4907 tree ref_to_res = NULL_TREE;
4908 tree incoming, outgoing, v1, v2, v3;
4909 bool is_private = false;
4911 enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
4912 if (rcode == MINUS_EXPR)
4913 rcode = PLUS_EXPR;
4914 else if (rcode == TRUTH_ANDIF_EXPR)
4915 rcode = BIT_AND_EXPR;
4916 else if (rcode == TRUTH_ORIF_EXPR)
4917 rcode = BIT_IOR_EXPR;
4918 tree op = build_int_cst (unsigned_type_node, rcode);
4920 if (!var)
4921 var = orig;
4923 incoming = outgoing = var;
4925 if (!inner)
4927 /* See if an outer construct also reduces this variable. */
4928 omp_context *outer = ctx;
4930 while (omp_context *probe = outer->outer)
4932 enum gimple_code type = gimple_code (probe->stmt);
4933 tree cls;
4935 switch (type)
4937 case GIMPLE_OMP_FOR:
4938 cls = gimple_omp_for_clauses (probe->stmt);
4939 break;
4941 case GIMPLE_OMP_TARGET:
4942 if (gimple_omp_target_kind (probe->stmt)
4943 != GF_OMP_TARGET_KIND_OACC_PARALLEL)
4944 goto do_lookup;
4946 cls = gimple_omp_target_clauses (probe->stmt);
4947 break;
4949 default:
4950 goto do_lookup;
4953 outer = probe;
4954 for (; cls; cls = OMP_CLAUSE_CHAIN (cls))
4955 if (OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_REDUCTION
4956 && orig == OMP_CLAUSE_DECL (cls))
4958 incoming = outgoing = lookup_decl (orig, probe);
4959 goto has_outer_reduction;
4961 else if ((OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_FIRSTPRIVATE
4962 || OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_PRIVATE)
4963 && orig == OMP_CLAUSE_DECL (cls))
4965 is_private = true;
4966 goto do_lookup;
4970 do_lookup:
4971 /* This is the outermost construct with this reduction,
4972 see if there's a mapping for it. */
4973 if (gimple_code (outer->stmt) == GIMPLE_OMP_TARGET
4974 && maybe_lookup_field (orig, outer) && !is_private)
4976 ref_to_res = build_receiver_ref (orig, false, outer);
4977 if (omp_is_reference (orig))
4978 ref_to_res = build_simple_mem_ref (ref_to_res);
4980 tree type = TREE_TYPE (var);
4981 if (POINTER_TYPE_P (type))
4982 type = TREE_TYPE (type);
4984 outgoing = var;
4985 incoming = omp_reduction_init_op (loc, rcode, type);
4987 else
4989 /* Try to look at enclosing contexts for reduction var,
4990 use original if no mapping found. */
4991 tree t = NULL_TREE;
4992 omp_context *c = ctx->outer;
4993 while (c && !t)
4995 t = maybe_lookup_decl (orig, c);
4996 c = c->outer;
4998 incoming = outgoing = (t ? t : orig);
5001 has_outer_reduction:;
5004 if (!ref_to_res)
5005 ref_to_res = integer_zero_node;
5007 if (omp_is_reference (orig))
5009 tree type = TREE_TYPE (var);
5010 const char *id = IDENTIFIER_POINTER (DECL_NAME (var));
5012 if (!inner)
5014 tree x = create_tmp_var (TREE_TYPE (type), id);
5015 gimplify_assign (var, build_fold_addr_expr (x), fork_seq);
5018 v1 = create_tmp_var (type, id);
5019 v2 = create_tmp_var (type, id);
5020 v3 = create_tmp_var (type, id);
5022 gimplify_assign (v1, var, fork_seq);
5023 gimplify_assign (v2, var, fork_seq);
5024 gimplify_assign (v3, var, fork_seq);
5026 var = build_simple_mem_ref (var);
5027 v1 = build_simple_mem_ref (v1);
5028 v2 = build_simple_mem_ref (v2);
5029 v3 = build_simple_mem_ref (v3);
5030 outgoing = build_simple_mem_ref (outgoing);
5032 if (!TREE_CONSTANT (incoming))
5033 incoming = build_simple_mem_ref (incoming);
5035 else
5036 v1 = v2 = v3 = var;
5038 /* Determine position in reduction buffer, which may be used
5039 by target. */
5040 enum machine_mode mode = TYPE_MODE (TREE_TYPE (var));
5041 unsigned align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
5042 offset = (offset + align - 1) & ~(align - 1);
5043 tree off = build_int_cst (sizetype, offset);
5044 offset += GET_MODE_SIZE (mode);
5046 if (!init_code)
5048 init_code = build_int_cst (integer_type_node,
5049 IFN_GOACC_REDUCTION_INIT);
5050 fini_code = build_int_cst (integer_type_node,
5051 IFN_GOACC_REDUCTION_FINI);
5052 setup_code = build_int_cst (integer_type_node,
5053 IFN_GOACC_REDUCTION_SETUP);
5054 teardown_code = build_int_cst (integer_type_node,
5055 IFN_GOACC_REDUCTION_TEARDOWN);
5058 tree setup_call
5059 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5060 TREE_TYPE (var), 6, setup_code,
5061 unshare_expr (ref_to_res),
5062 incoming, level, op, off);
5063 tree init_call
5064 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5065 TREE_TYPE (var), 6, init_code,
5066 unshare_expr (ref_to_res),
5067 v1, level, op, off);
5068 tree fini_call
5069 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5070 TREE_TYPE (var), 6, fini_code,
5071 unshare_expr (ref_to_res),
5072 v2, level, op, off);
5073 tree teardown_call
5074 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5075 TREE_TYPE (var), 6, teardown_code,
5076 ref_to_res, v3, level, op, off);
5078 gimplify_assign (v1, setup_call, &before_fork);
5079 gimplify_assign (v2, init_call, &after_fork);
5080 gimplify_assign (v3, fini_call, &before_join);
5081 gimplify_assign (outgoing, teardown_call, &after_join);
5084 /* Now stitch things together. */
5085 gimple_seq_add_seq (fork_seq, before_fork);
5086 if (fork)
5087 gimple_seq_add_stmt (fork_seq, fork);
5088 gimple_seq_add_seq (fork_seq, after_fork);
5090 gimple_seq_add_seq (join_seq, before_join);
5091 if (join)
5092 gimple_seq_add_stmt (join_seq, join);
5093 gimple_seq_add_seq (join_seq, after_join);
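/* Annotation (sketch): per reduction variable the stitched sequence
   built above is

     v1 = .GOACC_REDUCTION (SETUP, ref_to_res, incoming, level, op, off);
     GOACC_FORK
     v2 = .GOACC_REDUCTION (INIT, ref_to_res, v1, level, op, off);
     ... loop ...
     v3 = .GOACC_REDUCTION (FINI, ref_to_res, v2, level, op, off);
     GOACC_JOIN
     outgoing = .GOACC_REDUCTION (TEARDOWN, ref_to_res, v3, level, op, off);  */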
5096 /* Generate code to implement the REDUCTION clauses. */
5098 static void
5099 lower_reduction_clauses (tree clauses, gimple_seq *stmt_seqp, omp_context *ctx)
5101 gimple_seq sub_seq = NULL;
5102 gimple *stmt;
5103 tree x, c;
5104 int count = 0;
5106 /* OpenACC loop reductions are handled elsewhere. */
5107 if (is_gimple_omp_oacc (ctx->stmt))
5108 return;
5110 /* SIMD reductions are handled in lower_rec_input_clauses. */
5111 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
5112 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
5113 return;
5115 /* First see if there is exactly one reduction clause. Use OMP_ATOMIC
5116 update in that case, otherwise use a lock. */
5117 for (c = clauses; c && count < 2; c = OMP_CLAUSE_CHAIN (c))
5118 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
5120 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
5121 || TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
5123 /* Never use OMP_ATOMIC for array reductions or UDRs. */
5124 count = -1;
5125 break;
5127 count++;
5130 if (count == 0)
5131 return;
5133 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
5135 tree var, ref, new_var, orig_var;
5136 enum tree_code code;
5137 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
5139 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5140 continue;
5142 orig_var = var = OMP_CLAUSE_DECL (c);
5143 if (TREE_CODE (var) == MEM_REF)
5145 var = TREE_OPERAND (var, 0);
5146 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
5147 var = TREE_OPERAND (var, 0);
5148 if (TREE_CODE (var) == INDIRECT_REF
5149 || TREE_CODE (var) == ADDR_EXPR)
5150 var = TREE_OPERAND (var, 0);
5151 orig_var = var;
5152 if (is_variable_sized (var))
5154 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
5155 var = DECL_VALUE_EXPR (var);
5156 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
5157 var = TREE_OPERAND (var, 0);
5158 gcc_assert (DECL_P (var));
5161 new_var = lookup_decl (var, ctx);
5162 if (var == OMP_CLAUSE_DECL (c) && omp_is_reference (var))
5163 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
5164 ref = build_outer_var_ref (var, ctx);
5165 code = OMP_CLAUSE_REDUCTION_CODE (c);
5167 /* reduction(-:var) sums up the partial results, so it acts
5168 identically to reduction(+:var). */
5169 if (code == MINUS_EXPR)
5170 code = PLUS_EXPR;
5172 if (count == 1)
5174 tree addr = build_fold_addr_expr_loc (clause_loc, ref);
5176 addr = save_expr (addr);
5177 ref = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (addr)), addr);
5178 x = fold_build2_loc (clause_loc, code, TREE_TYPE (ref), ref, new_var);
5179 x = build2 (OMP_ATOMIC, void_type_node, addr, x);
5180 gimplify_and_add (x, stmt_seqp);
5181 return;
5183 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
5185 tree d = OMP_CLAUSE_DECL (c);
5186 tree type = TREE_TYPE (d);
5187 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
5188 tree i = create_tmp_var (TREE_TYPE (v), NULL);
5189 tree ptype = build_pointer_type (TREE_TYPE (type));
5190 tree bias = TREE_OPERAND (d, 1);
5191 d = TREE_OPERAND (d, 0);
5192 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
5194 tree b = TREE_OPERAND (d, 1);
5195 b = maybe_lookup_decl (b, ctx);
5196 if (b == NULL)
5198 b = TREE_OPERAND (d, 1);
5199 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
5201 if (integer_zerop (bias))
5202 bias = b;
5203 else
5205 bias = fold_convert_loc (clause_loc, TREE_TYPE (b), bias);
5206 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
5207 TREE_TYPE (b), b, bias);
5209 d = TREE_OPERAND (d, 0);
5211 /* For references, build_outer_var_ref already performs the
5212 dereference, so only new_var needs one. */
5213 if (TREE_CODE (d) == INDIRECT_REF)
5215 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
5216 gcc_assert (omp_is_reference (var) && var == orig_var);
5218 else if (TREE_CODE (d) == ADDR_EXPR)
5220 if (orig_var == var)
5222 new_var = build_fold_addr_expr (new_var);
5223 ref = build_fold_addr_expr (ref);
5226 else
5228 gcc_assert (orig_var == var);
5229 if (omp_is_reference (var))
5230 ref = build_fold_addr_expr (ref);
5232 if (DECL_P (v))
5234 tree t = maybe_lookup_decl (v, ctx);
5235 if (t)
5236 v = t;
5237 else
5238 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
5239 gimplify_expr (&v, stmt_seqp, NULL, is_gimple_val, fb_rvalue);
5241 if (!integer_zerop (bias))
5243 bias = fold_convert_loc (clause_loc, sizetype, bias);
5244 new_var = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
5245 TREE_TYPE (new_var), new_var,
5246 unshare_expr (bias));
5247 ref = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
5248 TREE_TYPE (ref), ref, bias);
5250 new_var = fold_convert_loc (clause_loc, ptype, new_var);
5251 ref = fold_convert_loc (clause_loc, ptype, ref);
5252 tree m = create_tmp_var (ptype, NULL);
5253 gimplify_assign (m, new_var, stmt_seqp);
5254 new_var = m;
5255 m = create_tmp_var (ptype, NULL);
5256 gimplify_assign (m, ref, stmt_seqp);
5257 ref = m;
5258 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), stmt_seqp);
5259 tree body = create_artificial_label (UNKNOWN_LOCATION);
5260 tree end = create_artificial_label (UNKNOWN_LOCATION);
5261 gimple_seq_add_stmt (&sub_seq, gimple_build_label (body));
5262 tree priv = build_simple_mem_ref_loc (clause_loc, new_var);
5263 tree out = build_simple_mem_ref_loc (clause_loc, ref);
5264 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5266 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
5267 tree decl_placeholder
5268 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
5269 SET_DECL_VALUE_EXPR (placeholder, out);
5270 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
5271 SET_DECL_VALUE_EXPR (decl_placeholder, priv);
5272 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
5273 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
5274 gimple_seq_add_seq (&sub_seq,
5275 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
5276 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5277 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
5278 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
5280 else
5282 x = build2 (code, TREE_TYPE (out), out, priv);
5283 out = unshare_expr (out);
5284 gimplify_assign (out, x, &sub_seq);
5286 gimple *g = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
5287 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5288 gimple_seq_add_stmt (&sub_seq, g);
5289 g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
5290 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5291 gimple_seq_add_stmt (&sub_seq, g);
5292 g = gimple_build_assign (i, PLUS_EXPR, i,
5293 build_int_cst (TREE_TYPE (i), 1));
5294 gimple_seq_add_stmt (&sub_seq, g);
5295 g = gimple_build_cond (LE_EXPR, i, v, body, end);
5296 gimple_seq_add_stmt (&sub_seq, g);
5297 gimple_seq_add_stmt (&sub_seq, gimple_build_label (end));
5299 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5301 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
5303 if (omp_is_reference (var)
5304 && !useless_type_conversion_p (TREE_TYPE (placeholder),
5305 TREE_TYPE (ref)))
5306 ref = build_fold_addr_expr_loc (clause_loc, ref);
5307 SET_DECL_VALUE_EXPR (placeholder, ref);
5308 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
5309 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
5310 gimple_seq_add_seq (&sub_seq, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
5311 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5312 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
5314 else
5316 x = build2 (code, TREE_TYPE (ref), ref, new_var);
5317 ref = build_outer_var_ref (var, ctx);
5318 gimplify_assign (ref, x, &sub_seq);
5322 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START),
5323 0);
5324 gimple_seq_add_stmt (stmt_seqp, stmt);
5326 gimple_seq_add_seq (stmt_seqp, sub_seq);
5328 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END),
5329 0);
5330 gimple_seq_add_stmt (stmt_seqp, stmt);
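/* Annotation (sketch): with exactly one scalar, non-UDR reduction the
   merge is a single atomic update, roughly

     #pragma omp atomic
     *<&outer> = *<&outer> <op> <private copy>;

   otherwise all the merges are serialized between the
   GOMP_atomic_start () and GOMP_atomic_end () calls emitted above.  */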
5334 /* Generate code to implement the COPYPRIVATE clauses. */
5336 static void
5337 lower_copyprivate_clauses (tree clauses, gimple_seq *slist, gimple_seq *rlist,
5338 omp_context *ctx)
5340 tree c;
5342 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
5344 tree var, new_var, ref, x;
5345 bool by_ref;
5346 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
5348 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYPRIVATE)
5349 continue;
5351 var = OMP_CLAUSE_DECL (c);
5352 by_ref = use_pointer_for_field (var, NULL);
5354 ref = build_sender_ref (var, ctx);
5355 x = new_var = lookup_decl_in_outer_ctx (var, ctx);
5356 if (by_ref)
5358 x = build_fold_addr_expr_loc (clause_loc, new_var);
5359 x = fold_convert_loc (clause_loc, TREE_TYPE (ref), x);
5361 gimplify_assign (ref, x, slist);
5363 ref = build_receiver_ref (var, false, ctx);
5364 if (by_ref)
5366 ref = fold_convert_loc (clause_loc,
5367 build_pointer_type (TREE_TYPE (new_var)),
5368 ref);
5369 ref = build_fold_indirect_ref_loc (clause_loc, ref);
5371 if (omp_is_reference (var))
5373 ref = fold_convert_loc (clause_loc, TREE_TYPE (new_var), ref);
5374 ref = build_simple_mem_ref_loc (clause_loc, ref);
5375 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
5377 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, ref);
5378 gimplify_and_add (x, rlist);
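/* Annotation (sketch): for #pragma omp single copyprivate(x), the
   thread that executed the single region stores x (or &x, when passed
   by reference) into the broadcast record in SLIST; every other
   thread copies it back out of the record in RLIST.  */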
5383 /* Generate code to implement the clauses, FIRSTPRIVATE, COPYIN, LASTPRIVATE,
5384 and REDUCTION from the sender (aka parent) side. */
5386 static void
5387 lower_send_clauses (tree clauses, gimple_seq *ilist, gimple_seq *olist,
5388 omp_context *ctx)
5390 tree c, t;
5391 int ignored_looptemp = 0;
5392 bool is_taskloop = false;
5394 /* For taskloop, ignore the first two _looptemp_ clauses; those are
5395 initialized by GOMP_taskloop. */
5396 if (is_task_ctx (ctx) && gimple_omp_task_taskloop_p (ctx->stmt))
5398 ignored_looptemp = 2;
5399 is_taskloop = true;
5402 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
5404 tree val, ref, x, var;
5405 bool by_ref, do_in = false, do_out = false;
5406 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
5408 switch (OMP_CLAUSE_CODE (c))
5410 case OMP_CLAUSE_PRIVATE:
5411 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
5412 break;
5413 continue;
5414 case OMP_CLAUSE_FIRSTPRIVATE:
5415 case OMP_CLAUSE_COPYIN:
5416 case OMP_CLAUSE_LASTPRIVATE:
5417 case OMP_CLAUSE_REDUCTION:
5418 break;
5419 case OMP_CLAUSE_SHARED:
5420 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
5421 break;
5422 continue;
5423 case OMP_CLAUSE__LOOPTEMP_:
5424 if (ignored_looptemp)
5426 ignored_looptemp--;
5427 continue;
5429 break;
5430 default:
5431 continue;
5434 val = OMP_CLAUSE_DECL (c);
5435 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
5436 && TREE_CODE (val) == MEM_REF)
5438 val = TREE_OPERAND (val, 0);
5439 if (TREE_CODE (val) == POINTER_PLUS_EXPR)
5440 val = TREE_OPERAND (val, 0);
5441 if (TREE_CODE (val) == INDIRECT_REF
5442 || TREE_CODE (val) == ADDR_EXPR)
5443 val = TREE_OPERAND (val, 0);
5444 if (is_variable_sized (val))
5445 continue;
5448 /* For OMP_CLAUSE_SHARED_FIRSTPRIVATE, look beyond the
5449 outer taskloop region. */
5450 omp_context *ctx_for_o = ctx;
5451 if (is_taskloop
5452 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
5453 && OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
5454 ctx_for_o = ctx->outer;
5456 var = lookup_decl_in_outer_ctx (val, ctx_for_o);
5458 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYIN
5459 && is_global_var (var))
5460 continue;
5462 t = omp_member_access_dummy_var (var);
5463 if (t)
5465 var = DECL_VALUE_EXPR (var);
5466 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx_for_o);
5467 if (o != t)
5468 var = unshare_and_remap (var, t, o);
5469 else
5470 var = unshare_expr (var);
5473 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
5475 /* Handle taskloop firstprivate/lastprivate, where the
5476 lastprivate on GIMPLE_OMP_TASK is represented as
5477 OMP_CLAUSE_SHARED_FIRSTPRIVATE. */
5478 tree f = lookup_sfield ((splay_tree_key) &DECL_UID (val), ctx);
5479 x = omp_build_component_ref (ctx->sender_decl, f);
5480 if (use_pointer_for_field (val, ctx))
5481 var = build_fold_addr_expr (var);
5482 gimplify_assign (x, var, ilist);
5483 DECL_ABSTRACT_ORIGIN (f) = NULL;
5484 continue;
5487 if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
5488 || val == OMP_CLAUSE_DECL (c))
5489 && is_variable_sized (val))
5490 continue;
5491 by_ref = use_pointer_for_field (val, NULL);
5493 switch (OMP_CLAUSE_CODE (c))
5495 case OMP_CLAUSE_FIRSTPRIVATE:
5496 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c)
5497 && !by_ref
5498 && is_task_ctx (ctx))
5499 TREE_NO_WARNING (var) = 1;
5500 do_in = true;
5501 break;
5503 case OMP_CLAUSE_PRIVATE:
5504 case OMP_CLAUSE_COPYIN:
5505 case OMP_CLAUSE__LOOPTEMP_:
5506 do_in = true;
5507 break;
5509 case OMP_CLAUSE_LASTPRIVATE:
5510 if (by_ref || omp_is_reference (val))
5512 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
5513 continue;
5514 do_in = true;
5516 else
5518 do_out = true;
5519 if (lang_hooks.decls.omp_private_outer_ref (val))
5520 do_in = true;
5522 break;
5524 case OMP_CLAUSE_REDUCTION:
5525 do_in = true;
5526 if (val == OMP_CLAUSE_DECL (c))
5527 do_out = !(by_ref || omp_is_reference (val));
5528 else
5529 by_ref = TREE_CODE (TREE_TYPE (val)) == ARRAY_TYPE;
5530 break;
5532 default:
5533 gcc_unreachable ();
5536 if (do_in)
5538 ref = build_sender_ref (val, ctx);
5539 x = by_ref ? build_fold_addr_expr_loc (clause_loc, var) : var;
5540 gimplify_assign (ref, x, ilist);
5541 if (is_task_ctx (ctx))
5542 DECL_ABSTRACT_ORIGIN (TREE_OPERAND (ref, 1)) = NULL;
5545 if (do_out)
5547 ref = build_sender_ref (val, ctx);
5548 gimplify_assign (var, ref, olist);
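/* Annotation (sketch): on the parent side, firstprivate(x) on a
   parallel produces roughly

     .omp_data_o.x = x;        in ILIST, before the library call,

   and a lastprivate or reduction copy-out reads the field back,

     x = .omp_data_o.x;        in OLIST, after the call.  */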
5553 /* Generate code to implement SHARED from the sender (aka parent)
5554 side. This is trickier, since GIMPLE_OMP_PARALLEL_CLAUSES doesn't
5555 list things that got automatically shared. */
5557 static void
5558 lower_send_shared_vars (gimple_seq *ilist, gimple_seq *olist, omp_context *ctx)
5560 tree var, ovar, nvar, t, f, x, record_type;
5562 if (ctx->record_type == NULL)
5563 return;
5565 record_type = ctx->srecord_type ? ctx->srecord_type : ctx->record_type;
5566 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
5568 ovar = DECL_ABSTRACT_ORIGIN (f);
5569 if (!ovar || TREE_CODE (ovar) == FIELD_DECL)
5570 continue;
5572 nvar = maybe_lookup_decl (ovar, ctx);
5573 if (!nvar || !DECL_HAS_VALUE_EXPR_P (nvar))
5574 continue;
5576 /* If CTX is a nested parallel directive, find the immediately
5577 enclosing parallel or workshare construct that contains a
5578 mapping for OVAR. */
5579 var = lookup_decl_in_outer_ctx (ovar, ctx);
5581 t = omp_member_access_dummy_var (var);
5582 if (t)
5584 var = DECL_VALUE_EXPR (var);
5585 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
5586 if (o != t)
5587 var = unshare_and_remap (var, t, o);
5588 else
5589 var = unshare_expr (var);
5592 if (use_pointer_for_field (ovar, ctx))
5594 x = build_sender_ref (ovar, ctx);
5595 var = build_fold_addr_expr (var);
5596 gimplify_assign (x, var, ilist);
5598 else
5600 x = build_sender_ref (ovar, ctx);
5601 gimplify_assign (x, var, ilist);
5603 if (!TREE_READONLY (var)
5604 /* We don't need to receive a new reference to a result
5605 or parm decl. In fact we may not store to it as we will
5606 invalidate any pending RSO and generate wrong gimple
5607 during inlining. */
5608 && !((TREE_CODE (var) == RESULT_DECL
5609 || TREE_CODE (var) == PARM_DECL)
5610 && DECL_BY_REFERENCE (var)))
5612 x = build_sender_ref (ovar, ctx);
5613 gimplify_assign (var, x, olist);
5619 /* Emit an OpenACC head marker call, encapsulating the partitioning and
5620 other information that must be processed by the target compiler.
5621 Return the maximum number of dimensions the associated loop might
5622 be partitioned over. */
5624 static unsigned
5625 lower_oacc_head_mark (location_t loc, tree ddvar, tree clauses,
5626 gimple_seq *seq, omp_context *ctx)
5628 unsigned levels = 0;
5629 unsigned tag = 0;
5630 tree gang_static = NULL_TREE;
5631 auto_vec<tree, 5> args;
5633 args.quick_push (build_int_cst
5634 (integer_type_node, IFN_UNIQUE_OACC_HEAD_MARK));
5635 args.quick_push (ddvar);
5636 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
5638 switch (OMP_CLAUSE_CODE (c))
5640 case OMP_CLAUSE_GANG:
5641 tag |= OLF_DIM_GANG;
5642 gang_static = OMP_CLAUSE_GANG_STATIC_EXPR (c);
5643 /* static:* is represented by -1, and we can ignore it, as
5644 scheduling is always static. */
5645 if (gang_static && integer_minus_onep (gang_static))
5646 gang_static = NULL_TREE;
5647 levels++;
5648 break;
5650 case OMP_CLAUSE_WORKER:
5651 tag |= OLF_DIM_WORKER;
5652 levels++;
5653 break;
5655 case OMP_CLAUSE_VECTOR:
5656 tag |= OLF_DIM_VECTOR;
5657 levels++;
5658 break;
5660 case OMP_CLAUSE_SEQ:
5661 tag |= OLF_SEQ;
5662 break;
5664 case OMP_CLAUSE_AUTO:
5665 tag |= OLF_AUTO;
5666 break;
5668 case OMP_CLAUSE_INDEPENDENT:
5669 tag |= OLF_INDEPENDENT;
5670 break;
5672 case OMP_CLAUSE_TILE:
5673 tag |= OLF_TILE;
5674 break;
5676 default:
5677 continue;
5681 if (gang_static)
5683 if (DECL_P (gang_static))
5684 gang_static = build_outer_var_ref (gang_static, ctx);
5685 tag |= OLF_GANG_STATIC;
5688 /* In a parallel region, loops are implicitly INDEPENDENT. */
5689 omp_context *tgt = enclosing_target_ctx (ctx);
5690 if (!tgt || is_oacc_parallel (tgt))
5691 tag |= OLF_INDEPENDENT;
5693 if (tag & OLF_TILE)
5694 /* Tiling could use all 3 levels. */
5695 levels = 3;
5696 else
5698 /* A loop lacking SEQ, GANG, WORKER and/or VECTOR could be AUTO.
5699 Ensure at least one level, or 2 for possible auto
5700 partitioning. */
5701 bool maybe_auto = !(tag & (((GOMP_DIM_MASK (GOMP_DIM_MAX) - 1)
5702 << OLF_DIM_BASE) | OLF_SEQ));
5704 if (levels < 1u + maybe_auto)
5705 levels = 1u + maybe_auto;
5708 args.quick_push (build_int_cst (integer_type_node, levels));
5709 args.quick_push (build_int_cst (integer_type_node, tag));
5710 if (gang_static)
5711 args.quick_push (gang_static);
5713 gcall *call = gimple_build_call_internal_vec (IFN_UNIQUE, args);
5714 gimple_set_location (call, loc);
5715 gimple_set_lhs (call, ddvar);
5716 gimple_seq_add_stmt (seq, call);
5718 return levels;
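/* Annotation (sketch): for e.g. "#pragma acc loop gang vector" the
   head marker emitted above is roughly

     ddvar = .UNIQUE (OACC_HEAD_MARK, ddvar, <levels = 2>,
                      <tag = OLF_DIM_GANG | OLF_DIM_VECTOR | OLF_INDEPENDENT>);

   which the target compiler consumes when placing the partitioned
   loop.  */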
5721 /* Emit an OpenACC loop head or tail marker to SEQ. LEVEL is the
5722 partitioning level of the enclosed region. */
5724 static void
5725 lower_oacc_loop_marker (location_t loc, tree ddvar, bool head,
5726 tree tofollow, gimple_seq *seq)
5728 int marker_kind = (head ? IFN_UNIQUE_OACC_HEAD_MARK
5729 : IFN_UNIQUE_OACC_TAIL_MARK);
5730 tree marker = build_int_cst (integer_type_node, marker_kind);
5731 int nargs = 2 + (tofollow != NULL_TREE);
5732 gcall *call = gimple_build_call_internal (IFN_UNIQUE, nargs,
5733 marker, ddvar, tofollow);
5734 gimple_set_location (call, loc);
5735 gimple_set_lhs (call, ddvar);
5736 gimple_seq_add_stmt (seq, call);
5739 /* Generate the before and after OpenACC loop sequences. CLAUSES are
5740 the loop clauses, from which we extract reductions. Initialize
5741 HEAD and TAIL. */
5743 static void
5744 lower_oacc_head_tail (location_t loc, tree clauses,
5745 gimple_seq *head, gimple_seq *tail, omp_context *ctx)
5747 bool inner = false;
5748 tree ddvar = create_tmp_var (integer_type_node, ".data_dep");
5749 gimple_seq_add_stmt (head, gimple_build_assign (ddvar, integer_zero_node));
5751 unsigned count = lower_oacc_head_mark (loc, ddvar, clauses, head, ctx);
5752 tree fork_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_FORK);
5753 tree join_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_JOIN);
5755 gcc_assert (count);
5756 for (unsigned done = 1; count; count--, done++)
5758 gimple_seq fork_seq = NULL;
5759 gimple_seq join_seq = NULL;
5761 tree place = build_int_cst (integer_type_node, -1);
5762 gcall *fork = gimple_build_call_internal (IFN_UNIQUE, 3,
5763 fork_kind, ddvar, place);
5764 gimple_set_location (fork, loc);
5765 gimple_set_lhs (fork, ddvar);
5767 gcall *join = gimple_build_call_internal (IFN_UNIQUE, 3,
5768 join_kind, ddvar, place);
5769 gimple_set_location (join, loc);
5770 gimple_set_lhs (join, ddvar);
5772 /* Mark the beginning of this level sequence. */
5773 if (inner)
5774 lower_oacc_loop_marker (loc, ddvar, true,
5775 build_int_cst (integer_type_node, count),
5776 &fork_seq);
5777 lower_oacc_loop_marker (loc, ddvar, false,
5778 build_int_cst (integer_type_node, done),
5779 &join_seq);
5781 lower_oacc_reductions (loc, clauses, place, inner,
5782 fork, join, &fork_seq, &join_seq, ctx);
5784 /* Append this level to head. */
5785 gimple_seq_add_seq (head, fork_seq);
5786 /* Prepend it to tail. */
5787 gimple_seq_add_seq (&join_seq, *tail);
5788 *tail = join_seq;
5790 inner = true;
5793 /* Mark the end of the sequence. */
5794 lower_oacc_loop_marker (loc, ddvar, true, NULL_TREE, head);
5795 lower_oacc_loop_marker (loc, ddvar, false, NULL_TREE, tail);
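/* A sketch (illustrative only, arguments abridged and internal ordering
   approximate) of the sequences built above for a loop partitioned over
   two levels, say gang and vector:
	HEAD:	IFN_UNIQUE (OACC_HEAD_MARK, 2, tag)
		outer reduction setup ... IFN_UNIQUE (OACC_FORK)
		IFN_UNIQUE (OACC_HEAD_MARK, 1)
		inner reduction setup ... IFN_UNIQUE (OACC_FORK)
		IFN_UNIQUE (OACC_HEAD_MARK)
	TAIL:	IFN_UNIQUE (OACC_TAIL_MARK, 2)
		IFN_UNIQUE (OACC_JOIN) ... inner reduction teardown
		IFN_UNIQUE (OACC_TAIL_MARK, 1)
		IFN_UNIQUE (OACC_JOIN) ... outer reduction teardown
		IFN_UNIQUE (OACC_TAIL_MARK)
   Each level's fork sequence is appended to HEAD and its join sequence
   is prepended to TAIL, so the levels nest around the loop body.  */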
5798 /* If exceptions are enabled, wrap the statements in BODY in a MUST_NOT_THROW
5799 catch handler and return it. This prevents programs from violating the
5800 structured block semantics with throws. */
5802 static gimple_seq
5803 maybe_catch_exception (gimple_seq body)
5805 gimple *g;
5806 tree decl;
5808 if (!flag_exceptions)
5809 return body;
5811 if (lang_hooks.eh_protect_cleanup_actions != NULL)
5812 decl = lang_hooks.eh_protect_cleanup_actions ();
5813 else
5814 decl = builtin_decl_explicit (BUILT_IN_TRAP);
5816 g = gimple_build_eh_must_not_throw (decl);
5817 g = gimple_build_try (body, gimple_seq_alloc_with_stmt (g),
5818 GIMPLE_TRY_CATCH);
5820 return gimple_seq_alloc_with_stmt (g);
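/* Illustrative C++ input (not from the sources) that needs this wrapping:
	#pragma omp parallel
	{
	  may_throw ();	// an escaping exception would leave the region
	}
   With -fexceptions the lowered body is wrapped in a GIMPLE_TRY whose
   handler must not throw, so an exception escaping the structured block
   terminates the program (through the language's cleanup hook, or
   __builtin_trap when there is none) instead of unwinding across the
   region boundary.  */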
5824 /* Routines to lower OMP directives into OMP-GIMPLE. */
5826 /* If ctx is a worksharing context inside a cancellable parallel
5827 region and it isn't nowait, add lhs to its GIMPLE_OMP_RETURN
5828 and a conditional branch to the parallel's cancel_label to handle
5829 cancellation in the implicit barrier. */
5831 static void
5832 maybe_add_implicit_barrier_cancel (omp_context *ctx, gimple_seq *body)
5834 gimple *omp_return = gimple_seq_last_stmt (*body);
5835 gcc_assert (gimple_code (omp_return) == GIMPLE_OMP_RETURN);
5836 if (gimple_omp_return_nowait_p (omp_return))
5837 return;
5838 if (ctx->outer
5839 && gimple_code (ctx->outer->stmt) == GIMPLE_OMP_PARALLEL
5840 && ctx->outer->cancellable)
5842 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_CANCEL);
5843 tree c_bool_type = TREE_TYPE (TREE_TYPE (fndecl));
5844 tree lhs = create_tmp_var (c_bool_type);
5845 gimple_omp_return_set_lhs (omp_return, lhs);
5846 tree fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
5847 gimple *g = gimple_build_cond (NE_EXPR, lhs,
5848 fold_convert (c_bool_type,
5849 boolean_false_node),
5850 ctx->outer->cancel_label, fallthru_label);
5851 gimple_seq_add_stmt (body, g);
5852 gimple_seq_add_stmt (body, gimple_build_label (fallthru_label));
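/* Illustrative user code (not from the sources) exercising this path:
	#pragma omp parallel
	{
	  if (setup_failed ())
	    {
	      #pragma omp cancel parallel
	    }
	  #pragma omp for
	  for (int i = 0; i < n; i++)
	    work (i);
	  // the for loop's implicit barrier must notice a cancellation
	  // requested by another thread
	}
   The worksharing region's GIMPLE_OMP_RETURN receives an artificial LHS
   holding the barrier's cancellation status, followed by a branch to the
   parallel's cancel_label when that status is nonzero.  */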
5856 /* Lower the OpenMP sections directive in the current statement in GSI_P.
5857 CTX is the enclosing OMP context for the current statement. */
5859 static void
5860 lower_omp_sections (gimple_stmt_iterator *gsi_p, omp_context *ctx)
5862 tree block, control;
5863 gimple_stmt_iterator tgsi;
5864 gomp_sections *stmt;
5865 gimple *t;
5866 gbind *new_stmt, *bind;
5867 gimple_seq ilist, dlist, olist, new_body;
5869 stmt = as_a <gomp_sections *> (gsi_stmt (*gsi_p));
5871 push_gimplify_context ();
5873 dlist = NULL;
5874 ilist = NULL;
5875 lower_rec_input_clauses (gimple_omp_sections_clauses (stmt),
5876 &ilist, &dlist, ctx, NULL);
5878 new_body = gimple_omp_body (stmt);
5879 gimple_omp_set_body (stmt, NULL);
5880 tgsi = gsi_start (new_body);
5881 for (; !gsi_end_p (tgsi); gsi_next (&tgsi))
5883 omp_context *sctx;
5884 gimple *sec_start;
5886 sec_start = gsi_stmt (tgsi);
5887 sctx = maybe_lookup_ctx (sec_start);
5888 gcc_assert (sctx);
5890 lower_omp (gimple_omp_body_ptr (sec_start), sctx);
5891 gsi_insert_seq_after (&tgsi, gimple_omp_body (sec_start),
5892 GSI_CONTINUE_LINKING);
5893 gimple_omp_set_body (sec_start, NULL);
5895 if (gsi_one_before_end_p (tgsi))
5897 gimple_seq l = NULL;
5898 lower_lastprivate_clauses (gimple_omp_sections_clauses (stmt), NULL,
5899 &l, ctx);
5900 gsi_insert_seq_after (&tgsi, l, GSI_CONTINUE_LINKING);
5901 gimple_omp_section_set_last (sec_start);
5904 gsi_insert_after (&tgsi, gimple_build_omp_return (false),
5905 GSI_CONTINUE_LINKING);
5908 block = make_node (BLOCK);
5909 bind = gimple_build_bind (NULL, new_body, block);
5911 olist = NULL;
5912 lower_reduction_clauses (gimple_omp_sections_clauses (stmt), &olist, ctx);
5914 block = make_node (BLOCK);
5915 new_stmt = gimple_build_bind (NULL, NULL, block);
5916 gsi_replace (gsi_p, new_stmt, true);
5918 pop_gimplify_context (new_stmt);
5919 gimple_bind_append_vars (new_stmt, ctx->block_vars);
5920 BLOCK_VARS (block) = gimple_bind_vars (bind);
5921 if (BLOCK_VARS (block))
5922 TREE_USED (block) = 1;
5924 new_body = NULL;
5925 gimple_seq_add_seq (&new_body, ilist);
5926 gimple_seq_add_stmt (&new_body, stmt);
5927 gimple_seq_add_stmt (&new_body, gimple_build_omp_sections_switch ());
5928 gimple_seq_add_stmt (&new_body, bind);
5930 control = create_tmp_var (unsigned_type_node, ".section");
5931 t = gimple_build_omp_continue (control, control);
5932 gimple_omp_sections_set_control (stmt, control);
5933 gimple_seq_add_stmt (&new_body, t);
5935 gimple_seq_add_seq (&new_body, olist);
5936 if (ctx->cancellable)
5937 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
5938 gimple_seq_add_seq (&new_body, dlist);
5940 new_body = maybe_catch_exception (new_body);
5942 bool nowait = omp_find_clause (gimple_omp_sections_clauses (stmt),
5943 OMP_CLAUSE_NOWAIT) != NULL_TREE;
5944 t = gimple_build_omp_return (nowait);
5945 gimple_seq_add_stmt (&new_body, t);
5946 maybe_add_implicit_barrier_cancel (ctx, &new_body);
5948 gimple_bind_set_body (new_stmt, new_body);
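/* Illustrative input (not from the sources):
	#pragma omp sections
	{
	  #pragma omp section
	  work_a ();
	  #pragma omp section
	  work_b ();
	}
   Each section body is lowered in place, the lastprivate handling is
   attached to the last section, and the .section control variable
   together with the GIMPLE_OMP_SECTIONS_SWITCH and GIMPLE_OMP_CONTINUE
   statements added above drives the dispatch code built later by
   pass_expand_omp.  */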
5952 /* A subroutine of lower_omp_single. Expand the simple form of
5953 a GIMPLE_OMP_SINGLE, without a copyprivate clause:
5955 if (GOMP_single_start ())
5956 BODY;
5957 [ GOMP_barrier (); ] -> unless 'nowait' is present.
5959 FIXME. It may be better to delay expanding the logic of this until
5960 pass_expand_omp. The expanded logic may make the job more difficult
5961 for a synchronization analysis pass.  */
5963 static void
5964 lower_omp_single_simple (gomp_single *single_stmt, gimple_seq *pre_p)
5966 location_t loc = gimple_location (single_stmt);
5967 tree tlabel = create_artificial_label (loc);
5968 tree flabel = create_artificial_label (loc);
5969 gimple *call, *cond;
5970 tree lhs, decl;
5972 decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_START);
5973 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (decl)));
5974 call = gimple_build_call (decl, 0);
5975 gimple_call_set_lhs (call, lhs);
5976 gimple_seq_add_stmt (pre_p, call);
5978 cond = gimple_build_cond (EQ_EXPR, lhs,
5979 fold_convert_loc (loc, TREE_TYPE (lhs),
5980 boolean_true_node),
5981 tlabel, flabel);
5982 gimple_seq_add_stmt (pre_p, cond);
5983 gimple_seq_add_stmt (pre_p, gimple_build_label (tlabel));
5984 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
5985 gimple_seq_add_stmt (pre_p, gimple_build_label (flabel));
5989 /* A subroutine of lower_omp_single. Expand the simple form of
5990 a GIMPLE_OMP_SINGLE, with a copyprivate clause:
5992 #pragma omp single copyprivate (a, b, c)
5994 Create a new structure to hold copies of 'a', 'b' and 'c' and emit:
5997 if ((copyout_p = GOMP_single_copy_start ()) == NULL)
5999 BODY;
6000 copyout.a = a;
6001 copyout.b = b;
6002 copyout.c = c;
6003 GOMP_single_copy_end (&copyout);
6005 else
6007 a = copyout_p->a;
6008 b = copyout_p->b;
6009 c = copyout_p->c;
6011 GOMP_barrier ();
6014 FIXME. It may be better to delay expanding the logic of this until
6015 pass_expand_omp. The expanded logic may make the job more difficult
6016 for a synchronization analysis pass.  */
6018 static void
6019 lower_omp_single_copy (gomp_single *single_stmt, gimple_seq *pre_p,
6020 omp_context *ctx)
6022 tree ptr_type, t, l0, l1, l2, bfn_decl;
6023 gimple_seq copyin_seq;
6024 location_t loc = gimple_location (single_stmt);
6026 ctx->sender_decl = create_tmp_var (ctx->record_type, ".omp_copy_o");
6028 ptr_type = build_pointer_type (ctx->record_type);
6029 ctx->receiver_decl = create_tmp_var (ptr_type, ".omp_copy_i");
6031 l0 = create_artificial_label (loc);
6032 l1 = create_artificial_label (loc);
6033 l2 = create_artificial_label (loc);
6035 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_START);
6036 t = build_call_expr_loc (loc, bfn_decl, 0);
6037 t = fold_convert_loc (loc, ptr_type, t);
6038 gimplify_assign (ctx->receiver_decl, t, pre_p);
6040 t = build2 (EQ_EXPR, boolean_type_node, ctx->receiver_decl,
6041 build_int_cst (ptr_type, 0));
6042 t = build3 (COND_EXPR, void_type_node, t,
6043 build_and_jump (&l0), build_and_jump (&l1));
6044 gimplify_and_add (t, pre_p);
6046 gimple_seq_add_stmt (pre_p, gimple_build_label (l0));
6048 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
6050 copyin_seq = NULL;
6051 lower_copyprivate_clauses (gimple_omp_single_clauses (single_stmt), pre_p,
6052 &copyin_seq, ctx);
6054 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
6055 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_END);
6056 t = build_call_expr_loc (loc, bfn_decl, 1, t);
6057 gimplify_and_add (t, pre_p);
6059 t = build_and_jump (&l2);
6060 gimplify_and_add (t, pre_p);
6062 gimple_seq_add_stmt (pre_p, gimple_build_label (l1));
6064 gimple_seq_add_seq (pre_p, copyin_seq);
6066 gimple_seq_add_stmt (pre_p, gimple_build_label (l2));
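/* Illustrative usage (not from the sources): copyprivate broadcasts the
   value produced by the single executing thread into every thread's
   private copy via the GOMP_single_copy_start/end protocol built above:
	int seed;
	#pragma omp parallel private (seed)
	{
	  #pragma omp single copyprivate (seed)
	  seed = read_seed ();	// hypothetical helper
	  use (seed);		// all threads see the same seed here
	}
   */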
6070 /* Expand code for an OpenMP single directive. */
6072 static void
6073 lower_omp_single (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6075 tree block;
6076 gomp_single *single_stmt = as_a <gomp_single *> (gsi_stmt (*gsi_p));
6077 gbind *bind;
6078 gimple_seq bind_body, bind_body_tail = NULL, dlist;
6080 push_gimplify_context ();
6082 block = make_node (BLOCK);
6083 bind = gimple_build_bind (NULL, NULL, block);
6084 gsi_replace (gsi_p, bind, true);
6085 bind_body = NULL;
6086 dlist = NULL;
6087 lower_rec_input_clauses (gimple_omp_single_clauses (single_stmt),
6088 &bind_body, &dlist, ctx, NULL);
6089 lower_omp (gimple_omp_body_ptr (single_stmt), ctx);
6091 gimple_seq_add_stmt (&bind_body, single_stmt);
6093 if (ctx->record_type)
6094 lower_omp_single_copy (single_stmt, &bind_body, ctx);
6095 else
6096 lower_omp_single_simple (single_stmt, &bind_body);
6098 gimple_omp_set_body (single_stmt, NULL);
6100 gimple_seq_add_seq (&bind_body, dlist);
6102 bind_body = maybe_catch_exception (bind_body);
6104 bool nowait = omp_find_clause (gimple_omp_single_clauses (single_stmt),
6105 OMP_CLAUSE_NOWAIT) != NULL_TREE;
6106 gimple *g = gimple_build_omp_return (nowait);
6107 gimple_seq_add_stmt (&bind_body_tail, g);
6108 maybe_add_implicit_barrier_cancel (ctx, &bind_body_tail);
6109 if (ctx->record_type)
6111 gimple_stmt_iterator gsi = gsi_start (bind_body_tail);
6112 tree clobber = build_constructor (ctx->record_type, NULL);
6113 TREE_THIS_VOLATILE (clobber) = 1;
6114 gsi_insert_after (&gsi, gimple_build_assign (ctx->sender_decl,
6115 clobber), GSI_SAME_STMT);
6117 gimple_seq_add_seq (&bind_body, bind_body_tail);
6118 gimple_bind_set_body (bind, bind_body);
6120 pop_gimplify_context (bind);
6122 gimple_bind_append_vars (bind, ctx->block_vars);
6123 BLOCK_VARS (block) = ctx->block_vars;
6124 if (BLOCK_VARS (block))
6125 TREE_USED (block) = 1;
6129 /* Expand code for an OpenMP master directive. */
6131 static void
6132 lower_omp_master (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6134 tree block, lab = NULL, x, bfn_decl;
6135 gimple *stmt = gsi_stmt (*gsi_p);
6136 gbind *bind;
6137 location_t loc = gimple_location (stmt);
6138 gimple_seq tseq;
6140 push_gimplify_context ();
6142 block = make_node (BLOCK);
6143 bind = gimple_build_bind (NULL, NULL, block);
6144 gsi_replace (gsi_p, bind, true);
6145 gimple_bind_add_stmt (bind, stmt);
6147 bfn_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
6148 x = build_call_expr_loc (loc, bfn_decl, 0);
6149 x = build2 (EQ_EXPR, boolean_type_node, x, integer_zero_node);
6150 x = build3 (COND_EXPR, void_type_node, x, NULL, build_and_jump (&lab));
6151 tseq = NULL;
6152 gimplify_and_add (x, &tseq);
6153 gimple_bind_add_seq (bind, tseq);
6155 lower_omp (gimple_omp_body_ptr (stmt), ctx);
6156 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
6157 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
6158 gimple_omp_set_body (stmt, NULL);
6160 gimple_bind_add_stmt (bind, gimple_build_label (lab));
6162 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
6164 pop_gimplify_context (bind);
6166 gimple_bind_append_vars (bind, ctx->block_vars);
6167 BLOCK_VARS (block) = ctx->block_vars;
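/* Illustratively (not from the sources), the code above turns
	#pragma omp master
	  x = init ();
   into the moral equivalent of
	if (omp_get_thread_num () == 0)
	  x = init ();
   and ends the region with a nowait GIMPLE_OMP_RETURN, since master
   implies no barrier.  */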
6171 /* Expand code for an OpenMP taskgroup directive. */
6173 static void
6174 lower_omp_taskgroup (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6176 gimple *stmt = gsi_stmt (*gsi_p);
6177 gcall *x;
6178 gbind *bind;
6179 tree block = make_node (BLOCK);
6181 bind = gimple_build_bind (NULL, NULL, block);
6182 gsi_replace (gsi_p, bind, true);
6183 gimple_bind_add_stmt (bind, stmt);
6185 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_START),
6186 			 0);
6187 gimple_bind_add_stmt (bind, x);
6189 lower_omp (gimple_omp_body_ptr (stmt), ctx);
6190 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
6191 gimple_omp_set_body (stmt, NULL);
6193 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
6195 gimple_bind_append_vars (bind, ctx->block_vars);
6196 BLOCK_VARS (block) = ctx->block_vars;
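/* Illustrative input (not from the sources):
	#pragma omp taskgroup
	{
	  #pragma omp task
	  work_a ();
	  #pragma omp task
	  work_b ();
	}
   Only the GOMP_taskgroup_start call is emitted here, ahead of the body;
   the matching GOMP_taskgroup_end, which waits for both tasks and their
   descendants, is added later when the region is expanded.  */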
6200 /* Fold the OMP_ORDERED_CLAUSES for the OMP_ORDERED in STMT if possible. */
6202 static void
6203 lower_omp_ordered_clauses (gimple_stmt_iterator *gsi_p, gomp_ordered *ord_stmt,
6204 omp_context *ctx)
6206 struct omp_for_data fd;
6207 if (!ctx->outer || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR)
6208 return;
6210 unsigned int len = gimple_omp_for_collapse (ctx->outer->stmt);
6211 struct omp_for_data_loop *loops = XALLOCAVEC (struct omp_for_data_loop, len);
6212 omp_extract_for_data (as_a <gomp_for *> (ctx->outer->stmt), &fd, loops);
6213 if (!fd.ordered)
6214 return;
6216 tree *list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
6217 tree c = gimple_omp_ordered_clauses (ord_stmt);
6218 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
6219 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
6221 /* Merge depend clauses from multiple adjacent
6222 #pragma omp ordered depend(sink:...) constructs
6223 into one #pragma omp ordered depend(sink:...), so that
6224 we can optimize them together. */
6225 gimple_stmt_iterator gsi = *gsi_p;
6226 gsi_next (&gsi);
6227 while (!gsi_end_p (gsi))
6229 gimple *stmt = gsi_stmt (gsi);
6230 if (is_gimple_debug (stmt)
6231 || gimple_code (stmt) == GIMPLE_NOP)
6233 gsi_next (&gsi);
6234 continue;
6236 if (gimple_code (stmt) != GIMPLE_OMP_ORDERED)
6237 break;
6238 gomp_ordered *ord_stmt2 = as_a <gomp_ordered *> (stmt);
6239 c = gimple_omp_ordered_clauses (ord_stmt2);
6240 if (c == NULL_TREE
6241 || OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND
6242 || OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
6243 break;
6244 while (*list_p)
6245 list_p = &OMP_CLAUSE_CHAIN (*list_p);
6246 *list_p = c;
6247 gsi_remove (&gsi, true);
6251 /* Canonicalize sink dependence clauses into one folded clause if
6252 possible.
6254 The basic algorithm is to create a sink vector whose first
6255 element is the GCD of all the first elements, and whose remaining
6256 elements are the minimum of the subsequent columns.
6258 We ignore dependence vectors whose first element is zero because
6259 such dependencies are known to be executed by the same thread.
6261 We take into account the direction of the loop, so a minimum
6262 becomes a maximum if the loop is iterating forwards. We also
6263 ignore sink clauses where the loop direction is unknown, or where
6264 the offsets are clearly invalid because they are not a multiple
6265 of the loop increment.
6267 For example:
6269 #pragma omp for ordered(2)
6270 for (i=0; i < N; ++i)
6271 for (j=0; j < M; ++j)
6273 #pragma omp ordered \
6274 depend(sink:i-8,j-2) \
6275 depend(sink:i,j-1) \ // Completely ignored because i+0.
6276 depend(sink:i-4,j-3) \
6277 depend(sink:i-6,j-4)
6278 #pragma omp ordered depend(source)
6281 Folded clause is:
6283 depend(sink:-gcd(8,4,6),-min(2,3,4))
6284 -or-
6285 depend(sink:-2,-2)
6288 /* FIXME: Computing GCDs where the first element is zero is
6289 non-trivial in the presence of collapsed loops. Do this later. */
6290 if (fd.collapse > 1)
6291 return;
6293 wide_int *folded_deps = XALLOCAVEC (wide_int, 2 * len - 1);
6294 memset (folded_deps, 0, sizeof (*folded_deps) * (2 * len - 1));
6295 tree folded_dep = NULL_TREE;
6296 /* TRUE if the first dimension's offset is negative. */
6297 bool neg_offset_p = false;
6299 list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
6300 unsigned int i;
6301 while ((c = *list_p) != NULL)
6303 bool remove = false;
6305 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND);
6306 if (OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
6307 goto next_ordered_clause;
6309 tree vec;
6310 for (vec = OMP_CLAUSE_DECL (c), i = 0;
6311 vec && TREE_CODE (vec) == TREE_LIST;
6312 vec = TREE_CHAIN (vec), ++i)
6314 gcc_assert (i < len);
6316 /* omp_extract_for_data has canonicalized the condition. */
6317 gcc_assert (fd.loops[i].cond_code == LT_EXPR
6318 || fd.loops[i].cond_code == GT_EXPR);
6319 bool forward = fd.loops[i].cond_code == LT_EXPR;
6320 bool maybe_lexically_later = true;
6322 /* While the committee makes up its mind, bail if we have any
6323 non-constant steps. */
6324 if (TREE_CODE (fd.loops[i].step) != INTEGER_CST)
6325 goto lower_omp_ordered_ret;
6327 tree itype = TREE_TYPE (TREE_VALUE (vec));
6328 if (POINTER_TYPE_P (itype))
6329 itype = sizetype;
6330 wide_int offset = wide_int::from (TREE_PURPOSE (vec),
6331 TYPE_PRECISION (itype),
6332 TYPE_SIGN (itype));
6334 /* Ignore invalid offsets that are not multiples of the step. */
6335 if (!wi::multiple_of_p
6336 (wi::abs (offset), wi::abs ((wide_int) fd.loops[i].step),
6337 UNSIGNED))
6339 warning_at (OMP_CLAUSE_LOCATION (c), 0,
6340 "ignoring sink clause with offset that is not "
6341 "a multiple of the loop step");
6342 remove = true;
6343 goto next_ordered_clause;
6346 /* Calculate the first dimension. The first dimension of
6347 the folded dependency vector is the GCD of the first
6348 elements, while ignoring any first elements whose offset
6349 is 0. */
6350 if (i == 0)
6352 /* Ignore dependence vectors whose first dimension is 0. */
6353 if (offset == 0)
6355 remove = true;
6356 goto next_ordered_clause;
6358 else
6360 if (!TYPE_UNSIGNED (itype) && (forward ^ wi::neg_p (offset)))
6362 error_at (OMP_CLAUSE_LOCATION (c),
6363 "first offset must be in opposite direction "
6364 "of loop iterations");
6365 goto lower_omp_ordered_ret;
6367 if (forward)
6368 offset = -offset;
6369 neg_offset_p = forward;
6370 /* Initialize the first time around. */
6371 if (folded_dep == NULL_TREE)
6373 folded_dep = c;
6374 folded_deps[0] = offset;
6376 else
6377 folded_deps[0] = wi::gcd (folded_deps[0],
6378 offset, UNSIGNED);
6381 /* Calculate minimum for the remaining dimensions. */
6382 else
6384 folded_deps[len + i - 1] = offset;
6385 if (folded_dep == c)
6386 folded_deps[i] = offset;
6387 else if (maybe_lexically_later
6388 && !wi::eq_p (folded_deps[i], offset))
6390 if (forward ^ wi::gts_p (folded_deps[i], offset))
6392 unsigned int j;
6393 folded_dep = c;
6394 for (j = 1; j <= i; j++)
6395 folded_deps[j] = folded_deps[len + j - 1];
6397 else
6398 maybe_lexically_later = false;
6402 gcc_assert (i == len);
6404 remove = true;
6406 next_ordered_clause:
6407 if (remove)
6408 *list_p = OMP_CLAUSE_CHAIN (c);
6409 else
6410 list_p = &OMP_CLAUSE_CHAIN (c);
6413 if (folded_dep)
6415 if (neg_offset_p)
6416 folded_deps[0] = -folded_deps[0];
6418 tree itype = TREE_TYPE (TREE_VALUE (OMP_CLAUSE_DECL (folded_dep)));
6419 if (POINTER_TYPE_P (itype))
6420 itype = sizetype;
6422 TREE_PURPOSE (OMP_CLAUSE_DECL (folded_dep))
6423 = wide_int_to_tree (itype, folded_deps[0]);
6424 OMP_CLAUSE_CHAIN (folded_dep) = gimple_omp_ordered_clauses (ord_stmt);
6425 *gimple_omp_ordered_clauses_ptr (ord_stmt) = folded_dep;
6428 lower_omp_ordered_ret:
6430 /* Ordered without clauses is #pragma omp ordered threads, while we want
6431 a nop instead if we remove all clauses. */
6432 if (gimple_omp_ordered_clauses (ord_stmt) == NULL_TREE)
6433 gsi_replace (gsi_p, gimple_build_nop (), true);
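/* A standalone model (illustrative only, not GCC code) of the folding
   rule documented above, restricted to constant offsets on forward
   loops; the real code above additionally handles loop direction,
   pointer iterators, diagnostics and the wide_int representation:
	#include <stdlib.h>
	static long gcd_l (long a, long b)
	{
	  while (b) { long t = a % b; a = b; b = t; }
	  return a;
	}
	// VEC holds N sink vectors of length LEN, offsets stored as in
	// depend(sink:i-8,j-2) => {-8,-2}.  Vectors whose first element
	// is 0 are same-thread dependencies and are skipped.
	static void
	fold_sink_vectors (int n, int len, long vec[n][len], long out[len])
	{
	  int seen = 0;
	  for (int v = 0; v < n; v++)
	    {
	      if (vec[v][0] == 0)
		continue;
	      for (int j = 0; j < len; j++)
		{
		  long mag = labs (vec[v][j]);
		  if (!seen)
		    out[j] = mag;
		  else if (j == 0)
		    out[0] = gcd_l (out[0], mag);	// GCD of first column
		  else if (mag < out[j])
		    out[j] = mag;			// min of the rest
		}
	      seen = 1;
	    }
	  for (int j = 0; j < len; j++)
	    out[j] = -out[j];
	}
   Feeding it the four vectors from the example above yields {-2, -2},
   matching the folded depend(sink:-2,-2) clause.  */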
6437 /* Expand code for an OpenMP ordered directive. */
6439 static void
6440 lower_omp_ordered (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6442 tree block;
6443 gimple *stmt = gsi_stmt (*gsi_p), *g;
6444 gomp_ordered *ord_stmt = as_a <gomp_ordered *> (stmt);
6445 gcall *x;
6446 gbind *bind;
6447 bool simd = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
6448 OMP_CLAUSE_SIMD);
6449 /* FIXME: this should check presence of OMP_CLAUSE__SIMT_ on the enclosing
6450 loop. */
6451 bool maybe_simt
6452 = simd && omp_maybe_offloaded_ctx (ctx) && omp_max_simt_vf () > 1;
6453 bool threads = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
6454 OMP_CLAUSE_THREADS);
6456 if (omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
6457 OMP_CLAUSE_DEPEND))
6459 /* FIXME: This needs to be moved to the expansion, to verify various
6460 conditions that are only testable on a cfg with dominators computed, and
6461 also because all the depend clauses to be merged still might need to be
6462 available for the runtime checks.  */
6463 if (0)
6464 lower_omp_ordered_clauses (gsi_p, ord_stmt, ctx);
6465 return;
6468 push_gimplify_context ();
6470 block = make_node (BLOCK);
6471 bind = gimple_build_bind (NULL, NULL, block);
6472 gsi_replace (gsi_p, bind, true);
6473 gimple_bind_add_stmt (bind, stmt);
6475 if (simd)
6477 x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_START, 1,
6478 build_int_cst (NULL_TREE, threads));
6479 cfun->has_simduid_loops = true;
6481 else
6482 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_START),
6483 			   0);
6484 gimple_bind_add_stmt (bind, x);
6486 tree counter = NULL_TREE, test = NULL_TREE, body = NULL_TREE;
6487 if (maybe_simt)
6489 counter = create_tmp_var (integer_type_node);
6490 g = gimple_build_call_internal (IFN_GOMP_SIMT_LANE, 0);
6491 gimple_call_set_lhs (g, counter);
6492 gimple_bind_add_stmt (bind, g);
6494 body = create_artificial_label (UNKNOWN_LOCATION);
6495 test = create_artificial_label (UNKNOWN_LOCATION);
6496 gimple_bind_add_stmt (bind, gimple_build_label (body));
6498 tree simt_pred = create_tmp_var (integer_type_node);
6499 g = gimple_build_call_internal (IFN_GOMP_SIMT_ORDERED_PRED, 1, counter);
6500 gimple_call_set_lhs (g, simt_pred);
6501 gimple_bind_add_stmt (bind, g);
6503 tree t = create_artificial_label (UNKNOWN_LOCATION);
6504 g = gimple_build_cond (EQ_EXPR, simt_pred, integer_zero_node, t, test);
6505 gimple_bind_add_stmt (bind, g);
6507 gimple_bind_add_stmt (bind, gimple_build_label (t));
6509 lower_omp (gimple_omp_body_ptr (stmt), ctx);
6510 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
6511 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
6512 gimple_omp_set_body (stmt, NULL);
6514 if (maybe_simt)
6516 gimple_bind_add_stmt (bind, gimple_build_label (test));
6517 g = gimple_build_assign (counter, MINUS_EXPR, counter, integer_one_node);
6518 gimple_bind_add_stmt (bind, g);
6520 tree c = build2 (GE_EXPR, boolean_type_node, counter, integer_zero_node);
6521 tree nonneg = create_tmp_var (integer_type_node);
6522 gimple_seq tseq = NULL;
6523 gimplify_assign (nonneg, fold_convert (integer_type_node, c), &tseq);
6524 gimple_bind_add_seq (bind, tseq);
6526 g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY, 1, nonneg);
6527 gimple_call_set_lhs (g, nonneg);
6528 gimple_bind_add_stmt (bind, g);
6530 tree end = create_artificial_label (UNKNOWN_LOCATION);
6531 g = gimple_build_cond (NE_EXPR, nonneg, integer_zero_node, body, end);
6532 gimple_bind_add_stmt (bind, g);
6534 gimple_bind_add_stmt (bind, gimple_build_label (end));
6536 if (simd)
6537 x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_END, 1,
6538 build_int_cst (NULL_TREE, threads));
6539 else
6540 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_END),
6541 			   0);
6542 gimple_bind_add_stmt (bind, x);
6544 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
6546 pop_gimplify_context (bind);
6548 gimple_bind_append_vars (bind, ctx->block_vars);
6549 BLOCK_VARS (block) = gimple_bind_vars (bind);
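/* Illustrative use (not from the sources):
	#pragma omp for ordered
	for (int i = 0; i < n; i++)
	  {
	    compute (i);	// runs in parallel
	    #pragma omp ordered
	    emit (i);		// runs in iteration order
	  }
   The ordered body is bracketed by GOMP_ordered_start/end calls, or by
   the IFN_GOMP_SIMD_ORDERED_* internal functions inside a simd loop; the
   SIMT variant above additionally steps through the lanes one at a time
   so that each lane executes the body in order.  */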
6553 /* Gimplify a GIMPLE_OMP_CRITICAL statement. This is a relatively simple
6554 substitution of a couple of function calls.  But the NAMED case
6555 requires that languages coordinate on a symbol name.  It is therefore
6556 best put here in common code. */
6558 static GTY(()) hash_map<tree, tree> *critical_name_mutexes;
6560 static void
6561 lower_omp_critical (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6563 tree block;
6564 tree name, lock, unlock;
6565 gomp_critical *stmt = as_a <gomp_critical *> (gsi_stmt (*gsi_p));
6566 gbind *bind;
6567 location_t loc = gimple_location (stmt);
6568 gimple_seq tbody;
6570 name = gimple_omp_critical_name (stmt);
6571 if (name)
6573 tree decl;
6575 if (!critical_name_mutexes)
6576 critical_name_mutexes = hash_map<tree, tree>::create_ggc (10);
6578 tree *n = critical_name_mutexes->get (name);
6579 if (n == NULL)
6581 char *new_str;
6583 decl = create_tmp_var_raw (ptr_type_node);
6585 new_str = ACONCAT ((".gomp_critical_user_",
6586 IDENTIFIER_POINTER (name), NULL));
6587 DECL_NAME (decl) = get_identifier (new_str);
6588 TREE_PUBLIC (decl) = 1;
6589 TREE_STATIC (decl) = 1;
6590 DECL_COMMON (decl) = 1;
6591 DECL_ARTIFICIAL (decl) = 1;
6592 DECL_IGNORED_P (decl) = 1;
6594 varpool_node::finalize_decl (decl);
6596 critical_name_mutexes->put (name, decl);
6598 else
6599 decl = *n;
6601 /* If '#pragma omp critical' is inside offloaded region or
6602 inside function marked as offloadable, the symbol must be
6603 marked as offloadable too. */
6604 omp_context *octx;
6605 if (cgraph_node::get (current_function_decl)->offloadable)
6606 varpool_node::get_create (decl)->offloadable = 1;
6607 else
6608 for (octx = ctx->outer; octx; octx = octx->outer)
6609 if (is_gimple_omp_offloaded (octx->stmt))
6611 varpool_node::get_create (decl)->offloadable = 1;
6612 break;
6615 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_START);
6616 lock = build_call_expr_loc (loc, lock, 1,
6617 build_fold_addr_expr_loc (loc, decl));
6619 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_END);
6620 unlock = build_call_expr_loc (loc, unlock, 1,
6621 build_fold_addr_expr_loc (loc, decl));
6623 else
6625 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_START);
6626 lock = build_call_expr_loc (loc, lock, 0);
6628 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_END);
6629 unlock = build_call_expr_loc (loc, unlock, 0);
6632 push_gimplify_context ();
6634 block = make_node (BLOCK);
6635 bind = gimple_build_bind (NULL, NULL, block);
6636 gsi_replace (gsi_p, bind, true);
6637 gimple_bind_add_stmt (bind, stmt);
6639 tbody = gimple_bind_body (bind);
6640 gimplify_and_add (lock, &tbody);
6641 gimple_bind_set_body (bind, tbody);
6643 lower_omp (gimple_omp_body_ptr (stmt), ctx);
6644 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
6645 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
6646 gimple_omp_set_body (stmt, NULL);
6648 tbody = gimple_bind_body (bind);
6649 gimplify_and_add (unlock, &tbody);
6650 gimple_bind_set_body (bind, tbody);
6652 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
6654 pop_gimplify_context (bind);
6655 gimple_bind_append_vars (bind, ctx->block_vars);
6656 BLOCK_VARS (block) = gimple_bind_vars (bind);
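/* Illustrative input (not from the sources):
	#pragma omp critical (io)
	printf ("%d\n", x);
   For the named form every translation unit derives the same public,
   common symbol ".gomp_critical_user_io" from the name, so the body is
   bracketed by GOMP_critical_name_start/end on one program-wide mutex;
   the unnamed form uses GOMP_critical_start/end instead.  */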
6659 /* A subroutine of lower_omp_for. Generate code to emit the predicate
6660 for a lastprivate clause. Given a loop control predicate of (V
6661 cond N2), we gate the clause on (!(V cond N2)). The lowered form
6662 is appended to *DLIST, and iterator initialization is appended to
6663 *BODY_P. */
6665 static void
6666 lower_omp_for_lastprivate (struct omp_for_data *fd, gimple_seq *body_p,
6667 gimple_seq *dlist, struct omp_context *ctx)
6669 tree clauses, cond, vinit;
6670 enum tree_code cond_code;
6671 gimple_seq stmts;
6673 cond_code = fd->loop.cond_code;
6674 cond_code = cond_code == LT_EXPR ? GE_EXPR : LE_EXPR;
6676 /* When possible, use a strict equality expression. This can let VRP
6677 type optimizations deduce the value and remove a copy. */
6678 if (tree_fits_shwi_p (fd->loop.step))
6680 HOST_WIDE_INT step = tree_to_shwi (fd->loop.step);
6681 if (step == 1 || step == -1)
6682 cond_code = EQ_EXPR;
6685 if (gimple_omp_for_kind (fd->for_stmt) == GF_OMP_FOR_KIND_GRID_LOOP
6686 || gimple_omp_for_grid_phony (fd->for_stmt))
6687 cond = omp_grid_lastprivate_predicate (fd);
6688 else
6690 tree n2 = fd->loop.n2;
6691 if (fd->collapse > 1
6692 && TREE_CODE (n2) != INTEGER_CST
6693 && gimple_omp_for_combined_into_p (fd->for_stmt))
6695 struct omp_context *taskreg_ctx = NULL;
6696 if (gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
6698 gomp_for *gfor = as_a <gomp_for *> (ctx->outer->stmt);
6699 if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_FOR
6700 || gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_DISTRIBUTE)
6702 if (gimple_omp_for_combined_into_p (gfor))
6704 gcc_assert (ctx->outer->outer
6705 && is_parallel_ctx (ctx->outer->outer));
6706 taskreg_ctx = ctx->outer->outer;
6708 else
6710 struct omp_for_data outer_fd;
6711 omp_extract_for_data (gfor, &outer_fd, NULL);
6712 n2 = fold_convert (TREE_TYPE (n2), outer_fd.loop.n2);
6715 else if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_TASKLOOP)
6716 taskreg_ctx = ctx->outer->outer;
6718 else if (is_taskreg_ctx (ctx->outer))
6719 taskreg_ctx = ctx->outer;
6720 if (taskreg_ctx)
6722 int i;
6723 tree taskreg_clauses
6724 = gimple_omp_taskreg_clauses (taskreg_ctx->stmt);
6725 tree innerc = omp_find_clause (taskreg_clauses,
6726 OMP_CLAUSE__LOOPTEMP_);
6727 gcc_assert (innerc);
6728 for (i = 0; i < fd->collapse; i++)
6730 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
6731 OMP_CLAUSE__LOOPTEMP_);
6732 gcc_assert (innerc);
6734 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
6735 OMP_CLAUSE__LOOPTEMP_);
6736 if (innerc)
6737 n2 = fold_convert (TREE_TYPE (n2),
6738 lookup_decl (OMP_CLAUSE_DECL (innerc),
6739 taskreg_ctx));
6742 cond = build2 (cond_code, boolean_type_node, fd->loop.v, n2);
6745 clauses = gimple_omp_for_clauses (fd->for_stmt);
6746 stmts = NULL;
6747 lower_lastprivate_clauses (clauses, cond, &stmts, ctx);
6748 if (!gimple_seq_empty_p (stmts))
6750 gimple_seq_add_seq (&stmts, *dlist);
6751 *dlist = stmts;
6753 /* Optimize: v = 0; is usually cheaper than v = some_other_constant. */
6754 vinit = fd->loop.n1;
6755 if (cond_code == EQ_EXPR
6756 && tree_fits_shwi_p (fd->loop.n2)
6757 && ! integer_zerop (fd->loop.n2))
6758 vinit = build_int_cst (TREE_TYPE (fd->loop.v), 0);
6759 else
6760 vinit = unshare_expr (vinit);
6762 /* Initialize the iterator variable, so that threads that don't execute
6763 any iterations don't execute the lastprivate clauses by accident. */
6764 gimplify_assign (fd->loop.v, vinit, body_p);
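/* Illustrative input (not from the sources):
	#pragma omp for lastprivate (x)
	for (i = 0; i < n; i++)
	  x = f (i);
   Only the thread that executed the sequentially last iteration
   satisfies the gating predicate (!(V cond N2), or V == N2 when the
   step is +-1) after its loop exits, so only it copies its private x
   back to the original; the up-front initialization of V keeps threads
   that received no iterations from running the copy-out by accident.  */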
6769 /* Lower code for an OMP loop directive. */
6771 static void
6772 lower_omp_for (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6774 tree *rhs_p, block;
6775 struct omp_for_data fd, *fdp = NULL;
6776 gomp_for *stmt = as_a <gomp_for *> (gsi_stmt (*gsi_p));
6777 gbind *new_stmt;
6778 gimple_seq omp_for_body, body, dlist;
6779 gimple_seq oacc_head = NULL, oacc_tail = NULL;
6780 size_t i;
6782 push_gimplify_context ();
6784 lower_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
6786 block = make_node (BLOCK);
6787 new_stmt = gimple_build_bind (NULL, NULL, block);
6788 /* Replace at gsi right away, so that 'stmt' is no longer a member
6789 of a sequence, as we're going to add it to a different
6790 one below. */
6791 gsi_replace (gsi_p, new_stmt, true);
6793 /* Move declaration of temporaries in the loop body before we make
6794 it go away. */
6795 omp_for_body = gimple_omp_body (stmt);
6796 if (!gimple_seq_empty_p (omp_for_body)
6797 && gimple_code (gimple_seq_first_stmt (omp_for_body)) == GIMPLE_BIND)
6799 gbind *inner_bind
6800 = as_a <gbind *> (gimple_seq_first_stmt (omp_for_body));
6801 tree vars = gimple_bind_vars (inner_bind);
6802 gimple_bind_append_vars (new_stmt, vars);
6803 /* bind_vars/BLOCK_VARS are being moved to new_stmt/block, don't
6804 keep them on the inner_bind and its block. */
6805 gimple_bind_set_vars (inner_bind, NULL_TREE);
6806 if (gimple_bind_block (inner_bind))
6807 BLOCK_VARS (gimple_bind_block (inner_bind)) = NULL_TREE;
6810 if (gimple_omp_for_combined_into_p (stmt))
6812 omp_extract_for_data (stmt, &fd, NULL);
6813 fdp = &fd;
6815 /* We need two temporaries with fd.loop.v type (istart/iend)
6816 and then (fd.collapse - 1) temporaries with the same
6817 type for count2 ... countN-1 vars if not constant. */
6818 size_t count = 2;
6819 tree type = fd.iter_type;
6820 if (fd.collapse > 1
6821 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
6822 count += fd.collapse - 1;
6823 bool taskreg_for
6824 = (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR
6825 || gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP);
6826 tree outerc = NULL, *pc = gimple_omp_for_clauses_ptr (stmt);
6827 tree simtc = NULL;
6828 tree clauses = *pc;
6829 if (taskreg_for)
6830 outerc
6831 = omp_find_clause (gimple_omp_taskreg_clauses (ctx->outer->stmt),
6832 OMP_CLAUSE__LOOPTEMP_);
6833 if (ctx->simt_stmt)
6834 simtc = omp_find_clause (gimple_omp_for_clauses (ctx->simt_stmt),
6835 OMP_CLAUSE__LOOPTEMP_);
6836 for (i = 0; i < count; i++)
6838 tree temp;
6839 if (taskreg_for)
6841 gcc_assert (outerc);
6842 temp = lookup_decl (OMP_CLAUSE_DECL (outerc), ctx->outer);
6843 outerc = omp_find_clause (OMP_CLAUSE_CHAIN (outerc),
6844 OMP_CLAUSE__LOOPTEMP_);
6846 else
6848 /* If there are 2 adjacent SIMD stmts, one with _simt_
6849 clause, another without, make sure they have the same
6850 decls in _looptemp_ clauses, because the outer stmt
6851 they are combined into will look up just one inner_stmt. */
6852 if (ctx->simt_stmt)
6853 temp = OMP_CLAUSE_DECL (simtc);
6854 else
6855 temp = create_tmp_var (type);
6856 insert_decl_map (&ctx->outer->cb, temp, temp);
6858 *pc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
6859 OMP_CLAUSE_DECL (*pc) = temp;
6860 pc = &OMP_CLAUSE_CHAIN (*pc);
6861 if (ctx->simt_stmt)
6862 simtc = omp_find_clause (OMP_CLAUSE_CHAIN (simtc),
6863 OMP_CLAUSE__LOOPTEMP_);
6865 *pc = clauses;
6868 /* The pre-body and input clauses go before the lowered GIMPLE_OMP_FOR. */
6869 dlist = NULL;
6870 body = NULL;
6871 lower_rec_input_clauses (gimple_omp_for_clauses (stmt), &body, &dlist, ctx,
6872 fdp);
6873 gimple_seq_add_seq (&body, gimple_omp_for_pre_body (stmt));
6875 lower_omp (gimple_omp_body_ptr (stmt), ctx);
6877 /* Lower the header expressions. At this point, we can assume that
6878 the header is of the form:
6880 #pragma omp for (V = VAL1; V {<|>|<=|>=} VAL2; V = V [+-] VAL3)
6882 We just need to make sure that VAL1, VAL2 and VAL3 are lowered
6883 using the .omp_data_s mapping, if needed. */
6884 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
6886 rhs_p = gimple_omp_for_initial_ptr (stmt, i);
6887 if (!is_gimple_min_invariant (*rhs_p))
6888 *rhs_p = get_formal_tmp_var (*rhs_p, &body);
6890 rhs_p = gimple_omp_for_final_ptr (stmt, i);
6891 if (!is_gimple_min_invariant (*rhs_p))
6892 *rhs_p = get_formal_tmp_var (*rhs_p, &body);
6894 rhs_p = &TREE_OPERAND (gimple_omp_for_incr (stmt, i), 1);
6895 if (!is_gimple_min_invariant (*rhs_p))
6896 *rhs_p = get_formal_tmp_var (*rhs_p, &body);
6899 /* Once lowered, extract the bounds and clauses. */
6900 omp_extract_for_data (stmt, &fd, NULL);
6902 if (is_gimple_omp_oacc (ctx->stmt)
6903 && !ctx_in_oacc_kernels_region (ctx))
6904 lower_oacc_head_tail (gimple_location (stmt),
6905 gimple_omp_for_clauses (stmt),
6906 &oacc_head, &oacc_tail, ctx);
6908 /* Add OpenACC partitioning and reduction markers just before the loop. */
6909 if (oacc_head)
6910 gimple_seq_add_seq (&body, oacc_head);
6912 lower_omp_for_lastprivate (&fd, &body, &dlist, ctx);
6914 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
6915 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
6916 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
6917 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
6919 OMP_CLAUSE_DECL (c) = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
6920 if (DECL_P (OMP_CLAUSE_LINEAR_STEP (c)))
6921 OMP_CLAUSE_LINEAR_STEP (c)
6922 = maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_LINEAR_STEP (c),
6923 ctx);
6926 bool phony_loop = (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_GRID_LOOP
6927 && gimple_omp_for_grid_phony (stmt));
6928 if (!phony_loop)
6929 gimple_seq_add_stmt (&body, stmt);
6930 gimple_seq_add_seq (&body, gimple_omp_body (stmt));
6932 if (!phony_loop)
6933 gimple_seq_add_stmt (&body, gimple_build_omp_continue (fd.loop.v,
6934 fd.loop.v));
6936 /* After the loop, add exit clauses. */
6937 lower_reduction_clauses (gimple_omp_for_clauses (stmt), &body, ctx);
6939 if (ctx->cancellable)
6940 gimple_seq_add_stmt (&body, gimple_build_label (ctx->cancel_label));
6942 gimple_seq_add_seq (&body, dlist);
6944 body = maybe_catch_exception (body);
6946 if (!phony_loop)
6948 /* Region exit marker goes at the end of the loop body. */
6949 gimple_seq_add_stmt (&body, gimple_build_omp_return (fd.have_nowait));
6950 maybe_add_implicit_barrier_cancel (ctx, &body);
6953 /* Add OpenACC joining and reduction markers just after the loop. */
6954 if (oacc_tail)
6955 gimple_seq_add_seq (&body, oacc_tail);
6957 pop_gimplify_context (new_stmt);
6959 gimple_bind_append_vars (new_stmt, ctx->block_vars);
6960 BLOCK_VARS (block) = gimple_bind_vars (new_stmt);
6961 if (BLOCK_VARS (block))
6962 TREE_USED (block) = 1;
6964 gimple_bind_set_body (new_stmt, body);
6965 gimple_omp_set_body (stmt, NULL);
6966 gimple_omp_for_set_pre_body (stmt, NULL);
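/* E.g. (illustrative, not from the sources) in
	#pragma omp for
	for (i = 0; i < hi * 2; i++)
   the bound hi * 2 is not gimple-invariant, so the header lowering above
   evaluates it once into a formal temporary; the GIMPLE_OMP_FOR header
   then references only invariants and temporaries, remapped through the
   .omp_data_s structure when needed.  */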
6969 /* Callback for walk_stmts. Check if the current statement only contains
6970 GIMPLE_OMP_FOR or GIMPLE_OMP_SECTIONS. */
6972 static tree
6973 check_combined_parallel (gimple_stmt_iterator *gsi_p,
6974 bool *handled_ops_p,
6975 struct walk_stmt_info *wi)
6977 int *info = (int *) wi->info;
6978 gimple *stmt = gsi_stmt (*gsi_p);
6980 *handled_ops_p = true;
6981 switch (gimple_code (stmt))
6983 WALK_SUBSTMTS;
6985 case GIMPLE_OMP_FOR:
6986 case GIMPLE_OMP_SECTIONS:
6987 *info = *info == 0 ? 1 : -1;
6988 break;
6989 default:
6990 *info = -1;
6991 break;
6993 return NULL;
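/* For illustration (not from the sources): the walk above lets
	#pragma omp parallel
	{
	  #pragma omp for
	  for (int i = 0; i < n; i++)
	    work (i);
	}
   be marked combined, since exactly one workshare makes up the body and
   *info ends at 1; expansion can then use the fused
   GOMP_parallel_loop_* libgomp entry points instead of a separate
   parallel region plus workshare.  */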
6996 struct omp_taskcopy_context
6998 /* This field must be at the beginning, as we do "inheritance": Some
6999 callback functions for tree-inline.c (e.g., omp_copy_decl)
7000 receive a copy_body_data pointer that is up-casted to an
7001 omp_context pointer. */
7002 copy_body_data cb;
7003 omp_context *ctx;
7006 static tree
7007 task_copyfn_copy_decl (tree var, copy_body_data *cb)
7009 struct omp_taskcopy_context *tcctx = (struct omp_taskcopy_context *) cb;
7011 if (splay_tree_lookup (tcctx->ctx->sfield_map, (splay_tree_key) var))
7012 return create_tmp_var (TREE_TYPE (var));
7014 return var;
7017 static tree
7018 task_copyfn_remap_type (struct omp_taskcopy_context *tcctx, tree orig_type)
7020 tree name, new_fields = NULL, type, f;
7022 type = lang_hooks.types.make_type (RECORD_TYPE);
7023 name = DECL_NAME (TYPE_NAME (orig_type));
7024 name = build_decl (gimple_location (tcctx->ctx->stmt),
7025 TYPE_DECL, name, type);
7026 TYPE_NAME (type) = name;
7028 for (f = TYPE_FIELDS (orig_type); f ; f = TREE_CHAIN (f))
7030 tree new_f = copy_node (f);
7031 DECL_CONTEXT (new_f) = type;
7032 TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &tcctx->cb);
7033 TREE_CHAIN (new_f) = new_fields;
7034 walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &tcctx->cb, NULL);
7035 walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r, &tcctx->cb, NULL);
7036 walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
7037 &tcctx->cb, NULL);
7038 new_fields = new_f;
7039 tcctx->cb.decl_map->put (f, new_f);
7041 TYPE_FIELDS (type) = nreverse (new_fields);
7042 layout_type (type);
7043 return type;
7046 /* Create task copyfn. */
7048 static void
7049 create_task_copyfn (gomp_task *task_stmt, omp_context *ctx)
7051 struct function *child_cfun;
7052 tree child_fn, t, c, src, dst, f, sf, arg, sarg, decl;
7053 tree record_type, srecord_type, bind, list;
7054 bool record_needs_remap = false, srecord_needs_remap = false;
7055 splay_tree_node n;
7056 struct omp_taskcopy_context tcctx;
7057 location_t loc = gimple_location (task_stmt);
7059 child_fn = gimple_omp_task_copy_fn (task_stmt);
7060 child_cfun = DECL_STRUCT_FUNCTION (child_fn);
7061 gcc_assert (child_cfun->cfg == NULL);
7062 DECL_SAVED_TREE (child_fn) = alloc_stmt_list ();
7064 /* Reset DECL_CONTEXT on function arguments. */
7065 for (t = DECL_ARGUMENTS (child_fn); t; t = DECL_CHAIN (t))
7066 DECL_CONTEXT (t) = child_fn;
7068 /* Populate the function. */
7069 push_gimplify_context ();
7070 push_cfun (child_cfun);
7072 bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
7073 TREE_SIDE_EFFECTS (bind) = 1;
7074 list = NULL;
7075 DECL_SAVED_TREE (child_fn) = bind;
7076 DECL_SOURCE_LOCATION (child_fn) = gimple_location (task_stmt);
7078 /* Remap src and dst argument types if needed. */
7079 record_type = ctx->record_type;
7080 srecord_type = ctx->srecord_type;
7081 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
7082 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
7084 record_needs_remap = true;
7085 break;
7087 for (f = TYPE_FIELDS (srecord_type); f ; f = DECL_CHAIN (f))
7088 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
7090 srecord_needs_remap = true;
7091 break;
7094 if (record_needs_remap || srecord_needs_remap)
7096 memset (&tcctx, '\0', sizeof (tcctx));
7097 tcctx.cb.src_fn = ctx->cb.src_fn;
7098 tcctx.cb.dst_fn = child_fn;
7099 tcctx.cb.src_node = cgraph_node::get (tcctx.cb.src_fn);
7100 gcc_checking_assert (tcctx.cb.src_node);
7101 tcctx.cb.dst_node = tcctx.cb.src_node;
7102 tcctx.cb.src_cfun = ctx->cb.src_cfun;
7103 tcctx.cb.copy_decl = task_copyfn_copy_decl;
7104 tcctx.cb.eh_lp_nr = 0;
7105 tcctx.cb.transform_call_graph_edges = CB_CGE_MOVE;
7106 tcctx.cb.decl_map = new hash_map<tree, tree>;
7107 tcctx.ctx = ctx;
7109 if (record_needs_remap)
7110 record_type = task_copyfn_remap_type (&tcctx, record_type);
7111 if (srecord_needs_remap)
7112 srecord_type = task_copyfn_remap_type (&tcctx, srecord_type);
7114 else
7115 tcctx.cb.decl_map = NULL;
7117 arg = DECL_ARGUMENTS (child_fn);
7118 TREE_TYPE (arg) = build_pointer_type (record_type);
7119 sarg = DECL_CHAIN (arg);
7120 TREE_TYPE (sarg) = build_pointer_type (srecord_type);
7122 /* First pass: initialize temporaries used in record_type and srecord_type
7123 sizes and field offsets. */
7124 if (tcctx.cb.decl_map)
7125 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
7126 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7128 tree *p;
7130 decl = OMP_CLAUSE_DECL (c);
7131 p = tcctx.cb.decl_map->get (decl);
7132 if (p == NULL)
7133 continue;
7134 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
7135 sf = (tree) n->value;
7136 sf = *tcctx.cb.decl_map->get (sf);
7137 src = build_simple_mem_ref_loc (loc, sarg);
7138 src = omp_build_component_ref (src, sf);
7139 t = build2 (MODIFY_EXPR, TREE_TYPE (*p), *p, src);
7140 append_to_statement_list (t, &list);
7143 /* Second pass: copy shared var pointers and copy construct non-VLA
7144 firstprivate vars. */
7145 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
7146 switch (OMP_CLAUSE_CODE (c))
7148 splay_tree_key key;
7149 case OMP_CLAUSE_SHARED:
7150 decl = OMP_CLAUSE_DECL (c);
7151 key = (splay_tree_key) decl;
7152 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
7153 key = (splay_tree_key) &DECL_UID (decl);
7154 n = splay_tree_lookup (ctx->field_map, key);
7155 if (n == NULL)
7156 break;
7157 f = (tree) n->value;
7158 if (tcctx.cb.decl_map)
7159 f = *tcctx.cb.decl_map->get (f);
7160 n = splay_tree_lookup (ctx->sfield_map, key);
7161 sf = (tree) n->value;
7162 if (tcctx.cb.decl_map)
7163 sf = *tcctx.cb.decl_map->get (sf);
7164 src = build_simple_mem_ref_loc (loc, sarg);
7165 src = omp_build_component_ref (src, sf);
7166 dst = build_simple_mem_ref_loc (loc, arg);
7167 dst = omp_build_component_ref (dst, f);
7168 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
7169 append_to_statement_list (t, &list);
7170 break;
7171 case OMP_CLAUSE_FIRSTPRIVATE:
7172 decl = OMP_CLAUSE_DECL (c);
7173 if (is_variable_sized (decl))
7174 break;
7175 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
7176 if (n == NULL)
7177 break;
7178 f = (tree) n->value;
7179 if (tcctx.cb.decl_map)
7180 f = *tcctx.cb.decl_map->get (f);
7181 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
7182 if (n != NULL)
7184 sf = (tree) n->value;
7185 if (tcctx.cb.decl_map)
7186 sf = *tcctx.cb.decl_map->get (sf);
7187 src = build_simple_mem_ref_loc (loc, sarg);
7188 src = omp_build_component_ref (src, sf);
7189 if (use_pointer_for_field (decl, NULL) || omp_is_reference (decl))
7190 src = build_simple_mem_ref_loc (loc, src);
7192 else
7193 src = decl;
7194 dst = build_simple_mem_ref_loc (loc, arg);
7195 dst = omp_build_component_ref (dst, f);
7196 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
7197 append_to_statement_list (t, &list);
7198 break;
7199 case OMP_CLAUSE_PRIVATE:
7200 if (! OMP_CLAUSE_PRIVATE_OUTER_REF (c))
7201 break;
7202 decl = OMP_CLAUSE_DECL (c);
7203 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
7204 f = (tree) n->value;
7205 if (tcctx.cb.decl_map)
7206 f = *tcctx.cb.decl_map->get (f);
7207 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
7208 if (n != NULL)
7210 sf = (tree) n->value;
7211 if (tcctx.cb.decl_map)
7212 sf = *tcctx.cb.decl_map->get (sf);
7213 src = build_simple_mem_ref_loc (loc, sarg);
7214 src = omp_build_component_ref (src, sf);
7215 if (use_pointer_for_field (decl, NULL))
7216 src = build_simple_mem_ref_loc (loc, src);
7218 else
7219 src = decl;
7220 dst = build_simple_mem_ref_loc (loc, arg);
7221 dst = omp_build_component_ref (dst, f);
7222 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
7223 append_to_statement_list (t, &list);
7224 break;
7225 default:
7226 break;
7229 /* Last pass: handle VLA firstprivates. */
7230 if (tcctx.cb.decl_map)
7231 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
7232 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7234 tree ind, ptr, df;
7236 decl = OMP_CLAUSE_DECL (c);
7237 if (!is_variable_sized (decl))
7238 continue;
7239 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
7240 if (n == NULL)
7241 continue;
7242 f = (tree) n->value;
7243 f = *tcctx.cb.decl_map->get (f);
7244 gcc_assert (DECL_HAS_VALUE_EXPR_P (decl));
7245 ind = DECL_VALUE_EXPR (decl);
7246 gcc_assert (TREE_CODE (ind) == INDIRECT_REF);
7247 gcc_assert (DECL_P (TREE_OPERAND (ind, 0)));
7248 n = splay_tree_lookup (ctx->sfield_map,
7249 (splay_tree_key) TREE_OPERAND (ind, 0));
7250 sf = (tree) n->value;
7251 sf = *tcctx.cb.decl_map->get (sf);
7252 src = build_simple_mem_ref_loc (loc, sarg);
7253 src = omp_build_component_ref (src, sf);
7254 src = build_simple_mem_ref_loc (loc, src);
7255 dst = build_simple_mem_ref_loc (loc, arg);
7256 dst = omp_build_component_ref (dst, f);
7257 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
7258 append_to_statement_list (t, &list);
7259 n = splay_tree_lookup (ctx->field_map,
7260 (splay_tree_key) TREE_OPERAND (ind, 0));
7261 df = (tree) n->value;
7262 df = *tcctx.cb.decl_map->get (df);
7263 ptr = build_simple_mem_ref_loc (loc, arg);
7264 ptr = omp_build_component_ref (ptr, df);
7265 t = build2 (MODIFY_EXPR, TREE_TYPE (ptr), ptr,
7266 build_fold_addr_expr_loc (loc, dst));
7267 append_to_statement_list (t, &list);
7270 t = build1 (RETURN_EXPR, void_type_node, NULL);
7271 append_to_statement_list (t, &list);
7273 if (tcctx.cb.decl_map)
7274 delete tcctx.cb.decl_map;
7275 pop_gimplify_context (NULL);
7276 BIND_EXPR_BODY (bind) = list;
7277 pop_cfun ();
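/* Illustrative trigger (not from the sources): a task copy function is
   needed, e.g., for
	void f (int n)
	{
	  int vla[n];
	  #pragma omp task firstprivate (vla)
	  use (vla);
	}
   GOMP_task may defer the task, so the firstprivate VLA has to be copied
   out of the encountering thread's frame into the task's own storage by
   the copyfn built here.  */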
7280 static void
7281 lower_depend_clauses (tree *pclauses, gimple_seq *iseq, gimple_seq *oseq)
7283 tree c, clauses;
7284 gimple *g;
7285 size_t n_in = 0, n_out = 0, idx = 2, i;
7287 clauses = omp_find_clause (*pclauses, OMP_CLAUSE_DEPEND);
7288 gcc_assert (clauses);
7289 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
7290 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
7291 switch (OMP_CLAUSE_DEPEND_KIND (c))
7293 case OMP_CLAUSE_DEPEND_IN:
7294 n_in++;
7295 break;
7296 case OMP_CLAUSE_DEPEND_OUT:
7297 case OMP_CLAUSE_DEPEND_INOUT:
7298 n_out++;
7299 break;
7300 case OMP_CLAUSE_DEPEND_SOURCE:
7301 case OMP_CLAUSE_DEPEND_SINK:
7302 /* FALLTHRU */
7303 default:
7304 gcc_unreachable ();
7306 tree type = build_array_type_nelts (ptr_type_node, n_in + n_out + 2);
7307 tree array = create_tmp_var (type);
7308 TREE_ADDRESSABLE (array) = 1;
7309 tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
7310 NULL_TREE);
7311 g = gimple_build_assign (r, build_int_cst (ptr_type_node, n_in + n_out));
7312 gimple_seq_add_stmt (iseq, g);
7313 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
7314 NULL_TREE);
7315 g = gimple_build_assign (r, build_int_cst (ptr_type_node, n_out));
7316 gimple_seq_add_stmt (iseq, g);
7317 for (i = 0; i < 2; i++)
7319 if ((i ? n_in : n_out) == 0)
7320 continue;
7321 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
7322 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
7323 && ((OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_IN) ^ i))
7325 tree t = OMP_CLAUSE_DECL (c);
7326 t = fold_convert (ptr_type_node, t);
7327 gimplify_expr (&t, iseq, NULL, is_gimple_val, fb_rvalue);
7328 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (idx++),
7329 NULL_TREE, NULL_TREE);
7330 g = gimple_build_assign (r, t);
7331 gimple_seq_add_stmt (iseq, g);
7334 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
7335 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
7336 OMP_CLAUSE_CHAIN (c) = *pclauses;
7337 *pclauses = c;
7338 tree clobber = build_constructor (type, NULL);
7339 TREE_THIS_VOLATILE (clobber) = 1;
7340 g = gimple_build_assign (array, clobber);
7341 gimple_seq_add_stmt (oseq, g);
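/* Illustrative layout (not from the sources): for
	#pragma omp task depend(out: a) depend(in: b, c)
   the code above builds
	void *deps[5] = { (void *) 3,	// total number of depend addresses
			  (void *) 1,	// how many are out/inout
			  &a,		// out/inout entries first
			  &b, &c };	// then the in entries
   and chains a new OMP_CLAUSE_DEPEND pointing at the array in front of
   the task's clauses; the array is clobbered again in OSEQ, after the
   task construct.  */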
7344 /* Lower the OpenMP parallel or task directive in the current statement
7345 in GSI_P. CTX holds context information for the directive. */
7347 static void
7348 lower_omp_taskreg (gimple_stmt_iterator *gsi_p, omp_context *ctx)
7350 tree clauses;
7351 tree child_fn, t;
7352 gimple *stmt = gsi_stmt (*gsi_p);
7353 gbind *par_bind, *bind, *dep_bind = NULL;
7354 gimple_seq par_body, olist, ilist, par_olist, par_rlist, par_ilist, new_body;
7355 location_t loc = gimple_location (stmt);
7357 clauses = gimple_omp_taskreg_clauses (stmt);
7358 par_bind
7359 = as_a <gbind *> (gimple_seq_first_stmt (gimple_omp_body (stmt)));
7360 par_body = gimple_bind_body (par_bind);
7361 child_fn = ctx->cb.dst_fn;
7362 if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL
7363 && !gimple_omp_parallel_combined_p (stmt))
7365 struct walk_stmt_info wi;
7366 int ws_num = 0;
7368 memset (&wi, 0, sizeof (wi));
7369 wi.info = &ws_num;
7370 wi.val_only = true;
7371 walk_gimple_seq (par_body, check_combined_parallel, NULL, &wi);
7372 if (ws_num == 1)
7373 gimple_omp_parallel_set_combined_p (stmt, true);
7375 gimple_seq dep_ilist = NULL;
7376 gimple_seq dep_olist = NULL;
7377 if (gimple_code (stmt) == GIMPLE_OMP_TASK
7378 && omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
7380 push_gimplify_context ();
7381 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
7382 lower_depend_clauses (gimple_omp_task_clauses_ptr (stmt),
7383 &dep_ilist, &dep_olist);
7386 if (ctx->srecord_type)
7387 create_task_copyfn (as_a <gomp_task *> (stmt), ctx);
7389 push_gimplify_context ();
7391 par_olist = NULL;
7392 par_ilist = NULL;
7393 par_rlist = NULL;
7394 bool phony_construct = gimple_code (stmt) == GIMPLE_OMP_PARALLEL
7395 && gimple_omp_parallel_grid_phony (as_a <gomp_parallel *> (stmt));
7396 if (phony_construct && ctx->record_type)
7398 gcc_checking_assert (!ctx->receiver_decl);
7399 ctx->receiver_decl = create_tmp_var
7400 (build_reference_type (ctx->record_type), ".omp_rec");
7402 lower_rec_input_clauses (clauses, &par_ilist, &par_olist, ctx, NULL);
7403 lower_omp (&par_body, ctx);
7404 if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL)
7405 lower_reduction_clauses (clauses, &par_rlist, ctx);
7407 /* Declare all the variables created by mapping and the variables
7408 declared in the scope of the parallel body. */
7409 record_vars_into (ctx->block_vars, child_fn);
7410 record_vars_into (gimple_bind_vars (par_bind), child_fn);
7412 if (ctx->record_type)
7414 ctx->sender_decl
7415 = create_tmp_var (ctx->srecord_type ? ctx->srecord_type
7416 : ctx->record_type, ".omp_data_o");
7417 DECL_NAMELESS (ctx->sender_decl) = 1;
7418 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
7419 gimple_omp_taskreg_set_data_arg (stmt, ctx->sender_decl);
7422 olist = NULL;
7423 ilist = NULL;
7424 lower_send_clauses (clauses, &ilist, &olist, ctx);
7425 lower_send_shared_vars (&ilist, &olist, ctx);
7427 if (ctx->record_type)
7429 tree clobber = build_constructor (TREE_TYPE (ctx->sender_decl), NULL);
7430 TREE_THIS_VOLATILE (clobber) = 1;
7431 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
7432 clobber));
7435 /* Once all the expansions are done, sequence all the different
7436 fragments inside gimple_omp_body. */
7438 new_body = NULL;
7440 if (ctx->record_type)
7442 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
7443 /* fixup_child_record_type might have changed receiver_decl's type. */
7444 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
7445 gimple_seq_add_stmt (&new_body,
7446 gimple_build_assign (ctx->receiver_decl, t));
7449 gimple_seq_add_seq (&new_body, par_ilist);
7450 gimple_seq_add_seq (&new_body, par_body);
7451 gimple_seq_add_seq (&new_body, par_rlist);
7452 if (ctx->cancellable)
7453 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
7454 gimple_seq_add_seq (&new_body, par_olist);
7455 new_body = maybe_catch_exception (new_body);
7456 if (gimple_code (stmt) == GIMPLE_OMP_TASK)
7457 gimple_seq_add_stmt (&new_body,
7458 gimple_build_omp_continue (integer_zero_node,
7459 integer_zero_node));
7460 if (!phony_construct)
7462 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
7463 gimple_omp_set_body (stmt, new_body);
7466 bind = gimple_build_bind (NULL, NULL, gimple_bind_block (par_bind));
7467 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
7468 gimple_bind_add_seq (bind, ilist);
7469 if (!phony_construct)
7470 gimple_bind_add_stmt (bind, stmt);
7471 else
7472 gimple_bind_add_seq (bind, new_body);
7473 gimple_bind_add_seq (bind, olist);
7475 pop_gimplify_context (NULL);
7477 if (dep_bind)
7479 gimple_bind_add_seq (dep_bind, dep_ilist);
7480 gimple_bind_add_stmt (dep_bind, bind);
7481 gimple_bind_add_seq (dep_bind, dep_olist);
7482 pop_gimplify_context (dep_bind);
7486 /* Lower the GIMPLE_OMP_TARGET in the current statement
7487 in GSI_P. CTX holds context information for the directive. */
7489 static void
7490 lower_omp_target (gimple_stmt_iterator *gsi_p, omp_context *ctx)
7492 tree clauses;
7493 tree child_fn, t, c;
7494 gomp_target *stmt = as_a <gomp_target *> (gsi_stmt (*gsi_p));
7495 gbind *tgt_bind, *bind, *dep_bind = NULL;
7496 gimple_seq tgt_body, olist, ilist, fplist, new_body;
7497 location_t loc = gimple_location (stmt);
7498 bool offloaded, data_region;
7499 unsigned int map_cnt = 0;
7501 offloaded = is_gimple_omp_offloaded (stmt);
7502 switch (gimple_omp_target_kind (stmt))
7504 case GF_OMP_TARGET_KIND_REGION:
7505 case GF_OMP_TARGET_KIND_UPDATE:
7506 case GF_OMP_TARGET_KIND_ENTER_DATA:
7507 case GF_OMP_TARGET_KIND_EXIT_DATA:
7508 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
7509 case GF_OMP_TARGET_KIND_OACC_KERNELS:
7510 case GF_OMP_TARGET_KIND_OACC_UPDATE:
7511 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
7512 case GF_OMP_TARGET_KIND_OACC_DECLARE:
7513 data_region = false;
7514 break;
7515 case GF_OMP_TARGET_KIND_DATA:
7516 case GF_OMP_TARGET_KIND_OACC_DATA:
7517 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
7518 data_region = true;
7519 break;
7520 default:
7521 gcc_unreachable ();
7524 clauses = gimple_omp_target_clauses (stmt);
7526 gimple_seq dep_ilist = NULL;
7527 gimple_seq dep_olist = NULL;
7528 if (omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
7530 push_gimplify_context ();
7531 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
7532 lower_depend_clauses (gimple_omp_target_clauses_ptr (stmt),
7533 &dep_ilist, &dep_olist);
7536 tgt_bind = NULL;
7537 tgt_body = NULL;
7538 if (offloaded)
7540 tgt_bind = gimple_seq_first_stmt_as_a_bind (gimple_omp_body (stmt));
7541 tgt_body = gimple_bind_body (tgt_bind);
7543 else if (data_region)
7544 tgt_body = gimple_omp_body (stmt);
7545 child_fn = ctx->cb.dst_fn;
7547 push_gimplify_context ();
7548 fplist = NULL;
7550 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7551 switch (OMP_CLAUSE_CODE (c))
7553 tree var, x;
7555 default:
7556 break;
7557 case OMP_CLAUSE_MAP:
7558 #if CHECKING_P
7559 /* First check what we're prepared to handle in the following. */
7560 switch (OMP_CLAUSE_MAP_KIND (c))
7562 case GOMP_MAP_ALLOC:
7563 case GOMP_MAP_TO:
7564 case GOMP_MAP_FROM:
7565 case GOMP_MAP_TOFROM:
7566 case GOMP_MAP_POINTER:
7567 case GOMP_MAP_TO_PSET:
7568 case GOMP_MAP_DELETE:
7569 case GOMP_MAP_RELEASE:
7570 case GOMP_MAP_ALWAYS_TO:
7571 case GOMP_MAP_ALWAYS_FROM:
7572 case GOMP_MAP_ALWAYS_TOFROM:
7573 case GOMP_MAP_FIRSTPRIVATE_POINTER:
7574 case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
7575 case GOMP_MAP_STRUCT:
7576 case GOMP_MAP_ALWAYS_POINTER:
7577 break;
7578 case GOMP_MAP_FORCE_ALLOC:
7579 case GOMP_MAP_FORCE_TO:
7580 case GOMP_MAP_FORCE_FROM:
7581 case GOMP_MAP_FORCE_TOFROM:
7582 case GOMP_MAP_FORCE_PRESENT:
7583 case GOMP_MAP_FORCE_DEVICEPTR:
7584 case GOMP_MAP_DEVICE_RESIDENT:
7585 case GOMP_MAP_LINK:
7586 gcc_assert (is_gimple_omp_oacc (stmt));
7587 break;
7588 default:
7589 gcc_unreachable ();
7591 #endif
7592 /* FALLTHRU */
7593 case OMP_CLAUSE_TO:
7594 case OMP_CLAUSE_FROM:
7595 oacc_firstprivate:
7596 var = OMP_CLAUSE_DECL (c);
7597 if (!DECL_P (var))
7599 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP
7600 || (!OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
7601 && (OMP_CLAUSE_MAP_KIND (c)
7602 != GOMP_MAP_FIRSTPRIVATE_POINTER)))
7603 map_cnt++;
7604 continue;
7607 if (DECL_SIZE (var)
7608 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
7610 tree var2 = DECL_VALUE_EXPR (var);
7611 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
7612 var2 = TREE_OPERAND (var2, 0);
7613 gcc_assert (DECL_P (var2));
7614 var = var2;
7617 if (offloaded
7618 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7619 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
7620 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
7622 if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
7624 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx))
7625 && varpool_node::get_create (var)->offloadable)
7626 continue;
7628 tree type = build_pointer_type (TREE_TYPE (var));
7629 tree new_var = lookup_decl (var, ctx);
7630 x = create_tmp_var_raw (type, get_name (new_var));
7631 gimple_add_tmp_var (x);
7632 x = build_simple_mem_ref (x);
7633 SET_DECL_VALUE_EXPR (new_var, x);
7634 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7636 continue;
7639 if (!maybe_lookup_field (var, ctx))
7640 continue;
7642 /* Don't remap oacc parallel reduction variables, because the
7643 intermediate result must be local to each gang. */
7644 if (offloaded && !(OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7645 && OMP_CLAUSE_MAP_IN_REDUCTION (c)))
7647 x = build_receiver_ref (var, true, ctx);
7648 tree new_var = lookup_decl (var, ctx);
7650 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7651 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
7652 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
7653 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
7654 x = build_simple_mem_ref (x);
7655 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7657 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
7658 if (omp_is_reference (new_var))
7660 /* Create a local object to hold the instance
7661 value. */
7662 tree type = TREE_TYPE (TREE_TYPE (new_var));
7663 const char *id = IDENTIFIER_POINTER (DECL_NAME (new_var));
7664 tree inst = create_tmp_var (type, id);
7665 gimplify_assign (inst, fold_indirect_ref (x), &fplist);
7666 x = build_fold_addr_expr (inst);
7668 gimplify_assign (new_var, x, &fplist);
7670 else if (DECL_P (new_var))
7672 SET_DECL_VALUE_EXPR (new_var, x);
7673 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7675 else
7676 gcc_unreachable ();
7678 map_cnt++;
7679 break;
7681 case OMP_CLAUSE_FIRSTPRIVATE:
7682 if (is_oacc_parallel (ctx))
7683 goto oacc_firstprivate;
7684 map_cnt++;
7685 var = OMP_CLAUSE_DECL (c);
7686 if (!omp_is_reference (var)
7687 && !is_gimple_reg_type (TREE_TYPE (var)))
7689 tree new_var = lookup_decl (var, ctx);
7690 if (is_variable_sized (var))
7692 tree pvar = DECL_VALUE_EXPR (var);
7693 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
7694 pvar = TREE_OPERAND (pvar, 0);
7695 gcc_assert (DECL_P (pvar));
7696 tree new_pvar = lookup_decl (pvar, ctx);
7697 x = build_fold_indirect_ref (new_pvar);
7698 TREE_THIS_NOTRAP (x) = 1;
7700 else
7701 x = build_receiver_ref (var, true, ctx);
7702 SET_DECL_VALUE_EXPR (new_var, x);
7703 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7705 break;
7707 case OMP_CLAUSE_PRIVATE:
7708 if (is_gimple_omp_oacc (ctx->stmt))
7709 break;
7710 var = OMP_CLAUSE_DECL (c);
7711 if (is_variable_sized (var))
7713 tree new_var = lookup_decl (var, ctx);
7714 tree pvar = DECL_VALUE_EXPR (var);
7715 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
7716 pvar = TREE_OPERAND (pvar, 0);
7717 gcc_assert (DECL_P (pvar));
7718 tree new_pvar = lookup_decl (pvar, ctx);
7719 x = build_fold_indirect_ref (new_pvar);
7720 TREE_THIS_NOTRAP (x) = 1;
7721 SET_DECL_VALUE_EXPR (new_var, x);
7722 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7724 break;
7726 case OMP_CLAUSE_USE_DEVICE_PTR:
7727 case OMP_CLAUSE_IS_DEVICE_PTR:
7728 var = OMP_CLAUSE_DECL (c);
7729 map_cnt++;
7730 if (is_variable_sized (var))
7732 tree new_var = lookup_decl (var, ctx);
7733 tree pvar = DECL_VALUE_EXPR (var);
7734 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
7735 pvar = TREE_OPERAND (pvar, 0);
7736 gcc_assert (DECL_P (pvar));
7737 tree new_pvar = lookup_decl (pvar, ctx);
7738 x = build_fold_indirect_ref (new_pvar);
7739 TREE_THIS_NOTRAP (x) = 1;
7740 SET_DECL_VALUE_EXPR (new_var, x);
7741 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7743 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
7745 tree new_var = lookup_decl (var, ctx);
7746 tree type = build_pointer_type (TREE_TYPE (var));
7747 x = create_tmp_var_raw (type, get_name (new_var));
7748 gimple_add_tmp_var (x);
7749 x = build_simple_mem_ref (x);
7750 SET_DECL_VALUE_EXPR (new_var, x);
7751 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7753 else
7755 tree new_var = lookup_decl (var, ctx);
7756 x = create_tmp_var_raw (TREE_TYPE (new_var), get_name (new_var));
7757 gimple_add_tmp_var (x);
7758 SET_DECL_VALUE_EXPR (new_var, x);
7759 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7761 break;
7764 if (offloaded)
7766 target_nesting_level++;
7767 lower_omp (&tgt_body, ctx);
7768 target_nesting_level--;
7770 else if (data_region)
7771 lower_omp (&tgt_body, ctx);
7773 if (offloaded)
7775 /* Declare all the variables created by mapping and the variables
7776 declared in the scope of the target body. */
7777 record_vars_into (ctx->block_vars, child_fn);
7778 record_vars_into (gimple_bind_vars (tgt_bind), child_fn);
7781 olist = NULL;
7782 ilist = NULL;
7783 if (ctx->record_type)
7785 ctx->sender_decl
7786 = create_tmp_var (ctx->record_type, ".omp_data_arr");
7787 DECL_NAMELESS (ctx->sender_decl) = 1;
7788 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
7789 t = make_tree_vec (3);
7790 TREE_VEC_ELT (t, 0) = ctx->sender_decl;
7791 TREE_VEC_ELT (t, 1)
7792 = create_tmp_var (build_array_type_nelts (size_type_node, map_cnt),
7793 ".omp_data_sizes");
7794 DECL_NAMELESS (TREE_VEC_ELT (t, 1)) = 1;
7795 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 1)) = 1;
7796 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 1;
7797 tree tkind_type = short_unsigned_type_node;
7798 int talign_shift = 8;
7799 TREE_VEC_ELT (t, 2)
7800 = create_tmp_var (build_array_type_nelts (tkind_type, map_cnt),
7801 ".omp_data_kinds");
7802 DECL_NAMELESS (TREE_VEC_ELT (t, 2)) = 1;
7803 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 2)) = 1;
7804 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 1;
7805 gimple_omp_target_set_data_arg (stmt, t);
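/* Annotation, not part of the original sources: the 3-element TREE_VEC
   built above is the launch data recorded on the target statement:

       TREE_VEC_ELT (t, 0)   .omp_data_arr    addresses of mapped objects
       TREE_VEC_ELT (t, 1)   .omp_data_sizes  byte size of each mapping
       TREE_VEC_ELT (t, 2)   .omp_data_kinds  map kind plus alignment

   Each array has MAP_CNT elements, one per mapping counted during the
   first walk over the clauses; the size and kind arrays are filled in
   from VSIZE and VKIND below.  */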
7807 vec<constructor_elt, va_gc> *vsize;
7808 vec<constructor_elt, va_gc> *vkind;
7809 vec_alloc (vsize, map_cnt);
7810 vec_alloc (vkind, map_cnt);
7811 unsigned int map_idx = 0;
7813 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7814 switch (OMP_CLAUSE_CODE (c))
7816 tree ovar, nc, s, purpose, var, x, type;
7817 unsigned int talign;
7819 default:
7820 break;
7822 case OMP_CLAUSE_MAP:
7823 case OMP_CLAUSE_TO:
7824 case OMP_CLAUSE_FROM:
7825 oacc_firstprivate_map:
7826 nc = c;
7827 ovar = OMP_CLAUSE_DECL (c);
7828 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7829 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
7830 || (OMP_CLAUSE_MAP_KIND (c)
7831 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
7832 break;
7833 if (!DECL_P (ovar))
7835 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7836 && OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
7838 gcc_checking_assert (OMP_CLAUSE_DECL (OMP_CLAUSE_CHAIN (c))
7839 == get_base_address (ovar));
7840 nc = OMP_CLAUSE_CHAIN (c);
7841 ovar = OMP_CLAUSE_DECL (nc);
7843 else
7845 tree x = build_sender_ref (ovar, ctx);
7846 tree v
7847 = build_fold_addr_expr_with_type (ovar, ptr_type_node);
7848 gimplify_assign (x, v, &ilist);
7849 nc = NULL_TREE;
7852 else
7854 if (DECL_SIZE (ovar)
7855 && TREE_CODE (DECL_SIZE (ovar)) != INTEGER_CST)
7857 tree ovar2 = DECL_VALUE_EXPR (ovar);
7858 gcc_assert (TREE_CODE (ovar2) == INDIRECT_REF);
7859 ovar2 = TREE_OPERAND (ovar2, 0);
7860 gcc_assert (DECL_P (ovar2));
7861 ovar = ovar2;
7863 if (!maybe_lookup_field (ovar, ctx))
7864 continue;
7867 talign = TYPE_ALIGN_UNIT (TREE_TYPE (ovar));
7868 if (DECL_P (ovar) && DECL_ALIGN_UNIT (ovar) > talign)
7869 talign = DECL_ALIGN_UNIT (ovar);
7870 if (nc)
7872 var = lookup_decl_in_outer_ctx (ovar, ctx);
7873 x = build_sender_ref (ovar, ctx);
7875 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7876 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
7877 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
7878 && TREE_CODE (TREE_TYPE (ovar)) == ARRAY_TYPE)
7880 gcc_assert (offloaded);
7881 tree avar
7882 = create_tmp_var (TREE_TYPE (TREE_TYPE (x)));
7883 mark_addressable (avar);
7884 gimplify_assign (avar, build_fold_addr_expr (var), &ilist);
7885 talign = DECL_ALIGN_UNIT (avar);
7886 avar = build_fold_addr_expr (avar);
7887 gimplify_assign (x, avar, &ilist);
7889 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7891 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
7892 if (!omp_is_reference (var))
7894 if (is_gimple_reg (var)
7895 && OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
7896 TREE_NO_WARNING (var) = 1;
7897 var = build_fold_addr_expr (var);
7899 else
7900 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
7901 gimplify_assign (x, var, &ilist);
7903 else if (is_gimple_reg (var))
7905 gcc_assert (offloaded);
7906 tree avar = create_tmp_var (TREE_TYPE (var));
7907 mark_addressable (avar);
7908 enum gomp_map_kind map_kind = OMP_CLAUSE_MAP_KIND (c);
7909 if (GOMP_MAP_COPY_TO_P (map_kind)
7910 || map_kind == GOMP_MAP_POINTER
7911 || map_kind == GOMP_MAP_TO_PSET
7912 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
7914 /* If we need to initialize a temporary
7915 with VAR because it is not addressable, and
7916 the variable hasn't been initialized yet, then
7917 we'll get a warning for the store to avar.
7918 Don't warn in that case; the mapping might
7919 be implicit.  */
7920 TREE_NO_WARNING (var) = 1;
7921 gimplify_assign (avar, var, &ilist);
7923 avar = build_fold_addr_expr (avar);
7924 gimplify_assign (x, avar, &ilist);
7925 if ((GOMP_MAP_COPY_FROM_P (map_kind)
7926 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
7927 && !TYPE_READONLY (TREE_TYPE (var)))
7929 x = unshare_expr (x);
7930 x = build_simple_mem_ref (x);
7931 gimplify_assign (var, x, &olist);
7934 else
7936 var = build_fold_addr_expr (var);
7937 gimplify_assign (x, var, &ilist);
7940 s = NULL_TREE;
7941 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7943 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
7944 s = TREE_TYPE (ovar);
7945 if (TREE_CODE (s) == REFERENCE_TYPE)
7946 s = TREE_TYPE (s);
7947 s = TYPE_SIZE_UNIT (s);
7949 else
7950 s = OMP_CLAUSE_SIZE (c);
7951 if (s == NULL_TREE)
7952 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
7953 s = fold_convert (size_type_node, s);
7954 purpose = size_int (map_idx++);
7955 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
7956 if (TREE_CODE (s) != INTEGER_CST)
7957 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
7959 unsigned HOST_WIDE_INT tkind, tkind_zero;
7960 switch (OMP_CLAUSE_CODE (c))
7962 case OMP_CLAUSE_MAP:
7963 tkind = OMP_CLAUSE_MAP_KIND (c);
7964 tkind_zero = tkind;
7965 if (OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c))
7966 switch (tkind)
7968 case GOMP_MAP_ALLOC:
7969 case GOMP_MAP_TO:
7970 case GOMP_MAP_FROM:
7971 case GOMP_MAP_TOFROM:
7972 case GOMP_MAP_ALWAYS_TO:
7973 case GOMP_MAP_ALWAYS_FROM:
7974 case GOMP_MAP_ALWAYS_TOFROM:
7975 case GOMP_MAP_RELEASE:
7976 case GOMP_MAP_FORCE_TO:
7977 case GOMP_MAP_FORCE_FROM:
7978 case GOMP_MAP_FORCE_TOFROM:
7979 case GOMP_MAP_FORCE_PRESENT:
7980 tkind_zero = GOMP_MAP_ZERO_LEN_ARRAY_SECTION;
7981 break;
7982 case GOMP_MAP_DELETE:
7983 tkind_zero = GOMP_MAP_DELETE_ZERO_LEN_ARRAY_SECTION;
7984 default:
7985 break;
7987 if (tkind_zero != tkind)
7989 if (integer_zerop (s))
7990 tkind = tkind_zero;
7991 else if (integer_nonzerop (s))
7992 tkind_zero = tkind;
7994 break;
7995 case OMP_CLAUSE_FIRSTPRIVATE:
7996 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
7997 tkind = GOMP_MAP_TO;
7998 tkind_zero = tkind;
7999 break;
8000 case OMP_CLAUSE_TO:
8001 tkind = GOMP_MAP_TO;
8002 tkind_zero = tkind;
8003 break;
8004 case OMP_CLAUSE_FROM:
8005 tkind = GOMP_MAP_FROM;
8006 tkind_zero = tkind;
8007 break;
8008 default:
8009 gcc_unreachable ();
8011 gcc_checking_assert (tkind
8012 < (HOST_WIDE_INT_C (1U) << talign_shift));
8013 gcc_checking_assert (tkind_zero
8014 < (HOST_WIDE_INT_C (1U) << talign_shift));
8015 talign = ceil_log2 (talign);
8016 tkind |= talign << talign_shift;
8017 tkind_zero |= talign << talign_shift;
8018 gcc_checking_assert (tkind
8019 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
8020 gcc_checking_assert (tkind_zero
8021 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
8022 if (tkind == tkind_zero)
8023 x = build_int_cstu (tkind_type, tkind);
8024 else
8026 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 0;
8027 x = build3 (COND_EXPR, tkind_type,
8028 fold_build2 (EQ_EXPR, boolean_type_node,
8029 unshare_expr (s), size_zero_node),
8030 build_int_cstu (tkind_type, tkind_zero),
8031 build_int_cstu (tkind_type, tkind));
8033 CONSTRUCTOR_APPEND_ELT (vkind, purpose, x);
8034 if (nc && nc != c)
8035 c = nc;
8036 break;
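/* Worked example, added as an annotation: with TALIGN_SHIFT == 8, a
   mapping of kind K whose object is aligned to 16 bytes is encoded
   above as

       K | (ceil_log2 (16) << 8)  ==  K | 0x400

   so the low 8 bits of each .omp_data_kinds element carry the map kind
   and the remaining bits carry the log2 of the alignment.  */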
8038 case OMP_CLAUSE_FIRSTPRIVATE:
8039 if (is_oacc_parallel (ctx))
8040 goto oacc_firstprivate_map;
8041 ovar = OMP_CLAUSE_DECL (c);
8042 if (omp_is_reference (ovar))
8043 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
8044 else
8045 talign = DECL_ALIGN_UNIT (ovar);
8046 var = lookup_decl_in_outer_ctx (ovar, ctx);
8047 x = build_sender_ref (ovar, ctx);
8048 tkind = GOMP_MAP_FIRSTPRIVATE;
8049 type = TREE_TYPE (ovar);
8050 if (omp_is_reference (ovar))
8051 type = TREE_TYPE (type);
8052 if ((INTEGRAL_TYPE_P (type)
8053 && TYPE_PRECISION (type) <= POINTER_SIZE)
8054 || TREE_CODE (type) == POINTER_TYPE)
8056 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
8057 tree t = var;
8058 if (omp_is_reference (var))
8059 t = build_simple_mem_ref (var);
8060 else if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
8061 TREE_NO_WARNING (var) = 1;
8062 if (TREE_CODE (type) != POINTER_TYPE)
8063 t = fold_convert (pointer_sized_int_node, t);
8064 t = fold_convert (TREE_TYPE (x), t);
8065 gimplify_assign (x, t, &ilist);
8067 else if (omp_is_reference (var))
8068 gimplify_assign (x, var, &ilist);
8069 else if (is_gimple_reg (var))
8071 tree avar = create_tmp_var (TREE_TYPE (var));
8072 mark_addressable (avar);
8073 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
8074 TREE_NO_WARNING (var) = 1;
8075 gimplify_assign (avar, var, &ilist);
8076 avar = build_fold_addr_expr (avar);
8077 gimplify_assign (x, avar, &ilist);
8079 else
8081 var = build_fold_addr_expr (var);
8082 gimplify_assign (x, var, &ilist);
8084 if (tkind == GOMP_MAP_FIRSTPRIVATE_INT)
8085 s = size_int (0);
8086 else if (omp_is_reference (ovar))
8087 s = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
8088 else
8089 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
8090 s = fold_convert (size_type_node, s);
8091 purpose = size_int (map_idx++);
8092 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
8093 if (TREE_CODE (s) != INTEGER_CST)
8094 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
8096 gcc_checking_assert (tkind
8097 < (HOST_WIDE_INT_C (1U) << talign_shift));
8098 talign = ceil_log2 (talign);
8099 tkind |= talign << talign_shift;
8100 gcc_checking_assert (tkind
8101 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
8102 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
8103 build_int_cstu (tkind_type, tkind));
8104 break;
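/* Annotation, not part of the original sources: on the
   GOMP_MAP_FIRSTPRIVATE_INT path above, a small scalar, e.g.

       int x;   (firstprivate on the target construct)

   is not passed through memory at all: its value is converted to a
   pointer-sized integer and stored directly in the address slot, and
   the recorded size is 0 so the runtime copies nothing.  */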
8106 case OMP_CLAUSE_USE_DEVICE_PTR:
8107 case OMP_CLAUSE_IS_DEVICE_PTR:
8108 ovar = OMP_CLAUSE_DECL (c);
8109 var = lookup_decl_in_outer_ctx (ovar, ctx);
8110 x = build_sender_ref (ovar, ctx);
8111 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR)
8112 tkind = GOMP_MAP_USE_DEVICE_PTR;
8113 else
8114 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
8115 type = TREE_TYPE (ovar);
8116 if (TREE_CODE (type) == ARRAY_TYPE)
8117 var = build_fold_addr_expr (var);
8118 else
8120 if (omp_is_reference (ovar))
8122 type = TREE_TYPE (type);
8123 if (TREE_CODE (type) != ARRAY_TYPE)
8124 var = build_simple_mem_ref (var);
8125 var = fold_convert (TREE_TYPE (x), var);
8128 gimplify_assign (x, var, &ilist);
8129 s = size_int (0);
8130 purpose = size_int (map_idx++);
8131 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
8132 gcc_checking_assert (tkind
8133 < (HOST_WIDE_INT_C (1U) << talign_shift));
8134 gcc_checking_assert (tkind
8135 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
8136 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
8137 build_int_cstu (tkind_type, tkind));
8138 break;
8141 gcc_assert (map_idx == map_cnt);
8143 DECL_INITIAL (TREE_VEC_ELT (t, 1))
8144 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 1)), vsize);
8145 DECL_INITIAL (TREE_VEC_ELT (t, 2))
8146 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 2)), vkind);
8147 for (int i = 1; i <= 2; i++)
8148 if (!TREE_STATIC (TREE_VEC_ELT (t, i)))
8150 gimple_seq initlist = NULL;
8151 force_gimple_operand (build1 (DECL_EXPR, void_type_node,
8152 TREE_VEC_ELT (t, i)),
8153 &initlist, true, NULL_TREE);
8154 gimple_seq_add_seq (&ilist, initlist);
8156 tree clobber = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, i)),
8157 NULL);
8158 TREE_THIS_VOLATILE (clobber) = 1;
8159 gimple_seq_add_stmt (&olist,
8160 gimple_build_assign (TREE_VEC_ELT (t, i),
8161 clobber));
8164 tree clobber = build_constructor (ctx->record_type, NULL);
8165 TREE_THIS_VOLATILE (clobber) = 1;
8166 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
8167 clobber));
8170 /* Once all the expansions are done, sequence all the different
8171 fragments inside gimple_omp_body. */
8173 new_body = NULL;
8175 if (offloaded
8176 && ctx->record_type)
8178 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
8179 /* fixup_child_record_type might have changed receiver_decl's type. */
8180 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
8181 gimple_seq_add_stmt (&new_body,
8182 gimple_build_assign (ctx->receiver_decl, t));
8184 gimple_seq_add_seq (&new_body, fplist);
8186 if (offloaded || data_region)
8188 tree prev = NULL_TREE;
8189 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
8190 switch (OMP_CLAUSE_CODE (c))
8192 tree var, x;
8193 default:
8194 break;
8195 case OMP_CLAUSE_FIRSTPRIVATE:
8196 if (is_gimple_omp_oacc (ctx->stmt))
8197 break;
8198 var = OMP_CLAUSE_DECL (c);
8199 if (omp_is_reference (var)
8200 || is_gimple_reg_type (TREE_TYPE (var)))
8202 tree new_var = lookup_decl (var, ctx);
8203 tree type;
8204 type = TREE_TYPE (var);
8205 if (omp_is_reference (var))
8206 type = TREE_TYPE (type);
8207 if ((INTEGRAL_TYPE_P (type)
8208 && TYPE_PRECISION (type) <= POINTER_SIZE)
8209 || TREE_CODE (type) == POINTER_TYPE)
8211 x = build_receiver_ref (var, false, ctx);
8212 if (TREE_CODE (type) != POINTER_TYPE)
8213 x = fold_convert (pointer_sized_int_node, x);
8214 x = fold_convert (type, x);
8215 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
8216 fb_rvalue);
8217 if (omp_is_reference (var))
8219 tree v = create_tmp_var_raw (type, get_name (var));
8220 gimple_add_tmp_var (v);
8221 TREE_ADDRESSABLE (v) = 1;
8222 gimple_seq_add_stmt (&new_body,
8223 gimple_build_assign (v, x));
8224 x = build_fold_addr_expr (v);
8226 gimple_seq_add_stmt (&new_body,
8227 gimple_build_assign (new_var, x));
8229 else
8231 x = build_receiver_ref (var, !omp_is_reference (var), ctx);
8232 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
8233 fb_rvalue);
8234 gimple_seq_add_stmt (&new_body,
8235 gimple_build_assign (new_var, x));
8238 else if (is_variable_sized (var))
8240 tree pvar = DECL_VALUE_EXPR (var);
8241 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
8242 pvar = TREE_OPERAND (pvar, 0);
8243 gcc_assert (DECL_P (pvar));
8244 tree new_var = lookup_decl (pvar, ctx);
8245 x = build_receiver_ref (var, false, ctx);
8246 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8247 gimple_seq_add_stmt (&new_body,
8248 gimple_build_assign (new_var, x));
8250 break;
8251 case OMP_CLAUSE_PRIVATE:
8252 if (is_gimple_omp_oacc (ctx->stmt))
8253 break;
8254 var = OMP_CLAUSE_DECL (c);
8255 if (omp_is_reference (var))
8257 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
8258 tree new_var = lookup_decl (var, ctx);
8259 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
8260 if (TREE_CONSTANT (x))
8262 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
8263 get_name (var));
8264 gimple_add_tmp_var (x);
8265 TREE_ADDRESSABLE (x) = 1;
8266 x = build_fold_addr_expr_loc (clause_loc, x);
8268 else
8269 break;
8271 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
8272 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8273 gimple_seq_add_stmt (&new_body,
8274 gimple_build_assign (new_var, x));
8276 break;
8277 case OMP_CLAUSE_USE_DEVICE_PTR:
8278 case OMP_CLAUSE_IS_DEVICE_PTR:
8279 var = OMP_CLAUSE_DECL (c);
8280 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR)
8281 x = build_sender_ref (var, ctx);
8282 else
8283 x = build_receiver_ref (var, false, ctx);
8284 if (is_variable_sized (var))
8286 tree pvar = DECL_VALUE_EXPR (var);
8287 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
8288 pvar = TREE_OPERAND (pvar, 0);
8289 gcc_assert (DECL_P (pvar));
8290 tree new_var = lookup_decl (pvar, ctx);
8291 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8292 gimple_seq_add_stmt (&new_body,
8293 gimple_build_assign (new_var, x));
8295 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
8297 tree new_var = lookup_decl (var, ctx);
8298 new_var = DECL_VALUE_EXPR (new_var);
8299 gcc_assert (TREE_CODE (new_var) == MEM_REF);
8300 new_var = TREE_OPERAND (new_var, 0);
8301 gcc_assert (DECL_P (new_var));
8302 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8303 gimple_seq_add_stmt (&new_body,
8304 gimple_build_assign (new_var, x));
8306 else
8308 tree type = TREE_TYPE (var);
8309 tree new_var = lookup_decl (var, ctx);
8310 if (omp_is_reference (var))
8312 type = TREE_TYPE (type);
8313 if (TREE_CODE (type) != ARRAY_TYPE)
8315 tree v = create_tmp_var_raw (type, get_name (var));
8316 gimple_add_tmp_var (v);
8317 TREE_ADDRESSABLE (v) = 1;
8318 x = fold_convert (type, x);
8319 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
8320 fb_rvalue);
8321 gimple_seq_add_stmt (&new_body,
8322 gimple_build_assign (v, x));
8323 x = build_fold_addr_expr (v);
8326 new_var = DECL_VALUE_EXPR (new_var);
8327 x = fold_convert (TREE_TYPE (new_var), x);
8328 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8329 gimple_seq_add_stmt (&new_body,
8330 gimple_build_assign (new_var, x));
8332 break;
8334 /* Handle GOMP_MAP_FIRSTPRIVATE_{POINTER,REFERENCE} in a second pass,
8335 so that any firstprivate vars needed to hold OMP_CLAUSE_SIZE biases
8336 have already been handled.  Likewise OMP_CLAUSE_PRIVATE for VLAs
8337 or references to VLAs.  */
8338 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
8339 switch (OMP_CLAUSE_CODE (c))
8341 tree var;
8342 default:
8343 break;
8344 case OMP_CLAUSE_MAP:
8345 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
8346 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
8348 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
8349 HOST_WIDE_INT offset = 0;
8350 gcc_assert (prev);
8351 var = OMP_CLAUSE_DECL (c);
8352 if (DECL_P (var)
8353 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
8354 && is_global_var (maybe_lookup_decl_in_outer_ctx (var,
8355 ctx))
8356 && varpool_node::get_create (var)->offloadable)
8357 break;
8358 if (TREE_CODE (var) == INDIRECT_REF
8359 && TREE_CODE (TREE_OPERAND (var, 0)) == COMPONENT_REF)
8360 var = TREE_OPERAND (var, 0);
8361 if (TREE_CODE (var) == COMPONENT_REF)
8363 var = get_addr_base_and_unit_offset (var, &offset);
8364 gcc_assert (var != NULL_TREE && DECL_P (var));
8366 else if (DECL_SIZE (var)
8367 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
8369 tree var2 = DECL_VALUE_EXPR (var);
8370 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
8371 var2 = TREE_OPERAND (var2, 0);
8372 gcc_assert (DECL_P (var2));
8373 var = var2;
8375 tree new_var = lookup_decl (var, ctx), x;
8376 tree type = TREE_TYPE (new_var);
8377 bool is_ref;
8378 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == INDIRECT_REF
8379 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
8380 == COMPONENT_REF))
8382 type = TREE_TYPE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0));
8383 is_ref = true;
8384 new_var = build2 (MEM_REF, type,
8385 build_fold_addr_expr (new_var),
8386 build_int_cst (build_pointer_type (type),
8387 offset));
8389 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPONENT_REF)
8391 type = TREE_TYPE (OMP_CLAUSE_DECL (c));
8392 is_ref = TREE_CODE (type) == REFERENCE_TYPE;
8393 new_var = build2 (MEM_REF, type,
8394 build_fold_addr_expr (new_var),
8395 build_int_cst (build_pointer_type (type),
8396 offset));
8398 else
8399 is_ref = omp_is_reference (var);
8400 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
8401 is_ref = false;
8402 bool ref_to_array = false;
8403 if (is_ref)
8405 type = TREE_TYPE (type);
8406 if (TREE_CODE (type) == ARRAY_TYPE)
8408 type = build_pointer_type (type);
8409 ref_to_array = true;
8412 else if (TREE_CODE (type) == ARRAY_TYPE)
8414 tree decl2 = DECL_VALUE_EXPR (new_var);
8415 gcc_assert (TREE_CODE (decl2) == MEM_REF);
8416 decl2 = TREE_OPERAND (decl2, 0);
8417 gcc_assert (DECL_P (decl2));
8418 new_var = decl2;
8419 type = TREE_TYPE (new_var);
8421 x = build_receiver_ref (OMP_CLAUSE_DECL (prev), false, ctx);
8422 x = fold_convert_loc (clause_loc, type, x);
8423 if (!integer_zerop (OMP_CLAUSE_SIZE (c)))
8425 tree bias = OMP_CLAUSE_SIZE (c);
8426 if (DECL_P (bias))
8427 bias = lookup_decl (bias, ctx);
8428 bias = fold_convert_loc (clause_loc, sizetype, bias);
8429 bias = fold_build1_loc (clause_loc, NEGATE_EXPR, sizetype,
8430 bias);
8431 x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
8432 TREE_TYPE (x), x, bias);
8434 if (ref_to_array)
8435 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
8436 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8437 if (is_ref && !ref_to_array)
8439 tree t = create_tmp_var_raw (type, get_name (var));
8440 gimple_add_tmp_var (t);
8441 TREE_ADDRESSABLE (t) = 1;
8442 gimple_seq_add_stmt (&new_body,
8443 gimple_build_assign (t, x));
8444 x = build_fold_addr_expr_loc (clause_loc, t);
8446 gimple_seq_add_stmt (&new_body,
8447 gimple_build_assign (new_var, x));
8448 prev = NULL_TREE;
8450 else if (OMP_CLAUSE_CHAIN (c)
8451 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c))
8452 == OMP_CLAUSE_MAP
8453 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
8454 == GOMP_MAP_FIRSTPRIVATE_POINTER
8455 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
8456 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
8457 prev = c;
8458 break;
8459 case OMP_CLAUSE_PRIVATE:
8460 var = OMP_CLAUSE_DECL (c);
8461 if (is_variable_sized (var))
8463 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
8464 tree new_var = lookup_decl (var, ctx);
8465 tree pvar = DECL_VALUE_EXPR (var);
8466 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
8467 pvar = TREE_OPERAND (pvar, 0);
8468 gcc_assert (DECL_P (pvar));
8469 tree new_pvar = lookup_decl (pvar, ctx);
8470 tree atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
8471 tree al = size_int (DECL_ALIGN (var));
8472 tree x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
8473 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
8474 x = fold_convert_loc (clause_loc, TREE_TYPE (new_pvar), x);
8475 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8476 gimple_seq_add_stmt (&new_body,
8477 gimple_build_assign (new_pvar, x));
8479 else if (omp_is_reference (var) && !is_gimple_omp_oacc (ctx->stmt))
8481 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
8482 tree new_var = lookup_decl (var, ctx);
8483 tree x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
8484 if (TREE_CONSTANT (x))
8485 break;
8486 else
8488 tree atmp
8489 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
8490 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
8491 tree al = size_int (TYPE_ALIGN (rtype));
8492 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
8495 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
8496 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8497 gimple_seq_add_stmt (&new_body,
8498 gimple_build_assign (new_var, x));
8500 break;
8503 gimple_seq fork_seq = NULL;
8504 gimple_seq join_seq = NULL;
8506 if (is_oacc_parallel (ctx))
8508 /* If there are reductions on the offloaded region itself, treat
8509 them as a dummy GANG loop. */
8510 tree level = build_int_cst (integer_type_node, GOMP_DIM_GANG);
8512 lower_oacc_reductions (gimple_location (ctx->stmt), clauses, level,
8513 false, NULL, NULL, &fork_seq, &join_seq, ctx);
8516 gimple_seq_add_seq (&new_body, fork_seq);
8517 gimple_seq_add_seq (&new_body, tgt_body);
8518 gimple_seq_add_seq (&new_body, join_seq);
8520 if (offloaded)
8521 new_body = maybe_catch_exception (new_body);
8523 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
8524 gimple_omp_set_body (stmt, new_body);
8527 bind = gimple_build_bind (NULL, NULL,
8528 tgt_bind ? gimple_bind_block (tgt_bind)
8529 : NULL_TREE);
8530 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
8531 gimple_bind_add_seq (bind, ilist);
8532 gimple_bind_add_stmt (bind, stmt);
8533 gimple_bind_add_seq (bind, olist);
8535 pop_gimplify_context (NULL);
8537 if (dep_bind)
8539 gimple_bind_add_seq (dep_bind, dep_ilist);
8540 gimple_bind_add_stmt (dep_bind, bind);
8541 gimple_bind_add_seq (dep_bind, dep_olist);
8542 pop_gimplify_context (dep_bind);
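/* Illustration, a hedged sketch rather than code from this pass: for a
   directive such as

       #pragma omp target map(tofrom: a)

   the lowering above surrounds the target statement with ILIST code
   initializing .omp_data_arr / .omp_data_sizes / .omp_data_kinds for
   the mapping of A, and with OLIST clobbers ending their lifetimes; the
   statement itself stays in place, now carrying the TREE_VEC data
   argument, until a later pass outlines the region.  */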
8546 /* Expand code for an OpenMP teams directive. */
8548 static void
8549 lower_omp_teams (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8551 gomp_teams *teams_stmt = as_a <gomp_teams *> (gsi_stmt (*gsi_p));
8552 push_gimplify_context ();
8554 tree block = make_node (BLOCK);
8555 gbind *bind = gimple_build_bind (NULL, NULL, block);
8556 gsi_replace (gsi_p, bind, true);
8557 gimple_seq bind_body = NULL;
8558 gimple_seq dlist = NULL;
8559 gimple_seq olist = NULL;
8561 tree num_teams = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
8562 OMP_CLAUSE_NUM_TEAMS);
8563 if (num_teams == NULL_TREE)
8564 num_teams = build_int_cst (unsigned_type_node, 0);
8565 else
8567 num_teams = OMP_CLAUSE_NUM_TEAMS_EXPR (num_teams);
8568 num_teams = fold_convert (unsigned_type_node, num_teams);
8569 gimplify_expr (&num_teams, &bind_body, NULL, is_gimple_val, fb_rvalue);
8571 tree thread_limit = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
8572 OMP_CLAUSE_THREAD_LIMIT);
8573 if (thread_limit == NULL_TREE)
8574 thread_limit = build_int_cst (unsigned_type_node, 0);
8575 else
8577 thread_limit = OMP_CLAUSE_THREAD_LIMIT_EXPR (thread_limit);
8578 thread_limit = fold_convert (unsigned_type_node, thread_limit);
8579 gimplify_expr (&thread_limit, &bind_body, NULL, is_gimple_val,
8580 fb_rvalue);
8583 lower_rec_input_clauses (gimple_omp_teams_clauses (teams_stmt),
8584 &bind_body, &dlist, ctx, NULL);
8585 lower_omp (gimple_omp_body_ptr (teams_stmt), ctx);
8586 lower_reduction_clauses (gimple_omp_teams_clauses (teams_stmt), &olist, ctx);
8587 if (!gimple_omp_teams_grid_phony (teams_stmt))
8589 gimple_seq_add_stmt (&bind_body, teams_stmt);
8590 location_t loc = gimple_location (teams_stmt);
8591 tree decl = builtin_decl_explicit (BUILT_IN_GOMP_TEAMS);
8592 gimple *call = gimple_build_call (decl, 2, num_teams, thread_limit);
8593 gimple_set_location (call, loc);
8594 gimple_seq_add_stmt (&bind_body, call);
8597 gimple_seq_add_seq (&bind_body, gimple_omp_body (teams_stmt));
8598 gimple_omp_set_body (teams_stmt, NULL);
8599 gimple_seq_add_seq (&bind_body, olist);
8600 gimple_seq_add_seq (&bind_body, dlist);
8601 if (!gimple_omp_teams_grid_phony (teams_stmt))
8602 gimple_seq_add_stmt (&bind_body, gimple_build_omp_return (true));
8603 gimple_bind_set_body (bind, bind_body);
8605 pop_gimplify_context (bind);
8607 gimple_bind_append_vars (bind, ctx->block_vars);
8608 BLOCK_VARS (block) = ctx->block_vars;
8609 if (BLOCK_VARS (block))
8610 TREE_USED (block) = 1;
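/* Illustrative sketch, added as an annotation: unless the teams
   statement is grid-phony, the lowering above turns

       #pragma omp teams num_teams(4) thread_limit(64)

   into, roughly,

       GOMP_teams (4, 64);
       <lowered teams body>
       OMP_RETURN

   with either argument defaulting to 0 when its clause is absent, in
   which case the runtime picks the value.  */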
8613 /* Expand code within an artificial GIMPLE_OMP_GRID_BODY OMP construct. */
8615 static void
8616 lower_omp_grid_body (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8618 gimple *stmt = gsi_stmt (*gsi_p);
8619 lower_omp (gimple_omp_body_ptr (stmt), ctx);
8620 gimple_seq_add_stmt (gimple_omp_body_ptr (stmt),
8621 gimple_build_omp_return (false));
8625 /* Callback for lower_omp_1. Return non-NULL if *tp needs to be
8626 regimplified. If DATA is non-NULL, lower_omp_1 is outside
8627 of an OMP context, but with task_shared_vars set.  */
8629 static tree
8630 lower_omp_regimplify_p (tree *tp, int *walk_subtrees,
8631 void *data)
8633 tree t = *tp;
8635 /* Any variable with DECL_VALUE_EXPR needs to be regimplified. */
8636 if (VAR_P (t) && data == NULL && DECL_HAS_VALUE_EXPR_P (t))
8637 return t;
8639 if (task_shared_vars
8640 && DECL_P (t)
8641 && bitmap_bit_p (task_shared_vars, DECL_UID (t)))
8642 return t;
8644 /* If a global variable has been privatized, TREE_CONSTANT on
8645 ADDR_EXPR might be wrong. */
8646 if (data == NULL && TREE_CODE (t) == ADDR_EXPR)
8647 recompute_tree_invariant_for_addr_expr (t);
8649 *walk_subtrees = !IS_TYPE_OR_DECL_P (t);
8650 return NULL_TREE;
8653 /* Data to be communicated between lower_omp_regimplify_operands and
8654 lower_omp_regimplify_operands_p. */
8656 struct lower_omp_regimplify_operands_data
8658 omp_context *ctx;
8659 vec<tree> *decls;
8662 /* Helper function for lower_omp_regimplify_operands. Find
8663 omp_member_access_dummy_var vars and adjust temporarily their
8664 DECL_VALUE_EXPRs if needed. */
8666 static tree
8667 lower_omp_regimplify_operands_p (tree *tp, int *walk_subtrees,
8668 void *data)
8670 tree t = omp_member_access_dummy_var (*tp);
8671 if (t)
8673 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
8674 lower_omp_regimplify_operands_data *ldata
8675 = (lower_omp_regimplify_operands_data *) wi->info;
8676 tree o = maybe_lookup_decl (t, ldata->ctx);
8677 if (o != t)
8679 ldata->decls->safe_push (DECL_VALUE_EXPR (*tp));
8680 ldata->decls->safe_push (*tp);
8681 tree v = unshare_and_remap (DECL_VALUE_EXPR (*tp), t, o);
8682 SET_DECL_VALUE_EXPR (*tp, v);
8685 *walk_subtrees = !IS_TYPE_OR_DECL_P (*tp);
8686 return NULL_TREE;
8689 /* Wrapper around gimple_regimplify_operands that adjusts DECL_VALUE_EXPRs
8690 of omp_member_access_dummy_var vars during regimplification. */
8692 static void
8693 lower_omp_regimplify_operands (omp_context *ctx, gimple *stmt,
8694 gimple_stmt_iterator *gsi_p)
8696 auto_vec<tree, 10> decls;
8697 if (ctx)
8699 struct walk_stmt_info wi;
8700 memset (&wi, '\0', sizeof (wi));
8701 struct lower_omp_regimplify_operands_data data;
8702 data.ctx = ctx;
8703 data.decls = &decls;
8704 wi.info = &data;
8705 walk_gimple_op (stmt, lower_omp_regimplify_operands_p, &wi);
8707 gimple_regimplify_operands (stmt, gsi_p);
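/* Added annotation: restore the DECL_VALUE_EXPRs that the walk above
   replaced.  Pairs were pushed as (value-expr, decl), so each pop
   yields the decl first and then the value expression to put back.  */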
8708 while (!decls.is_empty ())
8710 tree t = decls.pop ();
8711 tree v = decls.pop ();
8712 SET_DECL_VALUE_EXPR (t, v);
8716 static void
8717 lower_omp_1 (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8719 gimple *stmt = gsi_stmt (*gsi_p);
8720 struct walk_stmt_info wi;
8721 gcall *call_stmt;
8723 if (gimple_has_location (stmt))
8724 input_location = gimple_location (stmt);
8726 if (task_shared_vars)
8727 memset (&wi, '\0', sizeof (wi));
8729 /* If we have issued syntax errors, avoid doing any heavy lifting.
8730 Just replace the OMP directives with a NOP to avoid
8731 confusing RTL expansion. */
8732 if (seen_error () && is_gimple_omp (stmt))
8734 gsi_replace (gsi_p, gimple_build_nop (), true);
8735 return;
8738 switch (gimple_code (stmt))
8740 case GIMPLE_COND:
8742 gcond *cond_stmt = as_a <gcond *> (stmt);
8743 if ((ctx || task_shared_vars)
8744 && (walk_tree (gimple_cond_lhs_ptr (cond_stmt),
8745 lower_omp_regimplify_p,
8746 ctx ? NULL : &wi, NULL)
8747 || walk_tree (gimple_cond_rhs_ptr (cond_stmt),
8748 lower_omp_regimplify_p,
8749 ctx ? NULL : &wi, NULL)))
8750 lower_omp_regimplify_operands (ctx, cond_stmt, gsi_p);
8752 break;
8753 case GIMPLE_CATCH:
8754 lower_omp (gimple_catch_handler_ptr (as_a <gcatch *> (stmt)), ctx);
8755 break;
8756 case GIMPLE_EH_FILTER:
8757 lower_omp (gimple_eh_filter_failure_ptr (stmt), ctx);
8758 break;
8759 case GIMPLE_TRY:
8760 lower_omp (gimple_try_eval_ptr (stmt), ctx);
8761 lower_omp (gimple_try_cleanup_ptr (stmt), ctx);
8762 break;
8763 case GIMPLE_TRANSACTION:
8764 lower_omp (gimple_transaction_body_ptr (as_a <gtransaction *> (stmt)),
8765 ctx);
8766 break;
8767 case GIMPLE_BIND:
8768 lower_omp (gimple_bind_body_ptr (as_a <gbind *> (stmt)), ctx);
8769 break;
8770 case GIMPLE_OMP_PARALLEL:
8771 case GIMPLE_OMP_TASK:
8772 ctx = maybe_lookup_ctx (stmt);
8773 gcc_assert (ctx);
8774 if (ctx->cancellable)
8775 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
8776 lower_omp_taskreg (gsi_p, ctx);
8777 break;
8778 case GIMPLE_OMP_FOR:
8779 ctx = maybe_lookup_ctx (stmt);
8780 gcc_assert (ctx);
8781 if (ctx->cancellable)
8782 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
8783 lower_omp_for (gsi_p, ctx);
8784 break;
8785 case GIMPLE_OMP_SECTIONS:
8786 ctx = maybe_lookup_ctx (stmt);
8787 gcc_assert (ctx);
8788 if (ctx->cancellable)
8789 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
8790 lower_omp_sections (gsi_p, ctx);
8791 break;
8792 case GIMPLE_OMP_SINGLE:
8793 ctx = maybe_lookup_ctx (stmt);
8794 gcc_assert (ctx);
8795 lower_omp_single (gsi_p, ctx);
8796 break;
8797 case GIMPLE_OMP_MASTER:
8798 ctx = maybe_lookup_ctx (stmt);
8799 gcc_assert (ctx);
8800 lower_omp_master (gsi_p, ctx);
8801 break;
8802 case GIMPLE_OMP_TASKGROUP:
8803 ctx = maybe_lookup_ctx (stmt);
8804 gcc_assert (ctx);
8805 lower_omp_taskgroup (gsi_p, ctx);
8806 break;
8807 case GIMPLE_OMP_ORDERED:
8808 ctx = maybe_lookup_ctx (stmt);
8809 gcc_assert (ctx);
8810 lower_omp_ordered (gsi_p, ctx);
8811 break;
8812 case GIMPLE_OMP_CRITICAL:
8813 ctx = maybe_lookup_ctx (stmt);
8814 gcc_assert (ctx);
8815 lower_omp_critical (gsi_p, ctx);
8816 break;
8817 case GIMPLE_OMP_ATOMIC_LOAD:
8818 if ((ctx || task_shared_vars)
8819 && walk_tree (gimple_omp_atomic_load_rhs_ptr (
8820 as_a <gomp_atomic_load *> (stmt)),
8821 lower_omp_regimplify_p, ctx ? NULL : &wi, NULL))
8822 lower_omp_regimplify_operands (ctx, stmt, gsi_p);
8823 break;
8824 case GIMPLE_OMP_TARGET:
8825 ctx = maybe_lookup_ctx (stmt);
8826 gcc_assert (ctx);
8827 lower_omp_target (gsi_p, ctx);
8828 break;
8829 case GIMPLE_OMP_TEAMS:
8830 ctx = maybe_lookup_ctx (stmt);
8831 gcc_assert (ctx);
8832 lower_omp_teams (gsi_p, ctx);
8833 break;
8834 case GIMPLE_OMP_GRID_BODY:
8835 ctx = maybe_lookup_ctx (stmt);
8836 gcc_assert (ctx);
8837 lower_omp_grid_body (gsi_p, ctx);
8838 break;
8839 case GIMPLE_CALL:
8840 tree fndecl;
8841 call_stmt = as_a <gcall *> (stmt);
8842 fndecl = gimple_call_fndecl (call_stmt);
8843 if (fndecl
8844 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
8845 switch (DECL_FUNCTION_CODE (fndecl))
8847 case BUILT_IN_GOMP_BARRIER:
8848 if (ctx == NULL)
8849 break;
8850 /* FALLTHRU */
8851 case BUILT_IN_GOMP_CANCEL:
8852 case BUILT_IN_GOMP_CANCELLATION_POINT:
8853 omp_context *cctx;
8854 cctx = ctx;
8855 if (gimple_code (cctx->stmt) == GIMPLE_OMP_SECTION)
8856 cctx = cctx->outer;
8857 gcc_assert (gimple_call_lhs (call_stmt) == NULL_TREE);
8858 if (!cctx->cancellable)
8860 if (DECL_FUNCTION_CODE (fndecl)
8861 == BUILT_IN_GOMP_CANCELLATION_POINT)
8863 stmt = gimple_build_nop ();
8864 gsi_replace (gsi_p, stmt, false);
8866 break;
8868 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_GOMP_BARRIER)
8870 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_BARRIER_CANCEL);
8871 gimple_call_set_fndecl (call_stmt, fndecl);
8872 gimple_call_set_fntype (call_stmt, TREE_TYPE (fndecl));
8874 tree lhs;
8875 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (fndecl)));
8876 gimple_call_set_lhs (call_stmt, lhs);
8877 tree fallthru_label;
8878 fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
8879 gimple *g;
8880 g = gimple_build_label (fallthru_label);
8881 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
8882 g = gimple_build_cond (NE_EXPR, lhs,
8883 fold_convert (TREE_TYPE (lhs),
8884 boolean_false_node),
8885 cctx->cancel_label, fallthru_label);
8886 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
8887 break;
8888 default:
8889 break;
8891 /* FALLTHRU */
8892 default:
8893 if ((ctx || task_shared_vars)
8894 && walk_gimple_op (stmt, lower_omp_regimplify_p,
8895 ctx ? NULL : &wi))
8897 /* Just remove clobbers.  This should happen only if we have
8898 "privatized" local addressable variables in SIMD regions; the
8899 clobber isn't needed in that case, and gimplifying the address
8900 of the ARRAY_REF into a pointer and creating a MEM_REF based
8901 clobber would create worse code than we get with the clobber
8902 dropped.  */
8903 if (gimple_clobber_p (stmt))
8905 gsi_replace (gsi_p, gimple_build_nop (), true);
8906 break;
8908 lower_omp_regimplify_operands (ctx, stmt, gsi_p);
8910 break;
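/* Illustration, added as an annotation: inside a cancellable region the
   GIMPLE_CALL case above rewrites

       GOMP_barrier ();

   into, schematically,

       lhs = GOMP_barrier_cancel ();
       if (lhs != 0) goto <cancel_label>; else goto <fallthru_label>;
       <fallthru_label>:

   so that a cancelled team branches to the region's cancellation
   label.  */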
8914 static void
8915 lower_omp (gimple_seq *body, omp_context *ctx)
8917 location_t saved_location = input_location;
8918 gimple_stmt_iterator gsi;
8919 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
8920 lower_omp_1 (&gsi, ctx);
8921 /* During gimplification, we haven't folded statements inside offloading
8922 or taskreg regions (gimplify.c:maybe_fold_stmt); do that now.  */
8923 if (target_nesting_level || taskreg_nesting_level)
8924 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
8925 fold_stmt (&gsi);
8926 input_location = saved_location;
8929 /* Main entry point. */
8931 static unsigned int
8932 execute_lower_omp (void)
8934 gimple_seq body;
8935 int i;
8936 omp_context *ctx;
8938 /* This pass always runs, to provide PROP_gimple_lomp.
8939 But often, there is nothing to do. */
8940 if (flag_cilkplus == 0 && flag_openacc == 0 && flag_openmp == 0
8941 && flag_openmp_simd == 0)
8942 return 0;
8944 all_contexts = splay_tree_new (splay_tree_compare_pointers, 0,
8945 delete_omp_context);
8947 body = gimple_body (current_function_decl);
8949 if (hsa_gen_requested_p ())
8950 omp_grid_gridify_all_targets (&body);
8952 scan_omp (&body, NULL);
8953 gcc_assert (taskreg_nesting_level == 0);
8954 FOR_EACH_VEC_ELT (taskreg_contexts, i, ctx)
8955 finish_taskreg_scan (ctx);
8956 taskreg_contexts.release ();
8958 if (all_contexts->root)
8960 if (task_shared_vars)
8961 push_gimplify_context ();
8962 lower_omp (&body, NULL);
8963 if (task_shared_vars)
8964 pop_gimplify_context (NULL);
8967 if (all_contexts)
8969 splay_tree_delete (all_contexts);
8970 all_contexts = NULL;
8972 BITMAP_FREE (task_shared_vars);
8973 return 0;
8976 namespace {
8978 const pass_data pass_data_lower_omp =
8980 GIMPLE_PASS, /* type */
8981 "omplower", /* name */
8982 OPTGROUP_OMP, /* optinfo_flags */
8983 TV_NONE, /* tv_id */
8984 PROP_gimple_any, /* properties_required */
8985 PROP_gimple_lomp | PROP_gimple_lomp_dev, /* properties_provided */
8986 0, /* properties_destroyed */
8987 0, /* todo_flags_start */
8988 0, /* todo_flags_finish */
8991 class pass_lower_omp : public gimple_opt_pass
8993 public:
8994 pass_lower_omp (gcc::context *ctxt)
8995 : gimple_opt_pass (pass_data_lower_omp, ctxt)
8998 /* opt_pass methods: */
8999 virtual unsigned int execute (function *) { return execute_lower_omp (); }
9001 }; // class pass_lower_omp
9003 } // anon namespace
9005 gimple_opt_pass *
9006 make_pass_lower_omp (gcc::context *ctxt)
9008 return new pass_lower_omp (ctxt);
9011 /* The following is a utility to diagnose structured block violations.
9012 It is not part of the "omplower" pass, as that's invoked too late. It
9013 should be invoked by the respective front ends after gimplification. */
9015 static splay_tree all_labels;
9017 /* Check for mismatched contexts and generate an error if needed. Return
9018 true if an error is detected. */
9020 static bool
9021 diagnose_sb_0 (gimple_stmt_iterator *gsi_p,
9022 gimple *branch_ctx, gimple *label_ctx)
9024 gcc_checking_assert (!branch_ctx || is_gimple_omp (branch_ctx));
9025 gcc_checking_assert (!label_ctx || is_gimple_omp (label_ctx));
9027 if (label_ctx == branch_ctx)
9028 return false;
9030 const char* kind = NULL;
9032 if (flag_cilkplus)
9034 if ((branch_ctx
9035 && gimple_code (branch_ctx) == GIMPLE_OMP_FOR
9036 && gimple_omp_for_kind (branch_ctx) == GF_OMP_FOR_KIND_CILKSIMD)
9037 || (label_ctx
9038 && gimple_code (label_ctx) == GIMPLE_OMP_FOR
9039 && gimple_omp_for_kind (label_ctx) == GF_OMP_FOR_KIND_CILKSIMD))
9040 kind = "Cilk Plus";
9042 if (flag_openacc)
9044 if ((branch_ctx && is_gimple_omp_oacc (branch_ctx))
9045 || (label_ctx && is_gimple_omp_oacc (label_ctx)))
9047 gcc_checking_assert (kind == NULL);
9048 kind = "OpenACC";
9051 if (kind == NULL)
9053 gcc_checking_assert (flag_openmp);
9054 kind = "OpenMP";
9057 /* Previously we kept track of the label's entire context in diagnose_sb_[12]
9058 so we could traverse it and issue a correct "exit" or "enter" error
9059 message upon a structured block violation.
9061 We built the context by building a list with tree_cons'ing, but there is
9062 no easy counterpart in gimple tuples. It seems like far too much work
9063 for issuing exit/enter error messages. If someone really misses the
9064 distinct error message... patches welcome. */
9066 #if 0
9067 /* Try to avoid confusing the user by producing an error message
9068 with correct "exit" or "enter" verbiage. We prefer "exit"
9069 unless we can show that LABEL_CTX is nested within BRANCH_CTX. */
9070 if (branch_ctx == NULL)
9071 exit_p = false;
9072 else
9074 while (label_ctx)
9076 if (TREE_VALUE (label_ctx) == branch_ctx)
9078 exit_p = false;
9079 break;
9081 label_ctx = TREE_CHAIN (label_ctx);
9085 if (exit_p)
9086 error ("invalid exit from %s structured block", kind);
9087 else
9088 error ("invalid entry to %s structured block", kind);
9089 #endif
9091 /* If it's obvious we have an invalid entry, be specific about the error. */
9092 if (branch_ctx == NULL)
9093 error ("invalid entry to %s structured block", kind);
9094 else
9096 /* Otherwise, be vague and lazy, but efficient. */
9097 error ("invalid branch to/from %s structured block", kind);
9100 gsi_replace (gsi_p, gimple_build_nop (), false);
9101 return true;
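/* Example, added as an annotation: with -fopenmp, a jump out of a
   structured block, e.g.

       #pragma omp parallel
       { goto out; }
       out:;

   reaches diagnose_sb_0 with differing BRANCH_CTX and LABEL_CTX and is
   reported as "invalid branch to/from OpenMP structured block".  */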
9104 /* Pass 1: Create a minimal tree of structured blocks, and record
9105 where each label is found. */
9107 static tree
9108 diagnose_sb_1 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
9109 struct walk_stmt_info *wi)
9111 gimple *context = (gimple *) wi->info;
9112 gimple *inner_context;
9113 gimple *stmt = gsi_stmt (*gsi_p);
9115 *handled_ops_p = true;
9117 switch (gimple_code (stmt))
9119 WALK_SUBSTMTS;
9121 case GIMPLE_OMP_PARALLEL:
9122 case GIMPLE_OMP_TASK:
9123 case GIMPLE_OMP_SECTIONS:
9124 case GIMPLE_OMP_SINGLE:
9125 case GIMPLE_OMP_SECTION:
9126 case GIMPLE_OMP_MASTER:
9127 case GIMPLE_OMP_ORDERED:
9128 case GIMPLE_OMP_CRITICAL:
9129 case GIMPLE_OMP_TARGET:
9130 case GIMPLE_OMP_TEAMS:
9131 case GIMPLE_OMP_TASKGROUP:
9132 /* The minimal context here is just the current OMP construct. */
9133 inner_context = stmt;
9134 wi->info = inner_context;
9135 walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
9136 wi->info = context;
9137 break;
9139 case GIMPLE_OMP_FOR:
9140 inner_context = stmt;
9141 wi->info = inner_context;
9142 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
9143 walk them. */
9144 walk_gimple_seq (gimple_omp_for_pre_body (stmt),
9145 diagnose_sb_1, NULL, wi);
9146 walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
9147 wi->info = context;
9148 break;
9150 case GIMPLE_LABEL:
9151 splay_tree_insert (all_labels,
9152 (splay_tree_key) gimple_label_label (
9153 as_a <glabel *> (stmt)),
9154 (splay_tree_value) context);
9155 break;
9157 default:
9158 break;
9161 return NULL_TREE;
9164 /* Pass 2: Check each branch and see if its context differs from that of
9165 the destination label's context. */
9167 static tree
9168 diagnose_sb_2 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
9169 struct walk_stmt_info *wi)
9171 gimple *context = (gimple *) wi->info;
9172 splay_tree_node n;
9173 gimple *stmt = gsi_stmt (*gsi_p);
9175 *handled_ops_p = true;
9177 switch (gimple_code (stmt))
9179 WALK_SUBSTMTS;
9181 case GIMPLE_OMP_PARALLEL:
9182 case GIMPLE_OMP_TASK:
9183 case GIMPLE_OMP_SECTIONS:
9184 case GIMPLE_OMP_SINGLE:
9185 case GIMPLE_OMP_SECTION:
9186 case GIMPLE_OMP_MASTER:
9187 case GIMPLE_OMP_ORDERED:
9188 case GIMPLE_OMP_CRITICAL:
9189 case GIMPLE_OMP_TARGET:
9190 case GIMPLE_OMP_TEAMS:
9191 case GIMPLE_OMP_TASKGROUP:
9192 wi->info = stmt;
9193 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
9194 wi->info = context;
9195 break;
9197 case GIMPLE_OMP_FOR:
9198 wi->info = stmt;
9199 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
9200 walk them. */
9201 walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt),
9202 diagnose_sb_2, NULL, wi);
9203 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
9204 wi->info = context;
9205 break;
9207 case GIMPLE_COND:
9209 gcond *cond_stmt = as_a <gcond *> (stmt);
9210 tree lab = gimple_cond_true_label (cond_stmt);
9211 if (lab)
9213 n = splay_tree_lookup (all_labels,
9214 (splay_tree_key) lab);
9215 diagnose_sb_0 (gsi_p, context,
9216 n ? (gimple *) n->value : NULL);
9218 lab = gimple_cond_false_label (cond_stmt);
9219 if (lab)
9221 n = splay_tree_lookup (all_labels,
9222 (splay_tree_key) lab);
9223 diagnose_sb_0 (gsi_p, context,
9224 n ? (gimple *) n->value : NULL);
9227 break;
9229 case GIMPLE_GOTO:
9231 tree lab = gimple_goto_dest (stmt);
9232 if (TREE_CODE (lab) != LABEL_DECL)
9233 break;
9235 n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
9236 diagnose_sb_0 (gsi_p, context, n ? (gimple *) n->value : NULL);
9238 break;
9240 case GIMPLE_SWITCH:
9242 gswitch *switch_stmt = as_a <gswitch *> (stmt);
9243 unsigned int i;
9244 for (i = 0; i < gimple_switch_num_labels (switch_stmt); ++i)
9246 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
9247 n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
9248 if (n && diagnose_sb_0 (gsi_p, context, (gimple *) n->value))
9249 break;
9252 break;
9254 case GIMPLE_RETURN:
9255 diagnose_sb_0 (gsi_p, context, NULL);
9256 break;
9258 default:
9259 break;
9262 return NULL_TREE;
9265 static unsigned int
9266 diagnose_omp_structured_block_errors (void)
9268 struct walk_stmt_info wi;
9269 gimple_seq body = gimple_body (current_function_decl);
9271 all_labels = splay_tree_new (splay_tree_compare_pointers, 0, 0);
9273 memset (&wi, 0, sizeof (wi));
9274 walk_gimple_seq (body, diagnose_sb_1, NULL, &wi);
9276 memset (&wi, 0, sizeof (wi));
9277 wi.want_locations = true;
9278 walk_gimple_seq_mod (&body, diagnose_sb_2, NULL, &wi);
9280 gimple_set_body (current_function_decl, body);
9282 splay_tree_delete (all_labels);
9283 all_labels = NULL;
9285 return 0;
9288 namespace {
9290 const pass_data pass_data_diagnose_omp_blocks =
9292 GIMPLE_PASS, /* type */
9293 "*diagnose_omp_blocks", /* name */
9294 OPTGROUP_OMP, /* optinfo_flags */
9295 TV_NONE, /* tv_id */
9296 PROP_gimple_any, /* properties_required */
9297 0, /* properties_provided */
9298 0, /* properties_destroyed */
9299 0, /* todo_flags_start */
9300 0, /* todo_flags_finish */
9303 class pass_diagnose_omp_blocks : public gimple_opt_pass
9305 public:
9306 pass_diagnose_omp_blocks (gcc::context *ctxt)
9307 : gimple_opt_pass (pass_data_diagnose_omp_blocks, ctxt)
9310 /* opt_pass methods: */
9311 virtual bool gate (function *)
9313 return flag_cilkplus || flag_openacc || flag_openmp;
9315 virtual unsigned int execute (function *)
9317 return diagnose_omp_structured_block_errors ();
9320 }; // class pass_diagnose_omp_blocks
9322 } // anon namespace
9324 gimple_opt_pass *
9325 make_pass_diagnose_omp_blocks (gcc::context *ctxt)
9327 return new pass_diagnose_omp_blocks (ctxt);
9331 #include "gt-omp-low.h"