PR sanitizer/80403
[official-gcc.git] / gcc / omp-low.c
blob22772ba28918eeeab039c8b7db0562334e1de7d0
1 /* Lowering pass for OMP directives. Converts OMP directives into explicit
2 calls to the runtime library (libgomp), data marshalling to implement data
3 sharing and copying clauses, offloading to accelerators, and more.
5 Contributed by Diego Novillo <dnovillo@redhat.com>
7 Copyright (C) 2005-2017 Free Software Foundation, Inc.
9 This file is part of GCC.
11 GCC is free software; you can redistribute it and/or modify it under
12 the terms of the GNU General Public License as published by the Free
13 Software Foundation; either version 3, or (at your option) any later
14 version.
16 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
17 WARRANTY; without even the implied warranty of MERCHANTABILITY or
18 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19 for more details.
21 You should have received a copy of the GNU General Public License
22 along with GCC; see the file COPYING3. If not see
23 <http://www.gnu.org/licenses/>. */
25 #include "config.h"
26 #include "system.h"
27 #include "coretypes.h"
28 #include "backend.h"
29 #include "target.h"
30 #include "tree.h"
31 #include "gimple.h"
32 #include "tree-pass.h"
33 #include "ssa.h"
34 #include "cgraph.h"
35 #include "pretty-print.h"
36 #include "diagnostic-core.h"
37 #include "fold-const.h"
38 #include "stor-layout.h"
39 #include "internal-fn.h"
40 #include "gimple-fold.h"
41 #include "gimplify.h"
42 #include "gimple-iterator.h"
43 #include "gimplify-me.h"
44 #include "gimple-walk.h"
45 #include "tree-iterator.h"
46 #include "tree-inline.h"
47 #include "langhooks.h"
48 #include "tree-dfa.h"
49 #include "tree-ssa.h"
50 #include "splay-tree.h"
51 #include "omp-general.h"
52 #include "omp-low.h"
53 #include "omp-grid.h"
54 #include "gimple-low.h"
55 #include "symbol-summary.h"
56 #include "tree-nested.h"
57 #include "context.h"
58 #include "gomp-constants.h"
59 #include "gimple-pretty-print.h"
60 #include "hsa-common.h"
62 /* Lowering of OMP parallel and workshare constructs proceeds in two
63 phases. The first phase scans the function looking for OMP statements
64 and then for variables that must be replaced to satisfy data sharing
65 clauses. The second phase expands code for the constructs, as well as
66 re-gimplifying things when variables have been replaced with complex
67 expressions.
69 Final code generation is done by pass_expand_omp. The flowgraph is
70 scanned for regions which are then moved to a new
71 function, to be invoked by the thread library, or offloaded. */
73 /* Context structure. Used to store information about each parallel
74 directive in the code. */
76 struct omp_context
78 /* This field must be at the beginning, as we do "inheritance": Some
79 callback functions for tree-inline.c (e.g., omp_copy_decl)
80 receive a copy_body_data pointer that is up-casted to an
81 omp_context pointer. */
82 copy_body_data cb;
84 /* The tree of contexts corresponding to the encountered constructs. */
85 struct omp_context *outer;
/* The OMP construct statement this context was created for.  */
86 gimple *stmt;
88 /* Map variables to fields in a structure that allows communication
89 between sending and receiving threads. */
90 splay_tree field_map;
/* Record type holding the communicated variables, plus the decls used
   on the sender and receiver sides to pass that record.  */
91 tree record_type;
92 tree sender_decl;
93 tree receiver_decl;
95 /* These are used just by task contexts, if task firstprivate fn is
96 needed. srecord_type is used to communicate from the thread
97 that encountered the task construct to task firstprivate fn,
98 record_type is allocated by GOMP_task, initialized by task firstprivate
99 fn and passed to the task body fn. */
100 splay_tree sfield_map;
101 tree srecord_type;
103 /* A chain of variables to add to the top-level block surrounding the
104 construct. In the case of a parallel, this is in the child function. */
105 tree block_vars;
107 /* Label to which GOMP_cancel{,llation_point} and explicit and implicit
108 barriers should jump to during omplower pass. */
109 tree cancel_label;
111 /* The sibling GIMPLE_OMP_FOR simd with _simt_ clause or NULL
112 otherwise. */
113 gimple *simt_stmt;
115 /* What to do with variables with implicitly determined sharing
116 attributes. */
117 enum omp_clause_default_kind default_kind;
119 /* Nesting depth of this context. Used to beautify error messages re
120 invalid gotos. The outermost ctx is depth 1, with depth 0 being
121 reserved for the main body of the function. */
122 int depth;
124 /* True if this parallel directive is nested within another. */
125 bool is_nested;
127 /* True if this construct can be cancelled. */
128 bool cancellable;
/* Splay tree mapping each OMP statement to its omp_context, built during
   scanning and torn down (with delete_omp_context) afterwards.  */
131 static splay_tree all_contexts;
/* Current nesting depths of parallel/task and target regions.  */
132 static int taskreg_nesting_level;
133 static int target_nesting_level;
/* DECL_UIDs of variables made addressable only because a task needs to
   take their address; see use_pointer_for_field.  */
134 static bitmap task_shared_vars;
/* All parallel/task contexts seen so far, processed together later.  */
135 static vec<omp_context *> taskreg_contexts;
137 static void scan_omp (gimple_seq *, omp_context *);
138 static tree scan_omp_1_op (tree *, int *, void *);
/* Shared switch cases for walk_gimple_stmt callbacks: containers whose
   sub-statements should be walked by the generic walker.  */
140 #define WALK_SUBSTMTS \
141 case GIMPLE_BIND: \
142 case GIMPLE_TRY: \
143 case GIMPLE_CATCH: \
144 case GIMPLE_EH_FILTER: \
145 case GIMPLE_TRANSACTION: \
146 /* The sub-statements for these should be walked. */ \
147 *handled_ops_p = false; \
148 break;
150 /* Return true if CTX corresponds to an oacc parallel region. */
152 static bool
153 is_oacc_parallel (omp_context *ctx)
155 enum gimple_code outer_type = gimple_code (ctx->stmt);
156 return ((outer_type == GIMPLE_OMP_TARGET)
157 && (gimple_omp_target_kind (ctx->stmt)
158 == GF_OMP_TARGET_KIND_OACC_PARALLEL));
161 /* Return true if CTX corresponds to an oacc kernels region. */
163 static bool
164 is_oacc_kernels (omp_context *ctx)
166 enum gimple_code outer_type = gimple_code (ctx->stmt);
167 return ((outer_type == GIMPLE_OMP_TARGET)
168 && (gimple_omp_target_kind (ctx->stmt)
169 == GF_OMP_TARGET_KIND_OACC_KERNELS));
172 /* If DECL is the artificial dummy VAR_DECL created for non-static
173 data member privatization, return the underlying "this" parameter,
174 otherwise return NULL. */
176 tree
177 omp_member_access_dummy_var (tree decl)
179 if (!VAR_P (decl)
180 || !DECL_ARTIFICIAL (decl)
181 || !DECL_IGNORED_P (decl)
182 || !DECL_HAS_VALUE_EXPR_P (decl)
183 || !lang_hooks.decls.omp_disregard_value_expr (decl, false))
184 return NULL_TREE;
186 tree v = DECL_VALUE_EXPR (decl);
187 if (TREE_CODE (v) != COMPONENT_REF)
188 return NULL_TREE;
190 while (1)
191 switch (TREE_CODE (v))
193 case COMPONENT_REF:
194 case MEM_REF:
195 case INDIRECT_REF:
196 CASE_CONVERT:
197 case POINTER_PLUS_EXPR:
198 v = TREE_OPERAND (v, 0);
199 continue;
200 case PARM_DECL:
201 if (DECL_CONTEXT (v) == current_function_decl
202 && DECL_ARTIFICIAL (v)
203 && TREE_CODE (TREE_TYPE (v)) == POINTER_TYPE)
204 return v;
205 return NULL_TREE;
206 default:
207 return NULL_TREE;
211 /* Helper for unshare_and_remap, called through walk_tree. */
213 static tree
214 unshare_and_remap_1 (tree *tp, int *walk_subtrees, void *data)
216 tree *pair = (tree *) data;
217 if (*tp == pair[0])
219 *tp = unshare_expr (pair[1]);
220 *walk_subtrees = 0;
222 else if (IS_TYPE_OR_DECL_P (*tp))
223 *walk_subtrees = 0;
224 return NULL_TREE;
227 /* Return unshare_expr (X) with all occurrences of FROM
228 replaced with TO. */
230 static tree
231 unshare_and_remap (tree x, tree from, tree to)
233 tree pair[2] = { from, to };
234 x = unshare_expr (x);
235 walk_tree (&x, unshare_and_remap_1, pair, NULL);
236 return x;
239 /* Convenience function for calling scan_omp_1_op on tree operands. */
241 static inline tree
242 scan_omp_op (tree *tp, omp_context *ctx)
244 struct walk_stmt_info wi;
246 memset (&wi, 0, sizeof (wi));
247 wi.info = ctx;
248 wi.want_locations = true;
250 return walk_tree (tp, scan_omp_1_op, &wi, NULL);
/* Forward declarations: lowering driver and outer-context decl lookup
   helpers defined later in this file.  */
253 static void lower_omp (gimple_seq *, omp_context *);
254 static tree lookup_decl_in_outer_ctx (tree, omp_context *);
255 static tree maybe_lookup_decl_in_outer_ctx (tree, omp_context *);
257 /* Return true if CTX is for an omp parallel. */
259 static inline bool
260 is_parallel_ctx (omp_context *ctx)
262 return gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL;
266 /* Return true if CTX is for an omp task. */
268 static inline bool
269 is_task_ctx (omp_context *ctx)
271 return gimple_code (ctx->stmt) == GIMPLE_OMP_TASK;
275 /* Return true if CTX is for an omp taskloop. */
277 static inline bool
278 is_taskloop_ctx (omp_context *ctx)
280 return gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
281 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP;
285 /* Return true if CTX is for an omp parallel or omp task. */
287 static inline bool
288 is_taskreg_ctx (omp_context *ctx)
290 return is_parallel_ctx (ctx) || is_task_ctx (ctx);
293 /* Return true if EXPR is variable sized. */
295 static inline bool
296 is_variable_sized (const_tree expr)
298 return !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
301 /* Lookup variables. The "maybe" form
302 allows for the variable form to not have been entered, otherwise we
303 assert that the variable must have been entered. */
305 static inline tree
306 lookup_decl (tree var, omp_context *ctx)
308 tree *n = ctx->cb.decl_map->get (var);
309 return *n;
312 static inline tree
313 maybe_lookup_decl (const_tree var, omp_context *ctx)
315 tree *n = ctx->cb.decl_map->get (const_cast<tree> (var));
316 return n ? *n : NULL_TREE;
319 static inline tree
320 lookup_field (tree var, omp_context *ctx)
322 splay_tree_node n;
323 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) var);
324 return (tree) n->value;
327 static inline tree
328 lookup_sfield (splay_tree_key key, omp_context *ctx)
330 splay_tree_node n;
331 n = splay_tree_lookup (ctx->sfield_map
332 ? ctx->sfield_map : ctx->field_map, key);
333 return (tree) n->value;
336 static inline tree
337 lookup_sfield (tree var, omp_context *ctx)
339 return lookup_sfield ((splay_tree_key) var, ctx);
342 static inline tree
343 maybe_lookup_field (splay_tree_key key, omp_context *ctx)
345 splay_tree_node n;
346 n = splay_tree_lookup (ctx->field_map, key);
347 return n ? (tree) n->value : NULL_TREE;
350 static inline tree
351 maybe_lookup_field (tree var, omp_context *ctx)
353 return maybe_lookup_field ((splay_tree_key) var, ctx);
356 /* Return true if DECL should be copied by pointer. SHARED_CTX is
357 the parallel context if DECL is to be shared. */
359 static bool
360 use_pointer_for_field (tree decl, omp_context *shared_ctx)
362 if (AGGREGATE_TYPE_P (TREE_TYPE (decl))
363 || TYPE_ATOMIC (TREE_TYPE (decl)))
364 return true;
366 /* We can only use copy-in/copy-out semantics for shared variables
367 when we know the value is not accessible from an outer scope. */
368 if (shared_ctx)
370 gcc_assert (!is_gimple_omp_oacc (shared_ctx->stmt));
372 /* ??? Trivially accessible from anywhere. But why would we even
373 be passing an address in this case? Should we simply assert
374 this to be false, or should we have a cleanup pass that removes
375 these from the list of mappings? */
376 if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
377 return true;
379 /* For variables with DECL_HAS_VALUE_EXPR_P set, we cannot tell
380 without analyzing the expression whether or not its location
381 is accessible to anyone else. In the case of nested parallel
382 regions it certainly may be. */
383 if (TREE_CODE (decl) != RESULT_DECL && DECL_HAS_VALUE_EXPR_P (decl))
384 return true;
386 /* Do not use copy-in/copy-out for variables that have their
387 address taken. */
388 if (TREE_ADDRESSABLE (decl))
389 return true;
391 /* lower_send_shared_vars only uses copy-in, but not copy-out
392 for these. */
393 if (TREE_READONLY (decl)
394 || ((TREE_CODE (decl) == RESULT_DECL
395 || TREE_CODE (decl) == PARM_DECL)
396 && DECL_BY_REFERENCE (decl)))
397 return false;
399 /* Disallow copy-in/out in nested parallel if
400 decl is shared in outer parallel, otherwise
401 each thread could store the shared variable
402 in its own copy-in location, making the
403 variable no longer really shared. */
404 if (shared_ctx->is_nested)
406 omp_context *up;
/* Find the closest enclosing parallel/task that knows DECL.  */
408 for (up = shared_ctx->outer; up; up = up->outer)
409 if (is_taskreg_ctx (up) && maybe_lookup_decl (decl, up))
410 break;
412 if (up)
414 tree c;
416 for (c = gimple_omp_taskreg_clauses (up->stmt);
417 c; c = OMP_CLAUSE_CHAIN (c))
418 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
419 && OMP_CLAUSE_DECL (c) == decl)
420 break;
422 if (c)
423 goto maybe_mark_addressable_and_ret;
427 /* For tasks avoid using copy-in/out. As tasks can be
428 deferred or executed in different thread, when GOMP_task
429 returns, the task hasn't necessarily terminated. */
430 if (is_task_ctx (shared_ctx))
432 tree outer;
/* Also reached via goto from the nested-parallel shared-clause case
   above; in both cases the variable must be passed by reference.  */
433 maybe_mark_addressable_and_ret:
434 outer = maybe_lookup_decl_in_outer_ctx (decl, shared_ctx);
435 if (is_gimple_reg (outer) && !omp_member_access_dummy_var (outer))
437 /* Taking address of OUTER in lower_send_shared_vars
438 might need regimplification of everything that uses the
439 variable. */
440 if (!task_shared_vars)
441 task_shared_vars = BITMAP_ALLOC (NULL);
442 bitmap_set_bit (task_shared_vars, DECL_UID (outer));
443 TREE_ADDRESSABLE (outer) = 1;
445 return true;
449 return false;
452 /* Construct a new automatic decl similar to VAR. */
454 static tree
455 omp_copy_decl_2 (tree var, tree name, tree type, omp_context *ctx)
457 tree copy = copy_var_decl (var, name, type);
459 DECL_CONTEXT (copy) = current_function_decl;
460 DECL_CHAIN (copy) = ctx->block_vars;
461 /* If VAR is listed in task_shared_vars, it means it wasn't
462 originally addressable and is just because task needs to take
463 it's address. But we don't need to take address of privatizations
464 from that var. */
465 if (TREE_ADDRESSABLE (var)
466 && task_shared_vars
467 && bitmap_bit_p (task_shared_vars, DECL_UID (var)))
468 TREE_ADDRESSABLE (copy) = 0;
469 ctx->block_vars = copy;
471 return copy;
474 static tree
475 omp_copy_decl_1 (tree var, omp_context *ctx)
477 return omp_copy_decl_2 (var, DECL_NAME (var), TREE_TYPE (var), ctx);
480 /* Build COMPONENT_REF and set TREE_THIS_VOLATILE and TREE_READONLY on it
481 as appropriate. */
482 static tree
483 omp_build_component_ref (tree obj, tree field)
485 tree ret = build3 (COMPONENT_REF, TREE_TYPE (field), obj, field, NULL);
486 if (TREE_THIS_VOLATILE (field))
487 TREE_THIS_VOLATILE (ret) |= 1;
488 if (TREE_READONLY (field))
489 TREE_READONLY (ret) |= 1;
490 return ret;
493 /* Build tree nodes to access the field for VAR on the receiver side. */
495 static tree
496 build_receiver_ref (tree var, bool by_ref, omp_context *ctx)
498 tree x, field = lookup_field (var, ctx);
500 /* If the receiver record type was remapped in the child function,
501 remap the field into the new record type. */
502 x = maybe_lookup_field (field, ctx);
503 if (x != NULL)
504 field = x;
506 x = build_simple_mem_ref (ctx->receiver_decl);
507 TREE_THIS_NOTRAP (x) = 1;
508 x = omp_build_component_ref (x, field);
509 if (by_ref)
511 x = build_simple_mem_ref (x);
512 TREE_THIS_NOTRAP (x) = 1;
515 return x;
518 /* Build tree nodes to access VAR in the scope outer to CTX. In the case
519 of a parallel, this is a component reference; for workshare constructs
520 this is some variable. */
522 static tree
523 build_outer_var_ref (tree var, omp_context *ctx,
524 enum omp_clause_code code = OMP_CLAUSE_ERROR)
526 tree x;
528 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
529 x = var;
530 else if (is_variable_sized (var))
/* Variable-sized vars are accessed through a pointer held in their
   DECL_VALUE_EXPR; recurse on that pointer and dereference it.  */
532 x = TREE_OPERAND (DECL_VALUE_EXPR (var), 0);
533 x = build_outer_var_ref (x, ctx, code);
534 x = build_simple_mem_ref (x);
536 else if (is_taskreg_ctx (ctx))
538 bool by_ref = use_pointer_for_field (var, NULL);
539 x = build_receiver_ref (var, by_ref, ctx);
541 else if ((gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
542 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
543 || (code == OMP_CLAUSE_PRIVATE
544 && (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
545 || gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS
546 || gimple_code (ctx->stmt) == GIMPLE_OMP_SINGLE)))
548 /* #pragma omp simd isn't a worksharing construct, and can reference
549 even private vars in its linear etc. clauses.
550 Similarly for OMP_CLAUSE_PRIVATE with outer ref, that can refer
551 to private vars in all worksharing constructs. */
552 x = NULL_TREE;
553 if (ctx->outer && is_taskreg_ctx (ctx))
554 x = lookup_decl (var, ctx->outer);
555 else if (ctx->outer)
556 x = maybe_lookup_decl_in_outer_ctx (var, ctx);
557 if (x == NULL_TREE)
558 x = var;
/* Taskloop lastprivate: the value lives in a field of the enclosing
   context's record, keyed by &DECL_UID (var) (see install_var_field
   with mask bit 8).  */
560 else if (code == OMP_CLAUSE_LASTPRIVATE && is_taskloop_ctx (ctx))
562 gcc_assert (ctx->outer);
563 splay_tree_node n
564 = splay_tree_lookup (ctx->outer->field_map,
565 (splay_tree_key) &DECL_UID (var));
566 if (n == NULL)
568 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx->outer)))
569 x = var;
570 else
571 x = lookup_decl (var, ctx->outer);
573 else
575 tree field = (tree) n->value;
576 /* If the receiver record type was remapped in the child function,
577 remap the field into the new record type. */
578 x = maybe_lookup_field (field, ctx->outer);
579 if (x != NULL)
580 field = x;
582 x = build_simple_mem_ref (ctx->outer->receiver_decl);
583 x = omp_build_component_ref (x, field);
584 if (use_pointer_for_field (var, ctx->outer))
585 x = build_simple_mem_ref (x);
588 else if (ctx->outer)
590 omp_context *outer = ctx->outer;
/* Skip over a GIMPLE_OMP_GRID_BODY wrapper context.  */
591 if (gimple_code (outer->stmt) == GIMPLE_OMP_GRID_BODY)
593 outer = outer->outer;
594 gcc_assert (outer
595 && gimple_code (outer->stmt) != GIMPLE_OMP_GRID_BODY);
597 x = lookup_decl (var, outer);
599 else if (omp_is_reference (var))
600 /* This can happen with orphaned constructs. If var is reference, it is
601 possible it is shared and as such valid. */
602 x = var;
603 else if (omp_member_access_dummy_var (var))
604 x = var;
605 else
606 gcc_unreachable ();
608 if (x == var)
610 tree t = omp_member_access_dummy_var (var);
611 if (t)
/* For member-access dummies, substitute the outer "this" into the
   value expression instead of using the dummy itself.  */
613 x = DECL_VALUE_EXPR (var);
614 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
615 if (o != t)
616 x = unshare_and_remap (x, t, o);
617 else
618 x = unshare_expr (x);
622 if (omp_is_reference (var))
623 x = build_simple_mem_ref (x);
625 return x;
628 /* Build tree nodes to access the field for VAR on the sender side. */
630 static tree
631 build_sender_ref (splay_tree_key key, omp_context *ctx)
633 tree field = lookup_sfield (key, ctx);
634 return omp_build_component_ref (ctx->sender_decl, field);
637 static tree
638 build_sender_ref (tree var, omp_context *ctx)
640 return build_sender_ref ((splay_tree_key) var, ctx);
643 /* Add a new field for VAR inside the structure CTX->SENDER_DECL. If
644 BASE_POINTERS_RESTRICT, declare the field with restrict. */
646 static void
647 install_var_field (tree var, bool by_ref, int mask, omp_context *ctx,
648 bool base_pointers_restrict = false)
650 tree field, type, sfield = NULL_TREE;
651 splay_tree_key key = (splay_tree_key) var;
/* MASK bits: 1 = enter into field_map/record_type, 2 = enter into
   sfield_map/srecord_type, 4 = array passed as pointer-to-pointer,
   8 = key the maps by &DECL_UID (var) rather than by VAR itself.  */
653 if ((mask & 8) != 0)
655 key = (splay_tree_key) &DECL_UID (var);
656 gcc_checking_assert (key != (splay_tree_key) var);
658 gcc_assert ((mask & 1) == 0
659 || !splay_tree_lookup (ctx->field_map, key));
660 gcc_assert ((mask & 2) == 0 || !ctx->sfield_map
661 || !splay_tree_lookup (ctx->sfield_map, key));
662 gcc_assert ((mask & 3) == 3
663 || !is_gimple_omp_oacc (ctx->stmt));
665 type = TREE_TYPE (var);
666 /* Prevent redeclaring the var in the split-off function with a restrict
667 pointer type. Note that we only clear type itself, restrict qualifiers in
668 the pointed-to type will be ignored by points-to analysis. */
669 if (POINTER_TYPE_P (type)
670 && TYPE_RESTRICT (type))
671 type = build_qualified_type (type, TYPE_QUALS (type) & ~TYPE_QUAL_RESTRICT);
673 if (mask & 4)
675 gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
676 type = build_pointer_type (build_pointer_type (type));
678 else if (by_ref)
680 type = build_pointer_type (type);
681 if (base_pointers_restrict)
682 type = build_qualified_type (type, TYPE_QUAL_RESTRICT);
684 else if ((mask & 3) == 1 && omp_is_reference (var))
685 type = TREE_TYPE (type);
687 field = build_decl (DECL_SOURCE_LOCATION (var),
688 FIELD_DECL, DECL_NAME (var), type);
690 /* Remember what variable this field was created for. This does have a
691 side effect of making dwarf2out ignore this member, so for helpful
692 debugging we clear it later in delete_omp_context. */
693 DECL_ABSTRACT_ORIGIN (field) = var;
694 if (type == TREE_TYPE (var))
696 SET_DECL_ALIGN (field, DECL_ALIGN (var));
697 DECL_USER_ALIGN (field) = DECL_USER_ALIGN (var);
698 TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (var);
700 else
701 SET_DECL_ALIGN (field, TYPE_ALIGN (type));
703 if ((mask & 3) == 3)
/* Field goes into both records; mirror it into srecord_type too.  */
705 insert_field_into_struct (ctx->record_type, field);
706 if (ctx->srecord_type)
708 sfield = build_decl (DECL_SOURCE_LOCATION (var),
709 FIELD_DECL, DECL_NAME (var), type);
710 DECL_ABSTRACT_ORIGIN (sfield) = var;
711 SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
712 DECL_USER_ALIGN (sfield) = DECL_USER_ALIGN (field);
713 TREE_THIS_VOLATILE (sfield) = TREE_THIS_VOLATILE (field);
714 insert_field_into_struct (ctx->srecord_type, sfield);
717 else
/* Field goes into only one record; lazily create srecord_type by
   mirroring all fields accumulated so far in record_type.  */
719 if (ctx->srecord_type == NULL_TREE)
721 tree t;
723 ctx->srecord_type = lang_hooks.types.make_type (RECORD_TYPE);
724 ctx->sfield_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
725 for (t = TYPE_FIELDS (ctx->record_type); t ; t = TREE_CHAIN (t))
727 sfield = build_decl (DECL_SOURCE_LOCATION (t),
728 FIELD_DECL, DECL_NAME (t), TREE_TYPE (t));
729 DECL_ABSTRACT_ORIGIN (sfield) = DECL_ABSTRACT_ORIGIN (t);
730 insert_field_into_struct (ctx->srecord_type, sfield);
731 splay_tree_insert (ctx->sfield_map,
732 (splay_tree_key) DECL_ABSTRACT_ORIGIN (t),
733 (splay_tree_value) sfield);
736 sfield = field;
737 insert_field_into_struct ((mask & 1) ? ctx->record_type
738 : ctx->srecord_type, field);
741 if (mask & 1)
742 splay_tree_insert (ctx->field_map, key, (splay_tree_value) field);
743 if ((mask & 2) && ctx->sfield_map)
744 splay_tree_insert (ctx->sfield_map, key, (splay_tree_value) sfield);
747 static tree
748 install_var_local (tree var, omp_context *ctx)
750 tree new_var = omp_copy_decl_1 (var, ctx);
751 insert_decl_map (&ctx->cb, var, new_var);
752 return new_var;
755 /* Adjust the replacement for DECL in CTX for the new context. This means
756 copying the DECL_VALUE_EXPR, and fixing up the type. */
758 static void
759 fixup_remapped_decl (tree decl, omp_context *ctx, bool private_debug)
761 tree new_decl, size;
763 new_decl = lookup_decl (decl, ctx);
765 TREE_TYPE (new_decl) = remap_type (TREE_TYPE (decl), &ctx->cb);
767 if ((!TREE_CONSTANT (DECL_SIZE (new_decl)) || private_debug)
768 && DECL_HAS_VALUE_EXPR_P (decl))
770 tree ve = DECL_VALUE_EXPR (decl);
771 walk_tree (&ve, copy_tree_body_r, &ctx->cb, NULL);
772 SET_DECL_VALUE_EXPR (new_decl, ve);
773 DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
776 if (!TREE_CONSTANT (DECL_SIZE (new_decl)))
778 size = remap_decl (DECL_SIZE (decl), &ctx->cb);
779 if (size == error_mark_node)
780 size = TYPE_SIZE (TREE_TYPE (new_decl));
781 DECL_SIZE (new_decl) = size;
783 size = remap_decl (DECL_SIZE_UNIT (decl), &ctx->cb);
784 if (size == error_mark_node)
785 size = TYPE_SIZE_UNIT (TREE_TYPE (new_decl));
786 DECL_SIZE_UNIT (new_decl) = size;
790 /* The callback for remap_decl. Search all containing contexts for a
791 mapping of the variable; this avoids having to duplicate the splay
792 tree ahead of time. We know a mapping doesn't already exist in the
793 given context. Create new mappings to implement default semantics. */
795 static tree
796 omp_copy_decl (tree var, copy_body_data *cb)
798 omp_context *ctx = (omp_context *) cb;
799 tree new_var;
801 if (TREE_CODE (var) == LABEL_DECL)
803 new_var = create_artificial_label (DECL_SOURCE_LOCATION (var));
804 DECL_CONTEXT (new_var) = current_function_decl;
805 insert_decl_map (&ctx->cb, var, new_var);
806 return new_var;
809 while (!is_taskreg_ctx (ctx))
811 ctx = ctx->outer;
812 if (ctx == NULL)
813 return var;
814 new_var = maybe_lookup_decl (var, ctx);
815 if (new_var)
816 return new_var;
819 if (is_global_var (var) || decl_function_context (var) != ctx->cb.src_fn)
820 return var;
822 return error_mark_node;
825 /* Create a new context, with OUTER_CTX being the surrounding context. */
827 static omp_context *
828 new_omp_context (gimple *stmt, omp_context *outer_ctx)
830 omp_context *ctx = XCNEW (omp_context);
832 splay_tree_insert (all_contexts, (splay_tree_key) stmt,
833 (splay_tree_value) ctx);
834 ctx->stmt = stmt;
836 if (outer_ctx)
838 ctx->outer = outer_ctx;
839 ctx->cb = outer_ctx->cb;
840 ctx->cb.block = NULL;
841 ctx->depth = outer_ctx->depth + 1;
843 else
845 ctx->cb.src_fn = current_function_decl;
846 ctx->cb.dst_fn = current_function_decl;
847 ctx->cb.src_node = cgraph_node::get (current_function_decl);
848 gcc_checking_assert (ctx->cb.src_node);
849 ctx->cb.dst_node = ctx->cb.src_node;
850 ctx->cb.src_cfun = cfun;
851 ctx->cb.copy_decl = omp_copy_decl;
852 ctx->cb.eh_lp_nr = 0;
853 ctx->cb.transform_call_graph_edges = CB_CGE_MOVE;
854 ctx->depth = 1;
857 ctx->cb.decl_map = new hash_map<tree, tree>;
859 return ctx;
862 static gimple_seq maybe_catch_exception (gimple_seq);
864 /* Finalize task copyfn. */
866 static void
867 finalize_task_copyfn (gomp_task *task_stmt)
869 struct function *child_cfun;
870 tree child_fn;
871 gimple_seq seq = NULL, new_seq;
872 gbind *bind;
874 child_fn = gimple_omp_task_copy_fn (task_stmt);
875 if (child_fn == NULL_TREE)
876 return;
878 child_cfun = DECL_STRUCT_FUNCTION (child_fn);
879 DECL_STRUCT_FUNCTION (child_fn)->curr_properties = cfun->curr_properties;
881 push_cfun (child_cfun);
882 bind = gimplify_body (child_fn, false);
883 gimple_seq_add_stmt (&seq, bind);
884 new_seq = maybe_catch_exception (seq);
885 if (new_seq != seq)
887 bind = gimple_build_bind (NULL, new_seq, NULL);
888 seq = NULL;
889 gimple_seq_add_stmt (&seq, bind);
891 gimple_set_body (child_fn, seq);
892 pop_cfun ();
894 /* Inform the callgraph about the new function. */
895 cgraph_node *node = cgraph_node::get_create (child_fn);
896 node->parallelized_function = 1;
897 cgraph_node::add_new_function (child_fn, false);
900 /* Destroy a omp_context data structures. Called through the splay tree
901 value delete callback. */
903 static void
904 delete_omp_context (splay_tree_value value)
906 omp_context *ctx = (omp_context *) value;
908 delete ctx->cb.decl_map;
910 if (ctx->field_map)
911 splay_tree_delete (ctx->field_map);
912 if (ctx->sfield_map)
913 splay_tree_delete (ctx->sfield_map);
915 /* We hijacked DECL_ABSTRACT_ORIGIN earlier. We need to clear it before
916 it produces corrupt debug information. */
917 if (ctx->record_type)
919 tree t;
920 for (t = TYPE_FIELDS (ctx->record_type); t ; t = DECL_CHAIN (t))
921 DECL_ABSTRACT_ORIGIN (t) = NULL;
923 if (ctx->srecord_type)
925 tree t;
926 for (t = TYPE_FIELDS (ctx->srecord_type); t ; t = DECL_CHAIN (t))
927 DECL_ABSTRACT_ORIGIN (t) = NULL;
930 if (is_task_ctx (ctx))
931 finalize_task_copyfn (as_a <gomp_task *> (ctx->stmt));
933 XDELETE (ctx);
936 /* Fix up RECEIVER_DECL with a type that has been remapped to the child
937 context. */
939 static void
940 fixup_child_record_type (omp_context *ctx)
942 tree f, type = ctx->record_type;
944 if (!ctx->receiver_decl)
945 return;
946 /* ??? It isn't sufficient to just call remap_type here, because
947 variably_modified_type_p doesn't work the way we expect for
948 record types. Testing each field for whether it needs remapping
949 and creating a new record by hand works, however. */
950 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
951 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
952 break;
/* F non-null means at least one field needs remapping: build a fresh
   record with all fields copied and remapped.  */
953 if (f)
955 tree name, new_fields = NULL;
957 type = lang_hooks.types.make_type (RECORD_TYPE);
958 name = DECL_NAME (TYPE_NAME (ctx->record_type));
959 name = build_decl (DECL_SOURCE_LOCATION (ctx->receiver_decl),
960 TYPE_DECL, name, type);
961 TYPE_NAME (type) = name;
963 for (f = TYPE_FIELDS (ctx->record_type); f ; f = DECL_CHAIN (f))
965 tree new_f = copy_node (f);
966 DECL_CONTEXT (new_f) = type;
967 TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &ctx->cb);
968 DECL_CHAIN (new_f) = new_fields;
969 walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &ctx->cb, NULL);
970 walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r,
971 &ctx->cb, NULL);
972 walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
973 &ctx->cb, NULL);
974 new_fields = new_f;
976 /* Arrange to be able to look up the receiver field
977 given the sender field. */
978 splay_tree_insert (ctx->field_map, (splay_tree_key) f,
979 (splay_tree_value) new_f);
981 TYPE_FIELDS (type) = nreverse (new_fields);
982 layout_type (type);
985 /* In a target region we never modify any of the pointers in *.omp_data_i,
986 so attempt to help the optimizers. */
987 if (is_gimple_omp_offloaded (ctx->stmt))
988 type = build_qualified_type (type, TYPE_QUAL_CONST);
/* The receiver accesses the (possibly remapped) record through a
   restrict-qualified reference.  */
990 TREE_TYPE (ctx->receiver_decl)
991 = build_qualified_type (build_reference_type (type), TYPE_QUAL_RESTRICT);
994 /* Instantiate decls as necessary in CTX to satisfy the data sharing
995 specified by CLAUSES. If BASE_POINTERS_RESTRICT, install var field with
996 restrict. */
998 static void
999 scan_sharing_clauses (tree clauses, omp_context *ctx,
1000 bool base_pointers_restrict = false)
1002 tree c, decl;
1003 bool scan_array_reductions = false;
1005 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1007 bool by_ref;
1009 switch (OMP_CLAUSE_CODE (c))
1011 case OMP_CLAUSE_PRIVATE:
1012 decl = OMP_CLAUSE_DECL (c);
1013 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
1014 goto do_private;
1015 else if (!is_variable_sized (decl))
1016 install_var_local (decl, ctx);
1017 break;
1019 case OMP_CLAUSE_SHARED:
1020 decl = OMP_CLAUSE_DECL (c);
1021 /* Ignore shared directives in teams construct. */
1022 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
1024 /* Global variables don't need to be copied,
1025 the receiver side will use them directly. */
1026 tree odecl = maybe_lookup_decl_in_outer_ctx (decl, ctx);
1027 if (is_global_var (odecl))
1028 break;
1029 insert_decl_map (&ctx->cb, decl, odecl);
1030 break;
1032 gcc_assert (is_taskreg_ctx (ctx));
1033 gcc_assert (!COMPLETE_TYPE_P (TREE_TYPE (decl))
1034 || !is_variable_sized (decl));
1035 /* Global variables don't need to be copied,
1036 the receiver side will use them directly. */
1037 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1038 break;
1039 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
1041 use_pointer_for_field (decl, ctx);
1042 break;
1044 by_ref = use_pointer_for_field (decl, NULL);
1045 if ((! TREE_READONLY (decl) && !OMP_CLAUSE_SHARED_READONLY (c))
1046 || TREE_ADDRESSABLE (decl)
1047 || by_ref
1048 || omp_is_reference (decl))
1050 by_ref = use_pointer_for_field (decl, ctx);
1051 install_var_field (decl, by_ref, 3, ctx);
1052 install_var_local (decl, ctx);
1053 break;
1055 /* We don't need to copy const scalar vars back. */
1056 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_FIRSTPRIVATE);
1057 goto do_private;
1059 case OMP_CLAUSE_REDUCTION:
1060 decl = OMP_CLAUSE_DECL (c);
1061 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
1062 && TREE_CODE (decl) == MEM_REF)
1064 tree t = TREE_OPERAND (decl, 0);
1065 if (TREE_CODE (t) == POINTER_PLUS_EXPR)
1066 t = TREE_OPERAND (t, 0);
1067 if (TREE_CODE (t) == INDIRECT_REF
1068 || TREE_CODE (t) == ADDR_EXPR)
1069 t = TREE_OPERAND (t, 0);
1070 install_var_local (t, ctx);
1071 if (is_taskreg_ctx (ctx)
1072 && !is_global_var (maybe_lookup_decl_in_outer_ctx (t, ctx))
1073 && !is_variable_sized (t))
1075 by_ref = use_pointer_for_field (t, ctx);
1076 install_var_field (t, by_ref, 3, ctx);
1078 break;
1080 goto do_private;
1082 case OMP_CLAUSE_LASTPRIVATE:
1083 /* Let the corresponding firstprivate clause create
1084 the variable. */
1085 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
1086 break;
1087 /* FALLTHRU */
1089 case OMP_CLAUSE_FIRSTPRIVATE:
1090 case OMP_CLAUSE_LINEAR:
1091 decl = OMP_CLAUSE_DECL (c);
1092 do_private:
1093 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
1094 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
1095 && is_gimple_omp_offloaded (ctx->stmt))
1097 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
1098 install_var_field (decl, !omp_is_reference (decl), 3, ctx);
1099 else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1100 install_var_field (decl, true, 3, ctx);
1101 else
1102 install_var_field (decl, false, 3, ctx);
1104 if (is_variable_sized (decl))
1106 if (is_task_ctx (ctx))
1107 install_var_field (decl, false, 1, ctx);
1108 break;
1110 else if (is_taskreg_ctx (ctx))
1112 bool global
1113 = is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx));
1114 by_ref = use_pointer_for_field (decl, NULL);
1116 if (is_task_ctx (ctx)
1117 && (global || by_ref || omp_is_reference (decl)))
1119 install_var_field (decl, false, 1, ctx);
1120 if (!global)
1121 install_var_field (decl, by_ref, 2, ctx);
1123 else if (!global)
1124 install_var_field (decl, by_ref, 3, ctx);
1126 install_var_local (decl, ctx);
1127 break;
1129 case OMP_CLAUSE_USE_DEVICE_PTR:
1130 decl = OMP_CLAUSE_DECL (c);
1131 if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1132 install_var_field (decl, true, 3, ctx);
1133 else
1134 install_var_field (decl, false, 3, ctx);
1135 if (DECL_SIZE (decl)
1136 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1138 tree decl2 = DECL_VALUE_EXPR (decl);
1139 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1140 decl2 = TREE_OPERAND (decl2, 0);
1141 gcc_assert (DECL_P (decl2));
1142 install_var_local (decl2, ctx);
1144 install_var_local (decl, ctx);
1145 break;
1147 case OMP_CLAUSE_IS_DEVICE_PTR:
1148 decl = OMP_CLAUSE_DECL (c);
1149 goto do_private;
1151 case OMP_CLAUSE__LOOPTEMP_:
1152 gcc_assert (is_taskreg_ctx (ctx));
1153 decl = OMP_CLAUSE_DECL (c);
1154 install_var_field (decl, false, 3, ctx);
1155 install_var_local (decl, ctx);
1156 break;
1158 case OMP_CLAUSE_COPYPRIVATE:
1159 case OMP_CLAUSE_COPYIN:
1160 decl = OMP_CLAUSE_DECL (c);
1161 by_ref = use_pointer_for_field (decl, NULL);
1162 install_var_field (decl, by_ref, 3, ctx);
1163 break;
1165 case OMP_CLAUSE_DEFAULT:
1166 ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
1167 break;
1169 case OMP_CLAUSE_FINAL:
1170 case OMP_CLAUSE_IF:
1171 case OMP_CLAUSE_NUM_THREADS:
1172 case OMP_CLAUSE_NUM_TEAMS:
1173 case OMP_CLAUSE_THREAD_LIMIT:
1174 case OMP_CLAUSE_DEVICE:
1175 case OMP_CLAUSE_SCHEDULE:
1176 case OMP_CLAUSE_DIST_SCHEDULE:
1177 case OMP_CLAUSE_DEPEND:
1178 case OMP_CLAUSE_PRIORITY:
1179 case OMP_CLAUSE_GRAINSIZE:
1180 case OMP_CLAUSE_NUM_TASKS:
1181 case OMP_CLAUSE__CILK_FOR_COUNT_:
1182 case OMP_CLAUSE_NUM_GANGS:
1183 case OMP_CLAUSE_NUM_WORKERS:
1184 case OMP_CLAUSE_VECTOR_LENGTH:
1185 if (ctx->outer)
1186 scan_omp_op (&OMP_CLAUSE_OPERAND (c, 0), ctx->outer);
1187 break;
1189 case OMP_CLAUSE_TO:
1190 case OMP_CLAUSE_FROM:
1191 case OMP_CLAUSE_MAP:
1192 if (ctx->outer)
1193 scan_omp_op (&OMP_CLAUSE_SIZE (c), ctx->outer);
1194 decl = OMP_CLAUSE_DECL (c);
1195 /* Global variables with "omp declare target" attribute
1196 don't need to be copied, the receiver side will use them
1197 directly. However, global variables with "omp declare target link"
1198 attribute need to be copied. */
1199 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1200 && DECL_P (decl)
1201 && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
1202 && (OMP_CLAUSE_MAP_KIND (c)
1203 != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
1204 || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1205 && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
1206 && varpool_node::get_create (decl)->offloadable
1207 && !lookup_attribute ("omp declare target link",
1208 DECL_ATTRIBUTES (decl)))
1209 break;
1210 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1211 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER)
1213 /* Ignore GOMP_MAP_POINTER kind for arrays in regions that are
1214 not offloaded; there is nothing to map for those. */
1215 if (!is_gimple_omp_offloaded (ctx->stmt)
1216 && !POINTER_TYPE_P (TREE_TYPE (decl))
1217 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
1218 break;
1220 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1221 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
1222 || (OMP_CLAUSE_MAP_KIND (c)
1223 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
1225 if (TREE_CODE (decl) == COMPONENT_REF
1226 || (TREE_CODE (decl) == INDIRECT_REF
1227 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
1228 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
1229 == REFERENCE_TYPE)))
1230 break;
1231 if (DECL_SIZE (decl)
1232 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1234 tree decl2 = DECL_VALUE_EXPR (decl);
1235 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1236 decl2 = TREE_OPERAND (decl2, 0);
1237 gcc_assert (DECL_P (decl2));
1238 install_var_local (decl2, ctx);
1240 install_var_local (decl, ctx);
1241 break;
1243 if (DECL_P (decl))
1245 if (DECL_SIZE (decl)
1246 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1248 tree decl2 = DECL_VALUE_EXPR (decl);
1249 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1250 decl2 = TREE_OPERAND (decl2, 0);
1251 gcc_assert (DECL_P (decl2));
1252 install_var_field (decl2, true, 3, ctx);
1253 install_var_local (decl2, ctx);
1254 install_var_local (decl, ctx);
1256 else
1258 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1259 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
1260 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
1261 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1262 install_var_field (decl, true, 7, ctx);
1263 else
1264 install_var_field (decl, true, 3, ctx,
1265 base_pointers_restrict);
1266 if (is_gimple_omp_offloaded (ctx->stmt)
1267 && !OMP_CLAUSE_MAP_IN_REDUCTION (c))
1268 install_var_local (decl, ctx);
1271 else
1273 tree base = get_base_address (decl);
1274 tree nc = OMP_CLAUSE_CHAIN (c);
1275 if (DECL_P (base)
1276 && nc != NULL_TREE
1277 && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
1278 && OMP_CLAUSE_DECL (nc) == base
1279 && OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_POINTER
1280 && integer_zerop (OMP_CLAUSE_SIZE (nc)))
1282 OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c) = 1;
1283 OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (nc) = 1;
1285 else
1287 if (ctx->outer)
1289 scan_omp_op (&OMP_CLAUSE_DECL (c), ctx->outer);
1290 decl = OMP_CLAUSE_DECL (c);
1292 gcc_assert (!splay_tree_lookup (ctx->field_map,
1293 (splay_tree_key) decl));
1294 tree field
1295 = build_decl (OMP_CLAUSE_LOCATION (c),
1296 FIELD_DECL, NULL_TREE, ptr_type_node);
1297 SET_DECL_ALIGN (field, TYPE_ALIGN (ptr_type_node));
1298 insert_field_into_struct (ctx->record_type, field);
1299 splay_tree_insert (ctx->field_map, (splay_tree_key) decl,
1300 (splay_tree_value) field);
1303 break;
1305 case OMP_CLAUSE__GRIDDIM_:
1306 if (ctx->outer)
1308 scan_omp_op (&OMP_CLAUSE__GRIDDIM__SIZE (c), ctx->outer);
1309 scan_omp_op (&OMP_CLAUSE__GRIDDIM__GROUP (c), ctx->outer);
1311 break;
1313 case OMP_CLAUSE_NOWAIT:
1314 case OMP_CLAUSE_ORDERED:
1315 case OMP_CLAUSE_COLLAPSE:
1316 case OMP_CLAUSE_UNTIED:
1317 case OMP_CLAUSE_MERGEABLE:
1318 case OMP_CLAUSE_PROC_BIND:
1319 case OMP_CLAUSE_SAFELEN:
1320 case OMP_CLAUSE_SIMDLEN:
1321 case OMP_CLAUSE_THREADS:
1322 case OMP_CLAUSE_SIMD:
1323 case OMP_CLAUSE_NOGROUP:
1324 case OMP_CLAUSE_DEFAULTMAP:
1325 case OMP_CLAUSE_ASYNC:
1326 case OMP_CLAUSE_WAIT:
1327 case OMP_CLAUSE_GANG:
1328 case OMP_CLAUSE_WORKER:
1329 case OMP_CLAUSE_VECTOR:
1330 case OMP_CLAUSE_INDEPENDENT:
1331 case OMP_CLAUSE_AUTO:
1332 case OMP_CLAUSE_SEQ:
1333 case OMP_CLAUSE_TILE:
1334 case OMP_CLAUSE__SIMT_:
1335 break;
1337 case OMP_CLAUSE_ALIGNED:
1338 decl = OMP_CLAUSE_DECL (c);
1339 if (is_global_var (decl)
1340 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1341 install_var_local (decl, ctx);
1342 break;
1344 case OMP_CLAUSE__CACHE_:
1345 default:
1346 gcc_unreachable ();
1350 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1352 switch (OMP_CLAUSE_CODE (c))
1354 case OMP_CLAUSE_LASTPRIVATE:
1355 /* Let the corresponding firstprivate clause create
1356 the variable. */
1357 if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
1358 scan_array_reductions = true;
1359 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
1360 break;
1361 /* FALLTHRU */
1363 case OMP_CLAUSE_FIRSTPRIVATE:
1364 case OMP_CLAUSE_PRIVATE:
1365 case OMP_CLAUSE_LINEAR:
1366 case OMP_CLAUSE_IS_DEVICE_PTR:
1367 decl = OMP_CLAUSE_DECL (c);
1368 if (is_variable_sized (decl))
1370 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
1371 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
1372 && is_gimple_omp_offloaded (ctx->stmt))
1374 tree decl2 = DECL_VALUE_EXPR (decl);
1375 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1376 decl2 = TREE_OPERAND (decl2, 0);
1377 gcc_assert (DECL_P (decl2));
1378 install_var_local (decl2, ctx);
1379 fixup_remapped_decl (decl2, ctx, false);
1381 install_var_local (decl, ctx);
1383 fixup_remapped_decl (decl, ctx,
1384 OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
1385 && OMP_CLAUSE_PRIVATE_DEBUG (c));
1386 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
1387 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
1388 scan_array_reductions = true;
1389 break;
1391 case OMP_CLAUSE_REDUCTION:
1392 decl = OMP_CLAUSE_DECL (c);
1393 if (TREE_CODE (decl) != MEM_REF)
1395 if (is_variable_sized (decl))
1396 install_var_local (decl, ctx);
1397 fixup_remapped_decl (decl, ctx, false);
1399 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
1400 scan_array_reductions = true;
1401 break;
1403 case OMP_CLAUSE_SHARED:
1404 /* Ignore shared directives in teams construct. */
1405 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
1406 break;
1407 decl = OMP_CLAUSE_DECL (c);
1408 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1409 break;
1410 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
1412 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl,
1413 ctx->outer)))
1414 break;
1415 bool by_ref = use_pointer_for_field (decl, ctx);
1416 install_var_field (decl, by_ref, 11, ctx);
1417 break;
1419 fixup_remapped_decl (decl, ctx, false);
1420 break;
1422 case OMP_CLAUSE_MAP:
1423 if (!is_gimple_omp_offloaded (ctx->stmt))
1424 break;
1425 decl = OMP_CLAUSE_DECL (c);
1426 if (DECL_P (decl)
1427 && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
1428 && (OMP_CLAUSE_MAP_KIND (c)
1429 != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
1430 || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1431 && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
1432 && varpool_node::get_create (decl)->offloadable)
1433 break;
1434 if (DECL_P (decl))
1436 if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
1437 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
1438 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
1439 && !COMPLETE_TYPE_P (TREE_TYPE (decl)))
1441 tree new_decl = lookup_decl (decl, ctx);
1442 TREE_TYPE (new_decl)
1443 = remap_type (TREE_TYPE (decl), &ctx->cb);
1445 else if (DECL_SIZE (decl)
1446 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1448 tree decl2 = DECL_VALUE_EXPR (decl);
1449 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1450 decl2 = TREE_OPERAND (decl2, 0);
1451 gcc_assert (DECL_P (decl2));
1452 fixup_remapped_decl (decl2, ctx, false);
1453 fixup_remapped_decl (decl, ctx, true);
1455 else
1456 fixup_remapped_decl (decl, ctx, false);
1458 break;
1460 case OMP_CLAUSE_COPYPRIVATE:
1461 case OMP_CLAUSE_COPYIN:
1462 case OMP_CLAUSE_DEFAULT:
1463 case OMP_CLAUSE_IF:
1464 case OMP_CLAUSE_NUM_THREADS:
1465 case OMP_CLAUSE_NUM_TEAMS:
1466 case OMP_CLAUSE_THREAD_LIMIT:
1467 case OMP_CLAUSE_DEVICE:
1468 case OMP_CLAUSE_SCHEDULE:
1469 case OMP_CLAUSE_DIST_SCHEDULE:
1470 case OMP_CLAUSE_NOWAIT:
1471 case OMP_CLAUSE_ORDERED:
1472 case OMP_CLAUSE_COLLAPSE:
1473 case OMP_CLAUSE_UNTIED:
1474 case OMP_CLAUSE_FINAL:
1475 case OMP_CLAUSE_MERGEABLE:
1476 case OMP_CLAUSE_PROC_BIND:
1477 case OMP_CLAUSE_SAFELEN:
1478 case OMP_CLAUSE_SIMDLEN:
1479 case OMP_CLAUSE_ALIGNED:
1480 case OMP_CLAUSE_DEPEND:
1481 case OMP_CLAUSE__LOOPTEMP_:
1482 case OMP_CLAUSE_TO:
1483 case OMP_CLAUSE_FROM:
1484 case OMP_CLAUSE_PRIORITY:
1485 case OMP_CLAUSE_GRAINSIZE:
1486 case OMP_CLAUSE_NUM_TASKS:
1487 case OMP_CLAUSE_THREADS:
1488 case OMP_CLAUSE_SIMD:
1489 case OMP_CLAUSE_NOGROUP:
1490 case OMP_CLAUSE_DEFAULTMAP:
1491 case OMP_CLAUSE_USE_DEVICE_PTR:
1492 case OMP_CLAUSE__CILK_FOR_COUNT_:
1493 case OMP_CLAUSE_ASYNC:
1494 case OMP_CLAUSE_WAIT:
1495 case OMP_CLAUSE_NUM_GANGS:
1496 case OMP_CLAUSE_NUM_WORKERS:
1497 case OMP_CLAUSE_VECTOR_LENGTH:
1498 case OMP_CLAUSE_GANG:
1499 case OMP_CLAUSE_WORKER:
1500 case OMP_CLAUSE_VECTOR:
1501 case OMP_CLAUSE_INDEPENDENT:
1502 case OMP_CLAUSE_AUTO:
1503 case OMP_CLAUSE_SEQ:
1504 case OMP_CLAUSE_TILE:
1505 case OMP_CLAUSE__GRIDDIM_:
1506 case OMP_CLAUSE__SIMT_:
1507 break;
1509 case OMP_CLAUSE__CACHE_:
1510 default:
1511 gcc_unreachable ();
1515 gcc_checking_assert (!scan_array_reductions
1516 || !is_gimple_omp_oacc (ctx->stmt));
1517 if (scan_array_reductions)
1519 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1520 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
1521 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
1523 scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c), ctx);
1524 scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
1526 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
1527 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
1528 scan_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
1529 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
1530 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
1531 scan_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
1535 /* Create a new name for omp child function. Returns an identifier. If
1536 IS_CILK_FOR is true then the suffix for the child function is
1537 "_cilk_for_fn." */
1539 static tree
1540 create_omp_child_function_name (bool task_copy, bool is_cilk_for)
1542 if (is_cilk_for)
1543 return clone_function_name (current_function_decl, "_cilk_for_fn");
1544 return clone_function_name (current_function_decl,
1545 task_copy ? "_omp_cpyfn" : "_omp_fn");
1548 /* Returns the type of the induction variable for the child function for
1549 _Cilk_for and the types for _high and _low variables based on TYPE. */
1551 static tree
1552 cilk_for_check_loop_diff_type (tree type)
1554 if (TYPE_PRECISION (type) <= TYPE_PRECISION (uint32_type_node))
1556 if (TYPE_UNSIGNED (type))
1557 return uint32_type_node;
1558 else
1559 return integer_type_node;
1561 else
1563 if (TYPE_UNSIGNED (type))
1564 return uint64_type_node;
1565 else
1566 return long_long_integer_type_node;
1570 /* Return true if CTX may belong to offloaded code: either if current function
1571 is offloaded, or any enclosing context corresponds to a target region. */
1573 static bool
1574 omp_maybe_offloaded_ctx (omp_context *ctx)
1576 if (cgraph_node::get (current_function_decl)->offloadable)
1577 return true;
1578 for (; ctx; ctx = ctx->outer)
1579 if (is_gimple_omp_offloaded (ctx->stmt))
1580 return true;
1581 return false;
1584 /* Build a decl for the omp child function.  It'll not contain a body
1585 yet, just the bare decl.  The outlined function takes a single
.omp_data_i pointer argument (plus __low/__high for _Cilk_for, or a
second pointer for a task copy function).  */
1587 static void
1588 create_omp_child_function (omp_context *ctx, bool task_copy)
1590 tree decl, type, name, t;
/* Detect a Cilk+ combined-for parallel: only then do we emit the extra
   __low/__high parameters below.  */
1592 tree cilk_for_count
1593 = (flag_cilkplus && gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL)
1594 ? omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
1595 OMP_CLAUSE__CILK_FOR_COUNT_) : NULL_TREE;
1596 tree cilk_var_type = NULL_TREE;
1598 name = create_omp_child_function_name (task_copy,
1599 cilk_for_count != NULL_TREE);
/* Choose the prototype: task copy functions take (void *, void *),
   Cilk for-functions take (void *, T, T), everything else (void *).  */
1600 if (task_copy)
1601 type = build_function_type_list (void_type_node, ptr_type_node,
1602 ptr_type_node, NULL_TREE);
1603 else if (cilk_for_count)
1605 type = TREE_TYPE (OMP_CLAUSE_OPERAND (cilk_for_count, 0));
1606 cilk_var_type = cilk_for_check_loop_diff_type (type);
1607 type = build_function_type_list (void_type_node, ptr_type_node,
1608 cilk_var_type, cilk_var_type, NULL_TREE);
1610 else
1611 type = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
1613 decl = build_decl (gimple_location (ctx->stmt), FUNCTION_DECL, name, type);
1615 gcc_checking_assert (!is_gimple_omp_oacc (ctx->stmt)
1616 || !task_copy);
/* Record the new decl: it becomes the copy-statement destination for
   normal outlining, or the task's copy function otherwise.  */
1617 if (!task_copy)
1618 ctx->cb.dst_fn = decl;
1619 else
1620 gimple_omp_task_set_copy_fn (ctx->stmt, decl);
/* Standard flags for a compiler-generated, non-inlinable local function.  */
1622 TREE_STATIC (decl) = 1;
1623 TREE_USED (decl) = 1;
1624 DECL_ARTIFICIAL (decl) = 1;
1625 DECL_IGNORED_P (decl) = 0;
1626 TREE_PUBLIC (decl) = 0;
1627 DECL_UNINLINABLE (decl) = 1;
1628 DECL_EXTERNAL (decl) = 0;
1629 DECL_CONTEXT (decl) = NULL_TREE;
1630 DECL_INITIAL (decl) = make_node (BLOCK);
1631 BLOCK_SUPERCONTEXT (DECL_INITIAL (decl)) = decl;
/* Propagate offloadability and tag the decl so the offload machinery
   can find the entry point later.  */
1632 if (omp_maybe_offloaded_ctx (ctx))
1634 cgraph_node::get_create (decl)->offloadable = 1;
1635 if (ENABLE_OFFLOADING)
1636 g->have_offload = true;
1639 if (cgraph_node::get_create (decl)->offloadable
1640 && !lookup_attribute ("omp declare target",
1641 DECL_ATTRIBUTES (current_function_decl)))
1643 const char *target_attr = (is_gimple_omp_offloaded (ctx->stmt)
1644 ? "omp target entrypoint"
1645 : "omp declare target");
1646 DECL_ATTRIBUTES (decl)
1647 = tree_cons (get_identifier (target_attr),
1648 NULL_TREE, DECL_ATTRIBUTES (decl));
/* The child function returns void.  */
1651 t = build_decl (DECL_SOURCE_LOCATION (decl),
1652 RESULT_DECL, NULL_TREE, void_type_node);
1653 DECL_ARTIFICIAL (t) = 1;
1654 DECL_IGNORED_P (t) = 1;
1655 DECL_CONTEXT (t) = decl;
1656 DECL_RESULT (decl) = t;
1658 /* _Cilk_for's child function requires two extra parameters called
1659 __low and __high that are set the by Cilk runtime when it calls this
1660 function.  Parameters are chained front-to-back, so __high is pushed
before __low, and .omp_data_i below ends up first in DECL_ARGUMENTS.  */
1661 if (cilk_for_count)
1663 t = build_decl (DECL_SOURCE_LOCATION (decl),
1664 PARM_DECL, get_identifier ("__high"), cilk_var_type);
1665 DECL_ARTIFICIAL (t) = 1;
1666 DECL_NAMELESS (t) = 1;
1667 DECL_ARG_TYPE (t) = ptr_type_node;
1668 DECL_CONTEXT (t) = current_function_decl;
1669 TREE_USED (t) = 1;
1670 DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
1671 DECL_ARGUMENTS (decl) = t;
1673 t = build_decl (DECL_SOURCE_LOCATION (decl),
1674 PARM_DECL, get_identifier ("__low"), cilk_var_type);
1675 DECL_ARTIFICIAL (t) = 1;
1676 DECL_NAMELESS (t) = 1;
1677 DECL_ARG_TYPE (t) = ptr_type_node;
1678 DECL_CONTEXT (t) = current_function_decl;
1679 TREE_USED (t) = 1;
1680 DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
1681 DECL_ARGUMENTS (decl) = t;
/* The incoming data block pointer; it is the receiver decl for normal
   outlining.  For task copy functions add the outgoing .omp_data_o too.  */
1684 tree data_name = get_identifier (".omp_data_i");
1685 t = build_decl (DECL_SOURCE_LOCATION (decl), PARM_DECL, data_name,
1686 ptr_type_node);
1687 DECL_ARTIFICIAL (t) = 1;
1688 DECL_NAMELESS (t) = 1;
1689 DECL_ARG_TYPE (t) = ptr_type_node;
1690 DECL_CONTEXT (t) = current_function_decl;
1691 TREE_USED (t) = 1;
1692 TREE_READONLY (t) = 1;
1693 if (cilk_for_count)
1694 DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
1695 DECL_ARGUMENTS (decl) = t;
1696 if (!task_copy)
1697 ctx->receiver_decl = t;
1698 else
1700 t = build_decl (DECL_SOURCE_LOCATION (decl),
1701 PARM_DECL, get_identifier (".omp_data_o"),
1702 ptr_type_node);
1703 DECL_ARTIFICIAL (t) = 1;
1704 DECL_NAMELESS (t) = 1;
1705 DECL_ARG_TYPE (t) = ptr_type_node;
1706 DECL_CONTEXT (t) = current_function_decl;
1707 TREE_USED (t) = 1;
1708 TREE_ADDRESSABLE (t) = 1;
1709 DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
1710 DECL_ARGUMENTS (decl) = t;
1713 /* Allocate memory for the function structure.  The call to
1714 allocate_struct_function clobbers CFUN, so we need to restore
1715 it afterward.  */
1716 push_struct_function (decl);
1717 cfun->function_end_locus = gimple_location (ctx->stmt);
1718 init_tree_ssa (cfun);
1719 pop_cfun ();
1722 /* Callback for walk_gimple_seq.  Check if combined parallel
1723 contains gimple_omp_for_combined_into_p OMP_FOR.  On entry WI->info
points at the gf_mask loop kind being searched for; on a match WI->info
is overwritten with the OMP_FOR statement itself and the walk stops
(non-NULL return value).  */
1725 tree
1726 omp_find_combined_for (gimple_stmt_iterator *gsi_p,
1727 bool *handled_ops_p,
1728 struct walk_stmt_info *wi)
1730 gimple *stmt = gsi_stmt (*gsi_p);
1732 *handled_ops_p = true;
1733 switch (gimple_code (stmt))
/* WALK_SUBSTMTS expands to the cases whose bodies must be walked into.  */
1735 WALK_SUBSTMTS;
1737 case GIMPLE_OMP_FOR:
1738 if (gimple_omp_for_combined_into_p (stmt)
1739 && gimple_omp_for_kind (stmt)
1740 == *(const enum gf_mask *) (wi->info))
/* Found it: stash the statement and terminate the walk.  */
1742 wi->info = stmt;
1743 return integer_zero_node;
1745 break;
1746 default:
1747 break;
1749 return NULL;
1752 /* Add _LOOPTEMP_ clauses on OpenMP parallel or task.  MSK selects the
kind of combined inner GIMPLE_OMP_FOR to look for; the temporaries are
prepended to STMT's clause chain and remapped identically in OUTER_CTX.  */
1754 static void
1755 add_taskreg_looptemp_clauses (enum gf_mask msk, gimple *stmt,
1756 omp_context *outer_ctx)
1758 struct walk_stmt_info wi;
1760 memset (&wi, 0, sizeof (wi));
1761 wi.val_only = true;
1762 wi.info = (void *) &msk;
1763 walk_gimple_seq (gimple_omp_body (stmt), omp_find_combined_for, NULL, &wi);
/* omp_find_combined_for replaces wi.info with the inner OMP_FOR on a
   match; if it still points at MSK, nothing was found.  */
1764 if (wi.info != (void *) &msk)
1766 gomp_for *for_stmt = as_a <gomp_for *> ((gimple *) wi.info);
1767 struct omp_for_data fd;
1768 omp_extract_for_data (for_stmt, &fd, NULL);
1769 /* We need two temporaries with fd.loop.v type (istart/iend)
1770 and then (fd.collapse - 1) temporaries with the same
1771 type for count2 ... countN-1 vars if not constant.  */
1772 size_t count = 2, i;
1773 tree type = fd.iter_type;
1774 if (fd.collapse > 1
1775 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
1777 count += fd.collapse - 1;
1778 /* If there are lastprivate clauses on the inner
1779 GIMPLE_OMP_FOR, add one more temporaries for the total number
1780 of iterations (product of count1 ... countN-1).  */
1781 if (omp_find_clause (gimple_omp_for_clauses (for_stmt),
1782 OMP_CLAUSE_LASTPRIVATE))
1783 count++;
1784 else if (msk == GF_OMP_FOR_KIND_FOR
1785 && omp_find_clause (gimple_omp_parallel_clauses (stmt),
1786 OMP_CLAUSE_LASTPRIVATE))
1787 count++;
/* Create COUNT temporaries, each carried by its own _LOOPTEMP_ clause
   prepended to STMT's clause list; map each temp to itself so later
   remapping leaves it alone.  */
1789 for (i = 0; i < count; i++)
1791 tree temp = create_tmp_var (type);
1792 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
1793 insert_decl_map (&outer_ctx->cb, temp, temp);
1794 OMP_CLAUSE_DECL (c) = temp;
1795 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
1796 gimple_omp_taskreg_set_clauses (stmt, c);
1801 /* Scan an OpenMP parallel directive.  Builds an omp_context with the
.omp_data_s record type and (unless grid-phony) the outlined child
function, then scans the clauses and body.  */
1803 static void
1804 scan_omp_parallel (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
1806 omp_context *ctx;
1807 tree name;
1808 gomp_parallel *stmt = as_a <gomp_parallel *> (gsi_stmt (*gsi));
1810 /* Ignore parallel directives with empty bodies, unless there
1811 are copyin clauses.  */
1812 if (optimize > 0
1813 && empty_body_p (gimple_omp_body (stmt))
1814 && omp_find_clause (gimple_omp_parallel_clauses (stmt),
1815 OMP_CLAUSE_COPYIN) == NULL)
1817 gsi_replace (gsi, gimple_build_nop (), false);
1818 return;
/* A combined "parallel for" needs _LOOPTEMP_ clauses for the inner loop.  */
1821 if (gimple_omp_parallel_combined_p (stmt))
1822 add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_FOR, stmt, outer_ctx);
1824 ctx = new_omp_context (stmt, outer_ctx);
1825 taskreg_contexts.safe_push (ctx);
1826 if (taskreg_nesting_level > 1)
1827 ctx->is_nested = true;
1828 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
1829 ctx->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
/* Build the (initially empty) .omp_data_s record that carries shared
   state into the child function; fields are added by
   scan_sharing_clauses.  */
1830 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
1831 name = create_tmp_var_name (".omp_data_s");
1832 name = build_decl (gimple_location (stmt),
1833 TYPE_DECL, name, ctx->record_type);
1834 DECL_ARTIFICIAL (name) = 1;
1835 DECL_NAMELESS (name) = 1;
1836 TYPE_NAME (ctx->record_type) = name;
1837 TYPE_ARTIFICIAL (ctx->record_type) = 1;
1838 if (!gimple_omp_parallel_grid_phony (stmt))
1840 create_omp_child_function (ctx, false);
1841 gimple_omp_parallel_set_child_fn (stmt, ctx->cb.dst_fn);
1844 scan_sharing_clauses (gimple_omp_parallel_clauses (stmt), ctx);
1845 scan_omp (gimple_omp_body_ptr (stmt), ctx);
/* If no fields were needed there is nothing to marshal.  */
1847 if (TYPE_FIELDS (ctx->record_type) == NULL)
1848 ctx->record_type = ctx->receiver_decl = NULL;
1851 /* Scan an OpenMP task directive.  Like scan_omp_parallel, but may also
create an .omp_data_a sender record (srecord_type) plus a task copy
function, and sets the runtime arg size/align when no data is shared.  */
1853 static void
1854 scan_omp_task (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
1856 omp_context *ctx;
1857 tree name, t;
1858 gomp_task *stmt = as_a <gomp_task *> (gsi_stmt (*gsi));
1860 /* Ignore task directives with empty bodies, unless they have depend
1861 clause.  */
1862 if (optimize > 0
1863 && empty_body_p (gimple_omp_body (stmt))
1864 && !omp_find_clause (gimple_omp_task_clauses (stmt), OMP_CLAUSE_DEPEND))
1866 gsi_replace (gsi, gimple_build_nop (), false);
1867 return;
/* A taskloop needs _LOOPTEMP_ clauses for GOMP_taskloop.  */
1870 if (gimple_omp_task_taskloop_p (stmt))
1871 add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_TASKLOOP, stmt, outer_ctx);
1873 ctx = new_omp_context (stmt, outer_ctx);
1874 taskreg_contexts.safe_push (ctx);
1875 if (taskreg_nesting_level > 1)
1876 ctx->is_nested = true;
1877 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
1878 ctx->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
1879 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
1880 name = create_tmp_var_name (".omp_data_s");
1881 name = build_decl (gimple_location (stmt),
1882 TYPE_DECL, name, ctx->record_type);
1883 DECL_ARTIFICIAL (name) = 1;
1884 DECL_NAMELESS (name) = 1;
1885 TYPE_NAME (ctx->record_type) = name;
1886 TYPE_ARTIFICIAL (ctx->record_type) = 1;
1887 create_omp_child_function (ctx, false);
1888 gimple_omp_task_set_child_fn (stmt, ctx->cb.dst_fn);
1890 scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);
/* scan_sharing_clauses may have created a sender record; if so it also
   needs a name and a task copy function.  */
1892 if (ctx->srecord_type)
1894 name = create_tmp_var_name (".omp_data_a");
1895 name = build_decl (gimple_location (stmt),
1896 TYPE_DECL, name, ctx->srecord_type);
1897 DECL_ARTIFICIAL (name) = 1;
1898 DECL_NAMELESS (name) = 1;
1899 TYPE_NAME (ctx->srecord_type) = name;
1900 TYPE_ARTIFICIAL (ctx->srecord_type) = 1;
1901 create_omp_child_function (ctx, true);
1904 scan_omp (gimple_omp_body_ptr (stmt), ctx);
/* No shared data at all: drop the record and tell the runtime to
   allocate a zero-size, byte-aligned argument block.  */
1906 if (TYPE_FIELDS (ctx->record_type) == NULL)
1908 ctx->record_type = ctx->receiver_decl = NULL;
1909 t = build_int_cst (long_integer_type_node, 0);
1910 gimple_omp_task_set_arg_size (stmt, t);
1911 t = build_int_cst (long_integer_type_node, 1);
1912 gimple_omp_task_set_arg_align (stmt, t);
1917 /* If any decls have been made addressable during scan_omp,
1918 adjust their fields if needed, and layout record types
1919 of parallel/task constructs.  For tasks, VLA fields are moved to the
end of the record and taskloop _looptemp_ fields to the front, then the
runtime arg size/align are filled in from the laid-out record.  */
1921 static void
1922 finish_taskreg_scan (omp_context *ctx)
1924 if (ctx->record_type == NULL_TREE)
1925 return;
1927 /* If any task_shared_vars were needed, verify all
1928 OMP_CLAUSE_SHARED clauses on GIMPLE_OMP_{PARALLEL,TASK}
1929 statements if use_pointer_for_field hasn't changed
1930 because of that.  If it did, update field types now.  */
1931 if (task_shared_vars)
1933 tree c;
1935 for (c = gimple_omp_taskreg_clauses (ctx->stmt);
1936 c; c = OMP_CLAUSE_CHAIN (c))
1937 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
1938 && !OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
1940 tree decl = OMP_CLAUSE_DECL (c);
1942 /* Global variables don't need to be copied,
1943 the receiver side will use them directly.  */
1944 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1945 continue;
1946 if (!bitmap_bit_p (task_shared_vars, DECL_UID (decl))
1947 || !use_pointer_for_field (decl, ctx))
1948 continue;
1949 tree field = lookup_field (decl, ctx);
/* Field already is a pointer to the decl's type: nothing to fix.  */
1950 if (TREE_CODE (TREE_TYPE (field)) == POINTER_TYPE
1951 && TREE_TYPE (TREE_TYPE (field)) == TREE_TYPE (decl))
1952 continue;
/* Re-type the field as a pointer and reset alignment/volatility to
   match the new pointer type; keep the record's alignment in sync.  */
1953 TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
1954 TREE_THIS_VOLATILE (field) = 0;
1955 DECL_USER_ALIGN (field) = 0;
1956 SET_DECL_ALIGN (field, TYPE_ALIGN (TREE_TYPE (field)));
1957 if (TYPE_ALIGN (ctx->record_type) < DECL_ALIGN (field))
1958 SET_TYPE_ALIGN (ctx->record_type, DECL_ALIGN (field));
1959 if (ctx->srecord_type)
1961 tree sfield = lookup_sfield (decl, ctx);
1962 TREE_TYPE (sfield) = TREE_TYPE (field);
1963 TREE_THIS_VOLATILE (sfield) = 0;
1964 DECL_USER_ALIGN (sfield) = 0;
1965 SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
1966 if (TYPE_ALIGN (ctx->srecord_type) < DECL_ALIGN (sfield))
1967 SET_TYPE_ALIGN (ctx->srecord_type, DECL_ALIGN (sfield));
1972 if (gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL)
1974 layout_type (ctx->record_type);
1975 fixup_child_record_type (ctx);
1977 else
1979 location_t loc = gimple_location (ctx->stmt);
1980 tree *p, vla_fields = NULL_TREE, *q = &vla_fields;
1981 /* Move VLA fields to the end.  */
1982 p = &TYPE_FIELDS (ctx->record_type);
1983 while (*p)
/* Non-constant-size field: unlink it onto the vla_fields list.  */
1984 if (!TYPE_SIZE_UNIT (TREE_TYPE (*p))
1985 || ! TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (*p))))
1987 *q = *p;
1988 *p = TREE_CHAIN (*p);
1989 TREE_CHAIN (*q) = NULL_TREE;
1990 q = &TREE_CHAIN (*q);
1992 else
1993 p = &DECL_CHAIN (*p);
/* Splice the collected VLA fields back at the end of the record.  */
1994 *p = vla_fields;
1995 if (gimple_omp_task_taskloop_p (ctx->stmt))
1997 /* Move fields corresponding to first and second _looptemp_
1998 clause first.  There are filled by GOMP_taskloop
1999 and thus need to be in specific positions.  */
2000 tree c1 = gimple_omp_task_clauses (ctx->stmt);
2001 c1 = omp_find_clause (c1, OMP_CLAUSE__LOOPTEMP_);
2002 tree c2 = omp_find_clause (OMP_CLAUSE_CHAIN (c1),
2003 OMP_CLAUSE__LOOPTEMP_);
2004 tree f1 = lookup_field (OMP_CLAUSE_DECL (c1), ctx);
2005 tree f2 = lookup_field (OMP_CLAUSE_DECL (c2), ctx);
/* Unlink f1/f2 wherever they are, then re-chain them at the head.  */
2006 p = &TYPE_FIELDS (ctx->record_type);
2007 while (*p)
2008 if (*p == f1 || *p == f2)
2009 *p = DECL_CHAIN (*p);
2010 else
2011 p = &DECL_CHAIN (*p);
2012 DECL_CHAIN (f1) = f2;
2013 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->record_type);
2014 TYPE_FIELDS (ctx->record_type) = f1;
2015 if (ctx->srecord_type)
2017 f1 = lookup_sfield (OMP_CLAUSE_DECL (c1), ctx);
2018 f2 = lookup_sfield (OMP_CLAUSE_DECL (c2), ctx);
2019 p = &TYPE_FIELDS (ctx->srecord_type);
2020 while (*p)
2021 if (*p == f1 || *p == f2)
2022 *p = DECL_CHAIN (*p);
2023 else
2024 p = &DECL_CHAIN (*p);
2025 DECL_CHAIN (f1) = f2;
2026 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
2027 TYPE_FIELDS (ctx->srecord_type) = f1;
2030 layout_type (ctx->record_type);
2031 fixup_child_record_type (ctx);
2032 if (ctx->srecord_type)
2033 layout_type (ctx->srecord_type);
/* Tell the runtime how big/aligned the task argument block must be.  */
2034 tree t = fold_convert_loc (loc, long_integer_type_node,
2035 TYPE_SIZE_UNIT (ctx->record_type));
2036 gimple_omp_task_set_arg_size (ctx->stmt, t);
2037 t = build_int_cst (long_integer_type_node,
2038 TYPE_ALIGN_UNIT (ctx->record_type));
2039 gimple_omp_task_set_arg_align (ctx->stmt, t);
2043 /* Find the enclosing offload context. */
2045 static omp_context *
2046 enclosing_target_ctx (omp_context *ctx)
2048 for (; ctx; ctx = ctx->outer)
2049 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET)
2050 break;
2052 return ctx;
2055 /* Return true if ctx is part of an oacc kernels region. */
2057 static bool
2058 ctx_in_oacc_kernels_region (omp_context *ctx)
2060 for (;ctx != NULL; ctx = ctx->outer)
2062 gimple *stmt = ctx->stmt;
2063 if (gimple_code (stmt) == GIMPLE_OMP_TARGET
2064 && gimple_omp_target_kind (stmt) == GF_OMP_TARGET_KIND_OACC_KERNELS)
2065 return true;
2068 return false;
2071 /* Check the parallelism clauses inside a kernels regions.
2072 Until kernels handling moves to use the same loop indirection
2073 scheme as parallel, we need to do this checking early.  Returns the
union of the gang/worker/vector masks used by this loop and all
enclosing loops; diagnostics are emitted only when STMT is non-NULL.  */
2075 static unsigned
2076 check_oacc_kernel_gwv (gomp_for *stmt, omp_context *ctx)
2078 bool checking = true;
2079 unsigned outer_mask = 0;
2080 unsigned this_mask = 0;
2081 bool has_seq = false, has_auto = false;
/* Recurse outward first; inner recursive calls pass NULL so only the
   outermost (original) call reports errors.  */
2083 if (ctx->outer)
2084 outer_mask = check_oacc_kernel_gwv (NULL, ctx->outer);
2085 if (!stmt)
2087 checking = false;
2088 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR)
2089 return outer_mask;
2090 stmt = as_a <gomp_for *> (ctx->stmt);
/* Collect this loop's explicit parallelism clauses.  */
2093 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2095 switch (OMP_CLAUSE_CODE (c))
2097 case OMP_CLAUSE_GANG:
2098 this_mask |= GOMP_DIM_MASK (GOMP_DIM_GANG);
2099 break;
2100 case OMP_CLAUSE_WORKER:
2101 this_mask |= GOMP_DIM_MASK (GOMP_DIM_WORKER);
2102 break;
2103 case OMP_CLAUSE_VECTOR:
2104 this_mask |= GOMP_DIM_MASK (GOMP_DIM_VECTOR);
2105 break;
2106 case OMP_CLAUSE_SEQ:
2107 has_seq = true;
2108 break;
2109 case OMP_CLAUSE_AUTO:
2110 has_auto = true;
2111 break;
2112 default:
2113 break;
2117 if (checking)
/* seq/auto may not be combined with explicit parallelism, and a loop
   may not reuse a parallelism level already claimed by an outer loop.  */
2119 if (has_seq && (this_mask || has_auto))
2120 error_at (gimple_location (stmt), "%<seq%> overrides other"
2121 " OpenACC loop specifiers");
2122 else if (has_auto && this_mask)
2123 error_at (gimple_location (stmt), "%<auto%> conflicts with other"
2124 " OpenACC loop specifiers");
2126 if (this_mask & outer_mask)
2127 error_at (gimple_location (stmt), "inner loop uses same"
2128 " OpenACC parallelism as containing loop");
2131 return outer_mask | this_mask;
2134 /* Scan a GIMPLE_OMP_FOR.  For OpenACC loops this also validates
gang/worker/vector clause usage (and strips reductions inside kernels
regions); then the clauses, pre-body, loop controls and body are scanned
in the new context, which is returned.  */
2136 static omp_context *
2137 scan_omp_for (gomp_for *stmt, omp_context *outer_ctx)
2139 omp_context *ctx;
2140 size_t i;
2141 tree clauses = gimple_omp_for_clauses (stmt);
2143 ctx = new_omp_context (stmt, outer_ctx);
2145 if (is_gimple_omp_oacc (stmt))
2147 omp_context *tgt = enclosing_target_ctx (outer_ctx);
/* Inside an OpenACC parallel (or orphaned), gang/worker/vector clauses
   may not carry arguments.  */
2149 if (!tgt || is_oacc_parallel (tgt))
2150 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
2152 char const *check = NULL;
2154 switch (OMP_CLAUSE_CODE (c))
2156 case OMP_CLAUSE_GANG:
2157 check = "gang";
2158 break;
2160 case OMP_CLAUSE_WORKER:
2161 check = "worker";
2162 break;
2164 case OMP_CLAUSE_VECTOR:
2165 check = "vector";
2166 break;
2168 default:
2169 break;
2172 if (check && OMP_CLAUSE_OPERAND (c, 0))
2173 error_at (gimple_location (stmt),
2174 "argument not permitted on %qs clause in"
2175 " OpenACC %<parallel%>", check);
2178 if (tgt && is_oacc_kernels (tgt))
2180 /* Strip out reductions, as they are not handled yet.  */
2181 tree *prev_ptr = &clauses;
2183 while (tree probe = *prev_ptr)
2185 tree *next_ptr = &OMP_CLAUSE_CHAIN (probe);
2187 if (OMP_CLAUSE_CODE (probe) == OMP_CLAUSE_REDUCTION)
2188 *prev_ptr = *next_ptr;
2189 else
2190 prev_ptr = next_ptr;
/* Clauses may have been unlinked above; store the filtered chain back.  */
2193 gimple_omp_for_set_clauses (stmt, clauses);
2194 check_oacc_kernel_gwv (stmt, ctx);
2198 scan_sharing_clauses (clauses, ctx);
2200 scan_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
/* Scan index/initial/final/increment for every collapsed dimension.  */
2201 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
2203 scan_omp_op (gimple_omp_for_index_ptr (stmt, i), ctx);
2204 scan_omp_op (gimple_omp_for_initial_ptr (stmt, i), ctx);
2205 scan_omp_op (gimple_omp_for_final_ptr (stmt, i), ctx);
2206 scan_omp_op (gimple_omp_for_incr_ptr (stmt, i), ctx);
2208 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2209 return ctx;
2212 /* Duplicate #pragma omp simd, one for SIMT, another one for SIMD. */
2214 static void
2215 scan_omp_simd (gimple_stmt_iterator *gsi, gomp_for *stmt,
2216 omp_context *outer_ctx)
2218 gbind *bind = gimple_build_bind (NULL, NULL, NULL);
2219 gsi_replace (gsi, bind, false);
2220 gimple_seq seq = NULL;
2221 gimple *g = gimple_build_call_internal (IFN_GOMP_USE_SIMT, 0);
2222 tree cond = create_tmp_var_raw (integer_type_node);
2223 DECL_CONTEXT (cond) = current_function_decl;
2224 DECL_SEEN_IN_BIND_EXPR_P (cond) = 1;
2225 gimple_bind_set_vars (bind, cond);
2226 gimple_call_set_lhs (g, cond);
2227 gimple_seq_add_stmt (&seq, g);
2228 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
2229 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
2230 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
2231 g = gimple_build_cond (NE_EXPR, cond, integer_zero_node, lab1, lab2);
2232 gimple_seq_add_stmt (&seq, g);
2233 g = gimple_build_label (lab1);
2234 gimple_seq_add_stmt (&seq, g);
2235 gimple_seq new_seq = copy_gimple_seq_and_replace_locals (stmt);
2236 gomp_for *new_stmt = as_a <gomp_for *> (new_seq);
2237 tree clause = build_omp_clause (gimple_location (stmt), OMP_CLAUSE__SIMT_);
2238 OMP_CLAUSE_CHAIN (clause) = gimple_omp_for_clauses (new_stmt);
2239 gimple_omp_for_set_clauses (new_stmt, clause);
2240 gimple_seq_add_stmt (&seq, new_stmt);
2241 g = gimple_build_goto (lab3);
2242 gimple_seq_add_stmt (&seq, g);
2243 g = gimple_build_label (lab2);
2244 gimple_seq_add_stmt (&seq, g);
2245 gimple_seq_add_stmt (&seq, stmt);
2246 g = gimple_build_label (lab3);
2247 gimple_seq_add_stmt (&seq, g);
2248 gimple_bind_set_body (bind, seq);
2249 update_stmt (bind);
2250 scan_omp_for (new_stmt, outer_ctx);
2251 scan_omp_for (stmt, outer_ctx)->simt_stmt = new_stmt;
2254 /* Scan an OpenMP sections directive. */
2256 static void
2257 scan_omp_sections (gomp_sections *stmt, omp_context *outer_ctx)
2259 omp_context *ctx;
2261 ctx = new_omp_context (stmt, outer_ctx);
2262 scan_sharing_clauses (gimple_omp_sections_clauses (stmt), ctx);
2263 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2266 /* Scan an OpenMP single directive. */
2268 static void
2269 scan_omp_single (gomp_single *stmt, omp_context *outer_ctx)
2271 omp_context *ctx;
2272 tree name;
2274 ctx = new_omp_context (stmt, outer_ctx);
2275 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2276 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2277 name = create_tmp_var_name (".omp_copy_s");
2278 name = build_decl (gimple_location (stmt),
2279 TYPE_DECL, name, ctx->record_type);
2280 TYPE_NAME (ctx->record_type) = name;
2282 scan_sharing_clauses (gimple_omp_single_clauses (stmt), ctx);
2283 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2285 if (TYPE_FIELDS (ctx->record_type) == NULL)
2286 ctx->record_type = NULL;
2287 else
2288 layout_type (ctx->record_type);
2291 /* Return true if the CLAUSES of an omp target guarantee that the base pointers
2292 used in the corresponding offloaded function are restrict. */
2294 static bool
2295 omp_target_base_pointers_restrict_p (tree clauses)
2297 /* The analysis relies on the GOMP_MAP_FORCE_* mapping kinds, which are only
2298 used by OpenACC. */
2299 if (flag_openacc == 0)
2300 return false;
2302 /* I. Basic example:
2304 void foo (void)
2306 unsigned int a[2], b[2];
2308 #pragma acc kernels \
2309 copyout (a) \
2310 copyout (b)
2312 a[0] = 0;
2313 b[0] = 1;
2317 After gimplification, we have:
2319 #pragma omp target oacc_kernels \
2320 map(force_from:a [len: 8]) \
2321 map(force_from:b [len: 8])
2323 a[0] = 0;
2324 b[0] = 1;
2327 Because both mappings have the force prefix, we know that they will be
2328 allocated when calling the corresponding offloaded function, which means we
2329 can mark the base pointers for a and b in the offloaded function as
2330 restrict. */
2332 tree c;
2333 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
2335 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP)
2336 return false;
2338 switch (OMP_CLAUSE_MAP_KIND (c))
2340 case GOMP_MAP_FORCE_ALLOC:
2341 case GOMP_MAP_FORCE_TO:
2342 case GOMP_MAP_FORCE_FROM:
2343 case GOMP_MAP_FORCE_TOFROM:
2344 break;
2345 default:
2346 return false;
2350 return true;
2353 /* Scan a GIMPLE_OMP_TARGET. */
2355 static void
2356 scan_omp_target (gomp_target *stmt, omp_context *outer_ctx)
2358 omp_context *ctx;
2359 tree name;
2360 bool offloaded = is_gimple_omp_offloaded (stmt);
2361 tree clauses = gimple_omp_target_clauses (stmt);
2363 ctx = new_omp_context (stmt, outer_ctx);
2364 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2365 ctx->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
2366 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2367 name = create_tmp_var_name (".omp_data_t");
2368 name = build_decl (gimple_location (stmt),
2369 TYPE_DECL, name, ctx->record_type);
2370 DECL_ARTIFICIAL (name) = 1;
2371 DECL_NAMELESS (name) = 1;
2372 TYPE_NAME (ctx->record_type) = name;
2373 TYPE_ARTIFICIAL (ctx->record_type) = 1;
2375 bool base_pointers_restrict = false;
2376 if (offloaded)
2378 create_omp_child_function (ctx, false);
2379 gimple_omp_target_set_child_fn (stmt, ctx->cb.dst_fn);
2381 base_pointers_restrict = omp_target_base_pointers_restrict_p (clauses);
2382 if (base_pointers_restrict
2383 && dump_file && (dump_flags & TDF_DETAILS))
2384 fprintf (dump_file,
2385 "Base pointers in offloaded function are restrict\n");
2388 scan_sharing_clauses (clauses, ctx, base_pointers_restrict);
2389 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2391 if (TYPE_FIELDS (ctx->record_type) == NULL)
2392 ctx->record_type = ctx->receiver_decl = NULL;
2393 else
2395 TYPE_FIELDS (ctx->record_type)
2396 = nreverse (TYPE_FIELDS (ctx->record_type));
2397 if (flag_checking)
2399 unsigned int align = DECL_ALIGN (TYPE_FIELDS (ctx->record_type));
2400 for (tree field = TYPE_FIELDS (ctx->record_type);
2401 field;
2402 field = DECL_CHAIN (field))
2403 gcc_assert (DECL_ALIGN (field) == align);
2405 layout_type (ctx->record_type);
2406 if (offloaded)
2407 fixup_child_record_type (ctx);
2411 /* Scan an OpenMP teams directive. */
2413 static void
2414 scan_omp_teams (gomp_teams *stmt, omp_context *outer_ctx)
2416 omp_context *ctx = new_omp_context (stmt, outer_ctx);
2417 scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
2418 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2421 /* Check nesting restrictions. */
2422 static bool
2423 check_omp_nesting_restrictions (gimple *stmt, omp_context *ctx)
2425 tree c;
2427 if (ctx && gimple_code (ctx->stmt) == GIMPLE_OMP_GRID_BODY)
2428 /* GRID_BODY is an artificial construct, nesting rules will be checked in
2429 the original copy of its contents. */
2430 return true;
2432 /* No nesting of non-OpenACC STMT (that is, an OpenMP one, or a GOMP builtin)
2433 inside an OpenACC CTX. */
2434 if (!(is_gimple_omp (stmt)
2435 && is_gimple_omp_oacc (stmt))
2436 /* Except for atomic codes that we share with OpenMP. */
2437 && !(gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
2438 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE))
2440 if (oacc_get_fn_attrib (cfun->decl) != NULL)
2442 error_at (gimple_location (stmt),
2443 "non-OpenACC construct inside of OpenACC routine");
2444 return false;
2446 else
2447 for (omp_context *octx = ctx; octx != NULL; octx = octx->outer)
2448 if (is_gimple_omp (octx->stmt)
2449 && is_gimple_omp_oacc (octx->stmt))
2451 error_at (gimple_location (stmt),
2452 "non-OpenACC construct inside of OpenACC region");
2453 return false;
2457 if (ctx != NULL)
2459 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
2460 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
2462 c = NULL_TREE;
2463 if (gimple_code (stmt) == GIMPLE_OMP_ORDERED)
2465 c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
2466 if (omp_find_clause (c, OMP_CLAUSE_SIMD))
2468 if (omp_find_clause (c, OMP_CLAUSE_THREADS)
2469 && (ctx->outer == NULL
2470 || !gimple_omp_for_combined_into_p (ctx->stmt)
2471 || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR
2472 || (gimple_omp_for_kind (ctx->outer->stmt)
2473 != GF_OMP_FOR_KIND_FOR)
2474 || !gimple_omp_for_combined_p (ctx->outer->stmt)))
2476 error_at (gimple_location (stmt),
2477 "%<ordered simd threads%> must be closely "
2478 "nested inside of %<for simd%> region");
2479 return false;
2481 return true;
2484 error_at (gimple_location (stmt),
2485 "OpenMP constructs other than %<#pragma omp ordered simd%>"
2486 " may not be nested inside %<simd%> region");
2487 return false;
2489 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
2491 if ((gimple_code (stmt) != GIMPLE_OMP_FOR
2492 || ((gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_DISTRIBUTE)
2493 && (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_GRID_LOOP)))
2494 && gimple_code (stmt) != GIMPLE_OMP_PARALLEL)
2496 error_at (gimple_location (stmt),
2497 "only %<distribute%> or %<parallel%> regions are "
2498 "allowed to be strictly nested inside %<teams%> "
2499 "region");
2500 return false;
2504 switch (gimple_code (stmt))
2506 case GIMPLE_OMP_FOR:
2507 if (gimple_omp_for_kind (stmt) & GF_OMP_FOR_SIMD)
2508 return true;
2509 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_DISTRIBUTE)
2511 if (ctx != NULL && gimple_code (ctx->stmt) != GIMPLE_OMP_TEAMS)
2513 error_at (gimple_location (stmt),
2514 "%<distribute%> region must be strictly nested "
2515 "inside %<teams%> construct");
2516 return false;
2518 return true;
2520 /* We split taskloop into task and nested taskloop in it. */
2521 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP)
2522 return true;
2523 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_OACC_LOOP)
2525 bool ok = false;
2527 if (ctx)
2528 switch (gimple_code (ctx->stmt))
2530 case GIMPLE_OMP_FOR:
2531 ok = (gimple_omp_for_kind (ctx->stmt)
2532 == GF_OMP_FOR_KIND_OACC_LOOP);
2533 break;
2535 case GIMPLE_OMP_TARGET:
2536 switch (gimple_omp_target_kind (ctx->stmt))
2538 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
2539 case GF_OMP_TARGET_KIND_OACC_KERNELS:
2540 ok = true;
2541 break;
2543 default:
2544 break;
2547 default:
2548 break;
2550 else if (oacc_get_fn_attrib (current_function_decl))
2551 ok = true;
2552 if (!ok)
2554 error_at (gimple_location (stmt),
2555 "OpenACC loop directive must be associated with"
2556 " an OpenACC compute region");
2557 return false;
2560 /* FALLTHRU */
2561 case GIMPLE_CALL:
2562 if (is_gimple_call (stmt)
2563 && (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2564 == BUILT_IN_GOMP_CANCEL
2565 || DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2566 == BUILT_IN_GOMP_CANCELLATION_POINT))
2568 const char *bad = NULL;
2569 const char *kind = NULL;
2570 const char *construct
2571 = (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2572 == BUILT_IN_GOMP_CANCEL)
2573 ? "#pragma omp cancel"
2574 : "#pragma omp cancellation point";
2575 if (ctx == NULL)
2577 error_at (gimple_location (stmt), "orphaned %qs construct",
2578 construct);
2579 return false;
2581 switch (tree_fits_shwi_p (gimple_call_arg (stmt, 0))
2582 ? tree_to_shwi (gimple_call_arg (stmt, 0))
2583 : 0)
2585 case 1:
2586 if (gimple_code (ctx->stmt) != GIMPLE_OMP_PARALLEL)
2587 bad = "#pragma omp parallel";
2588 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2589 == BUILT_IN_GOMP_CANCEL
2590 && !integer_zerop (gimple_call_arg (stmt, 1)))
2591 ctx->cancellable = true;
2592 kind = "parallel";
2593 break;
2594 case 2:
2595 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
2596 || gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR)
2597 bad = "#pragma omp for";
2598 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2599 == BUILT_IN_GOMP_CANCEL
2600 && !integer_zerop (gimple_call_arg (stmt, 1)))
2602 ctx->cancellable = true;
2603 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2604 OMP_CLAUSE_NOWAIT))
2605 warning_at (gimple_location (stmt), 0,
2606 "%<#pragma omp cancel for%> inside "
2607 "%<nowait%> for construct");
2608 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2609 OMP_CLAUSE_ORDERED))
2610 warning_at (gimple_location (stmt), 0,
2611 "%<#pragma omp cancel for%> inside "
2612 "%<ordered%> for construct");
2614 kind = "for";
2615 break;
2616 case 4:
2617 if (gimple_code (ctx->stmt) != GIMPLE_OMP_SECTIONS
2618 && gimple_code (ctx->stmt) != GIMPLE_OMP_SECTION)
2619 bad = "#pragma omp sections";
2620 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2621 == BUILT_IN_GOMP_CANCEL
2622 && !integer_zerop (gimple_call_arg (stmt, 1)))
2624 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
2626 ctx->cancellable = true;
2627 if (omp_find_clause (gimple_omp_sections_clauses
2628 (ctx->stmt),
2629 OMP_CLAUSE_NOWAIT))
2630 warning_at (gimple_location (stmt), 0,
2631 "%<#pragma omp cancel sections%> inside "
2632 "%<nowait%> sections construct");
2634 else
2636 gcc_assert (ctx->outer
2637 && gimple_code (ctx->outer->stmt)
2638 == GIMPLE_OMP_SECTIONS);
2639 ctx->outer->cancellable = true;
2640 if (omp_find_clause (gimple_omp_sections_clauses
2641 (ctx->outer->stmt),
2642 OMP_CLAUSE_NOWAIT))
2643 warning_at (gimple_location (stmt), 0,
2644 "%<#pragma omp cancel sections%> inside "
2645 "%<nowait%> sections construct");
2648 kind = "sections";
2649 break;
2650 case 8:
2651 if (gimple_code (ctx->stmt) != GIMPLE_OMP_TASK)
2652 bad = "#pragma omp task";
2653 else
2655 for (omp_context *octx = ctx->outer;
2656 octx; octx = octx->outer)
2658 switch (gimple_code (octx->stmt))
2660 case GIMPLE_OMP_TASKGROUP:
2661 break;
2662 case GIMPLE_OMP_TARGET:
2663 if (gimple_omp_target_kind (octx->stmt)
2664 != GF_OMP_TARGET_KIND_REGION)
2665 continue;
2666 /* FALLTHRU */
2667 case GIMPLE_OMP_PARALLEL:
2668 case GIMPLE_OMP_TEAMS:
2669 error_at (gimple_location (stmt),
2670 "%<%s taskgroup%> construct not closely "
2671 "nested inside of %<taskgroup%> region",
2672 construct);
2673 return false;
2674 default:
2675 continue;
2677 break;
2679 ctx->cancellable = true;
2681 kind = "taskgroup";
2682 break;
2683 default:
2684 error_at (gimple_location (stmt), "invalid arguments");
2685 return false;
2687 if (bad)
2689 error_at (gimple_location (stmt),
2690 "%<%s %s%> construct not closely nested inside of %qs",
2691 construct, kind, bad);
2692 return false;
2695 /* FALLTHRU */
2696 case GIMPLE_OMP_SECTIONS:
2697 case GIMPLE_OMP_SINGLE:
2698 for (; ctx != NULL; ctx = ctx->outer)
2699 switch (gimple_code (ctx->stmt))
2701 case GIMPLE_OMP_FOR:
2702 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
2703 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
2704 break;
2705 /* FALLTHRU */
2706 case GIMPLE_OMP_SECTIONS:
2707 case GIMPLE_OMP_SINGLE:
2708 case GIMPLE_OMP_ORDERED:
2709 case GIMPLE_OMP_MASTER:
2710 case GIMPLE_OMP_TASK:
2711 case GIMPLE_OMP_CRITICAL:
2712 if (is_gimple_call (stmt))
2714 if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2715 != BUILT_IN_GOMP_BARRIER)
2716 return true;
2717 error_at (gimple_location (stmt),
2718 "barrier region may not be closely nested inside "
2719 "of work-sharing, %<critical%>, %<ordered%>, "
2720 "%<master%>, explicit %<task%> or %<taskloop%> "
2721 "region");
2722 return false;
2724 error_at (gimple_location (stmt),
2725 "work-sharing region may not be closely nested inside "
2726 "of work-sharing, %<critical%>, %<ordered%>, "
2727 "%<master%>, explicit %<task%> or %<taskloop%> region");
2728 return false;
2729 case GIMPLE_OMP_PARALLEL:
2730 case GIMPLE_OMP_TEAMS:
2731 return true;
2732 case GIMPLE_OMP_TARGET:
2733 if (gimple_omp_target_kind (ctx->stmt)
2734 == GF_OMP_TARGET_KIND_REGION)
2735 return true;
2736 break;
2737 default:
2738 break;
2740 break;
2741 case GIMPLE_OMP_MASTER:
2742 for (; ctx != NULL; ctx = ctx->outer)
2743 switch (gimple_code (ctx->stmt))
2745 case GIMPLE_OMP_FOR:
2746 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
2747 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
2748 break;
2749 /* FALLTHRU */
2750 case GIMPLE_OMP_SECTIONS:
2751 case GIMPLE_OMP_SINGLE:
2752 case GIMPLE_OMP_TASK:
2753 error_at (gimple_location (stmt),
2754 "%<master%> region may not be closely nested inside "
2755 "of work-sharing, explicit %<task%> or %<taskloop%> "
2756 "region");
2757 return false;
2758 case GIMPLE_OMP_PARALLEL:
2759 case GIMPLE_OMP_TEAMS:
2760 return true;
2761 case GIMPLE_OMP_TARGET:
2762 if (gimple_omp_target_kind (ctx->stmt)
2763 == GF_OMP_TARGET_KIND_REGION)
2764 return true;
2765 break;
2766 default:
2767 break;
2769 break;
2770 case GIMPLE_OMP_TASK:
2771 for (c = gimple_omp_task_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2772 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
2773 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
2774 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
2776 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
2777 error_at (OMP_CLAUSE_LOCATION (c),
2778 "%<depend(%s)%> is only allowed in %<omp ordered%>",
2779 kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
2780 return false;
2782 break;
2783 case GIMPLE_OMP_ORDERED:
2784 for (c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
2785 c; c = OMP_CLAUSE_CHAIN (c))
2787 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
2789 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREADS
2790 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SIMD);
2791 continue;
2793 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
2794 if (kind == OMP_CLAUSE_DEPEND_SOURCE
2795 || kind == OMP_CLAUSE_DEPEND_SINK)
2797 tree oclause;
2798 /* Look for containing ordered(N) loop. */
2799 if (ctx == NULL
2800 || gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
2801 || (oclause
2802 = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2803 OMP_CLAUSE_ORDERED)) == NULL_TREE)
2805 error_at (OMP_CLAUSE_LOCATION (c),
2806 "%<ordered%> construct with %<depend%> clause "
2807 "must be closely nested inside an %<ordered%> "
2808 "loop");
2809 return false;
2811 else if (OMP_CLAUSE_ORDERED_EXPR (oclause) == NULL_TREE)
2813 error_at (OMP_CLAUSE_LOCATION (c),
2814 "%<ordered%> construct with %<depend%> clause "
2815 "must be closely nested inside a loop with "
2816 "%<ordered%> clause with a parameter");
2817 return false;
2820 else
2822 error_at (OMP_CLAUSE_LOCATION (c),
2823 "invalid depend kind in omp %<ordered%> %<depend%>");
2824 return false;
2827 c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
2828 if (omp_find_clause (c, OMP_CLAUSE_SIMD))
2830 /* ordered simd must be closely nested inside of simd region,
2831 and simd region must not encounter constructs other than
2832 ordered simd, therefore ordered simd may be either orphaned,
2833 or ctx->stmt must be simd. The latter case is handled already
2834 earlier. */
2835 if (ctx != NULL)
2837 error_at (gimple_location (stmt),
2838 "%<ordered%> %<simd%> must be closely nested inside "
2839 "%<simd%> region");
2840 return false;
2843 for (; ctx != NULL; ctx = ctx->outer)
2844 switch (gimple_code (ctx->stmt))
2846 case GIMPLE_OMP_CRITICAL:
2847 case GIMPLE_OMP_TASK:
2848 case GIMPLE_OMP_ORDERED:
2849 ordered_in_taskloop:
2850 error_at (gimple_location (stmt),
2851 "%<ordered%> region may not be closely nested inside "
2852 "of %<critical%>, %<ordered%>, explicit %<task%> or "
2853 "%<taskloop%> region");
2854 return false;
2855 case GIMPLE_OMP_FOR:
2856 if (gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP)
2857 goto ordered_in_taskloop;
2858 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2859 OMP_CLAUSE_ORDERED) == NULL)
2861 error_at (gimple_location (stmt),
2862 "%<ordered%> region must be closely nested inside "
2863 "a loop region with an %<ordered%> clause");
2864 return false;
2866 return true;
2867 case GIMPLE_OMP_TARGET:
2868 if (gimple_omp_target_kind (ctx->stmt)
2869 != GF_OMP_TARGET_KIND_REGION)
2870 break;
2871 /* FALLTHRU */
2872 case GIMPLE_OMP_PARALLEL:
2873 case GIMPLE_OMP_TEAMS:
2874 error_at (gimple_location (stmt),
2875 "%<ordered%> region must be closely nested inside "
2876 "a loop region with an %<ordered%> clause");
2877 return false;
2878 default:
2879 break;
2881 break;
2882 case GIMPLE_OMP_CRITICAL:
2884 tree this_stmt_name
2885 = gimple_omp_critical_name (as_a <gomp_critical *> (stmt));
2886 for (; ctx != NULL; ctx = ctx->outer)
2887 if (gomp_critical *other_crit
2888 = dyn_cast <gomp_critical *> (ctx->stmt))
2889 if (this_stmt_name == gimple_omp_critical_name (other_crit))
2891 error_at (gimple_location (stmt),
2892 "%<critical%> region may not be nested inside "
2893 "a %<critical%> region with the same name");
2894 return false;
2897 break;
2898 case GIMPLE_OMP_TEAMS:
2899 if (ctx == NULL
2900 || gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET
2901 || gimple_omp_target_kind (ctx->stmt) != GF_OMP_TARGET_KIND_REGION)
2903 error_at (gimple_location (stmt),
2904 "%<teams%> construct not closely nested inside of "
2905 "%<target%> construct");
2906 return false;
2908 break;
2909 case GIMPLE_OMP_TARGET:
2910 for (c = gimple_omp_target_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2911 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
2912 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
2913 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
2915 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
2916 error_at (OMP_CLAUSE_LOCATION (c),
2917 "%<depend(%s)%> is only allowed in %<omp ordered%>",
2918 kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
2919 return false;
2921 if (is_gimple_omp_offloaded (stmt)
2922 && oacc_get_fn_attrib (cfun->decl) != NULL)
2924 error_at (gimple_location (stmt),
2925 "OpenACC region inside of OpenACC routine, nested "
2926 "parallelism not supported yet");
2927 return false;
2929 for (; ctx != NULL; ctx = ctx->outer)
2931 if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET)
2933 if (is_gimple_omp (stmt)
2934 && is_gimple_omp_oacc (stmt)
2935 && is_gimple_omp (ctx->stmt))
2937 error_at (gimple_location (stmt),
2938 "OpenACC construct inside of non-OpenACC region");
2939 return false;
2941 continue;
2944 const char *stmt_name, *ctx_stmt_name;
2945 switch (gimple_omp_target_kind (stmt))
2947 case GF_OMP_TARGET_KIND_REGION: stmt_name = "target"; break;
2948 case GF_OMP_TARGET_KIND_DATA: stmt_name = "target data"; break;
2949 case GF_OMP_TARGET_KIND_UPDATE: stmt_name = "target update"; break;
2950 case GF_OMP_TARGET_KIND_ENTER_DATA:
2951 stmt_name = "target enter data"; break;
2952 case GF_OMP_TARGET_KIND_EXIT_DATA:
2953 stmt_name = "target exit data"; break;
2954 case GF_OMP_TARGET_KIND_OACC_PARALLEL: stmt_name = "parallel"; break;
2955 case GF_OMP_TARGET_KIND_OACC_KERNELS: stmt_name = "kernels"; break;
2956 case GF_OMP_TARGET_KIND_OACC_DATA: stmt_name = "data"; break;
2957 case GF_OMP_TARGET_KIND_OACC_UPDATE: stmt_name = "update"; break;
2958 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
2959 stmt_name = "enter/exit data"; break;
2960 case GF_OMP_TARGET_KIND_OACC_HOST_DATA: stmt_name = "host_data";
2961 break;
2962 default: gcc_unreachable ();
2964 switch (gimple_omp_target_kind (ctx->stmt))
2966 case GF_OMP_TARGET_KIND_REGION: ctx_stmt_name = "target"; break;
2967 case GF_OMP_TARGET_KIND_DATA: ctx_stmt_name = "target data"; break;
2968 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
2969 ctx_stmt_name = "parallel"; break;
2970 case GF_OMP_TARGET_KIND_OACC_KERNELS:
2971 ctx_stmt_name = "kernels"; break;
2972 case GF_OMP_TARGET_KIND_OACC_DATA: ctx_stmt_name = "data"; break;
2973 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
2974 ctx_stmt_name = "host_data"; break;
2975 default: gcc_unreachable ();
2978 /* OpenACC/OpenMP mismatch? */
2979 if (is_gimple_omp_oacc (stmt)
2980 != is_gimple_omp_oacc (ctx->stmt))
2982 error_at (gimple_location (stmt),
2983 "%s %qs construct inside of %s %qs region",
2984 (is_gimple_omp_oacc (stmt)
2985 ? "OpenACC" : "OpenMP"), stmt_name,
2986 (is_gimple_omp_oacc (ctx->stmt)
2987 ? "OpenACC" : "OpenMP"), ctx_stmt_name);
2988 return false;
2990 if (is_gimple_omp_offloaded (ctx->stmt))
2992 /* No GIMPLE_OMP_TARGET inside offloaded OpenACC CTX. */
2993 if (is_gimple_omp_oacc (ctx->stmt))
2995 error_at (gimple_location (stmt),
2996 "%qs construct inside of %qs region",
2997 stmt_name, ctx_stmt_name);
2998 return false;
3000 else
3002 warning_at (gimple_location (stmt), 0,
3003 "%qs construct inside of %qs region",
3004 stmt_name, ctx_stmt_name);
3008 break;
3009 default:
3010 break;
3012 return true;
3016 /* Helper function scan_omp.
3018 Callback for walk_tree or operators in walk_gimple_stmt used to
3019 scan for OMP directives in TP. */
3021 static tree
3022 scan_omp_1_op (tree *tp, int *walk_subtrees, void *data)
3024 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
3025 omp_context *ctx = (omp_context *) wi->info;
3026 tree t = *tp;
3028 switch (TREE_CODE (t))
3030 case VAR_DECL:
3031 case PARM_DECL:
3032 case LABEL_DECL:
3033 case RESULT_DECL:
3034 if (ctx)
3036 tree repl = remap_decl (t, &ctx->cb);
3037 gcc_checking_assert (TREE_CODE (repl) != ERROR_MARK);
3038 *tp = repl;
3040 break;
3042 default:
3043 if (ctx && TYPE_P (t))
3044 *tp = remap_type (t, &ctx->cb);
3045 else if (!DECL_P (t))
3047 *walk_subtrees = 1;
3048 if (ctx)
3050 tree tem = remap_type (TREE_TYPE (t), &ctx->cb);
3051 if (tem != TREE_TYPE (t))
3053 if (TREE_CODE (t) == INTEGER_CST)
3054 *tp = wide_int_to_tree (tem, t);
3055 else
3056 TREE_TYPE (t) = tem;
3060 break;
3063 return NULL_TREE;
3066 /* Return true if FNDECL is a setjmp or a longjmp. */
3068 static bool
3069 setjmp_or_longjmp_p (const_tree fndecl)
3071 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
3072 && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_SETJMP
3073 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_LONGJMP))
3074 return true;
3076 tree declname = DECL_NAME (fndecl);
3077 if (!declname)
3078 return false;
3079 const char *name = IDENTIFIER_POINTER (declname);
3080 return !strcmp (name, "setjmp") || !strcmp (name, "longjmp");
3084 /* Helper function for scan_omp.
3086 Callback for walk_gimple_stmt used to scan for OMP directives in
3087 the current statement in GSI. */
3089 static tree
3090 scan_omp_1_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
3091 struct walk_stmt_info *wi)
3093 gimple *stmt = gsi_stmt (*gsi);
3094 omp_context *ctx = (omp_context *) wi->info;
3096 if (gimple_has_location (stmt))
3097 input_location = gimple_location (stmt);
3099 /* Check the nesting restrictions. */
3100 bool remove = false;
3101 if (is_gimple_omp (stmt))
3102 remove = !check_omp_nesting_restrictions (stmt, ctx);
3103 else if (is_gimple_call (stmt))
3105 tree fndecl = gimple_call_fndecl (stmt);
3106 if (fndecl)
3108 if (setjmp_or_longjmp_p (fndecl)
3109 && ctx
3110 && gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
3111 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
3113 remove = true;
3114 error_at (gimple_location (stmt),
3115 "setjmp/longjmp inside simd construct");
3117 else if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3118 switch (DECL_FUNCTION_CODE (fndecl))
3120 case BUILT_IN_GOMP_BARRIER:
3121 case BUILT_IN_GOMP_CANCEL:
3122 case BUILT_IN_GOMP_CANCELLATION_POINT:
3123 case BUILT_IN_GOMP_TASKYIELD:
3124 case BUILT_IN_GOMP_TASKWAIT:
3125 case BUILT_IN_GOMP_TASKGROUP_START:
3126 case BUILT_IN_GOMP_TASKGROUP_END:
3127 remove = !check_omp_nesting_restrictions (stmt, ctx);
3128 break;
3129 default:
3130 break;
3134 if (remove)
3136 stmt = gimple_build_nop ();
3137 gsi_replace (gsi, stmt, false);
3140 *handled_ops_p = true;
3142 switch (gimple_code (stmt))
3144 case GIMPLE_OMP_PARALLEL:
3145 taskreg_nesting_level++;
3146 scan_omp_parallel (gsi, ctx);
3147 taskreg_nesting_level--;
3148 break;
3150 case GIMPLE_OMP_TASK:
3151 taskreg_nesting_level++;
3152 scan_omp_task (gsi, ctx);
3153 taskreg_nesting_level--;
3154 break;
3156 case GIMPLE_OMP_FOR:
3157 if (((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
3158 & GF_OMP_FOR_KIND_MASK) == GF_OMP_FOR_KIND_SIMD)
3159 && omp_maybe_offloaded_ctx (ctx)
3160 && omp_max_simt_vf ())
3161 scan_omp_simd (gsi, as_a <gomp_for *> (stmt), ctx);
3162 else
3163 scan_omp_for (as_a <gomp_for *> (stmt), ctx);
3164 break;
3166 case GIMPLE_OMP_SECTIONS:
3167 scan_omp_sections (as_a <gomp_sections *> (stmt), ctx);
3168 break;
3170 case GIMPLE_OMP_SINGLE:
3171 scan_omp_single (as_a <gomp_single *> (stmt), ctx);
3172 break;
3174 case GIMPLE_OMP_SECTION:
3175 case GIMPLE_OMP_MASTER:
3176 case GIMPLE_OMP_TASKGROUP:
3177 case GIMPLE_OMP_ORDERED:
3178 case GIMPLE_OMP_CRITICAL:
3179 case GIMPLE_OMP_GRID_BODY:
3180 ctx = new_omp_context (stmt, ctx);
3181 scan_omp (gimple_omp_body_ptr (stmt), ctx);
3182 break;
3184 case GIMPLE_OMP_TARGET:
3185 scan_omp_target (as_a <gomp_target *> (stmt), ctx);
3186 break;
3188 case GIMPLE_OMP_TEAMS:
3189 scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
3190 break;
3192 case GIMPLE_BIND:
3194 tree var;
3196 *handled_ops_p = false;
3197 if (ctx)
3198 for (var = gimple_bind_vars (as_a <gbind *> (stmt));
3199 var ;
3200 var = DECL_CHAIN (var))
3201 insert_decl_map (&ctx->cb, var, var);
3203 break;
3204 default:
3205 *handled_ops_p = false;
3206 break;
3209 return NULL_TREE;
3213 /* Scan all the statements starting at the current statement. CTX
3214 contains context information about the OMP directives and
3215 clauses found during the scan. */
3217 static void
3218 scan_omp (gimple_seq *body_p, omp_context *ctx)
3220 location_t saved_location;
3221 struct walk_stmt_info wi;
3223 memset (&wi, 0, sizeof (wi));
3224 wi.info = ctx;
3225 wi.want_locations = true;
3227 saved_location = input_location;
3228 walk_gimple_seq_mod (body_p, scan_omp_1_stmt, scan_omp_1_op, &wi);
3229 input_location = saved_location;
3232 /* Re-gimplification and code generation routines. */
3234 /* If a context was created for STMT when it was scanned, return it. */
3236 static omp_context *
3237 maybe_lookup_ctx (gimple *stmt)
3239 splay_tree_node n;
3240 n = splay_tree_lookup (all_contexts, (splay_tree_key) stmt);
3241 return n ? (omp_context *) n->value : NULL;
3245 /* Find the mapping for DECL in CTX or the immediately enclosing
3246 context that has a mapping for DECL.
3248 If CTX is a nested parallel directive, we may have to use the decl
3249 mappings created in CTX's parent context. Suppose that we have the
3250 following parallel nesting (variable UIDs showed for clarity):
3252 iD.1562 = 0;
3253 #omp parallel shared(iD.1562) -> outer parallel
3254 iD.1562 = iD.1562 + 1;
3256 #omp parallel shared (iD.1562) -> inner parallel
3257 iD.1562 = iD.1562 - 1;
3259 Each parallel structure will create a distinct .omp_data_s structure
3260 for copying iD.1562 in/out of the directive:
3262 outer parallel .omp_data_s.1.i -> iD.1562
3263 inner parallel .omp_data_s.2.i -> iD.1562
3265 A shared variable mapping will produce a copy-out operation before
3266 the parallel directive and a copy-in operation after it. So, in
3267 this case we would have:
3269 iD.1562 = 0;
3270 .omp_data_o.1.i = iD.1562;
3271 #omp parallel shared(iD.1562) -> outer parallel
3272 .omp_data_i.1 = &.omp_data_o.1
3273 .omp_data_i.1->i = .omp_data_i.1->i + 1;
3275 .omp_data_o.2.i = iD.1562; -> **
3276 #omp parallel shared(iD.1562) -> inner parallel
3277 .omp_data_i.2 = &.omp_data_o.2
3278 .omp_data_i.2->i = .omp_data_i.2->i - 1;
3281 ** This is a problem. The symbol iD.1562 cannot be referenced
3282 inside the body of the outer parallel region. But since we are
3283 emitting this copy operation while expanding the inner parallel
3284 directive, we need to access the CTX structure of the outer
3285 parallel directive to get the correct mapping:
3287 .omp_data_o.2.i = .omp_data_i.1->i
3289 Since there may be other workshare or parallel directives enclosing
3290 the parallel directive, it may be necessary to walk up the context
3291 parent chain. This is not a problem in general because nested
3292 parallelism happens only rarely. */
3294 static tree
3295 lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
3297 tree t;
3298 omp_context *up;
3300 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
3301 t = maybe_lookup_decl (decl, up);
3303 gcc_assert (!ctx->is_nested || t || is_global_var (decl));
3305 return t ? t : decl;
3309 /* Similar to lookup_decl_in_outer_ctx, but return DECL if not found
3310 in outer contexts. */
3312 static tree
3313 maybe_lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
3315 tree t = NULL;
3316 omp_context *up;
3318 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
3319 t = maybe_lookup_decl (decl, up);
3321 return t ? t : decl;
3325 /* Construct the initialization value for reduction operation OP. */
3327 tree
3328 omp_reduction_init_op (location_t loc, enum tree_code op, tree type)
3330 switch (op)
3332 case PLUS_EXPR:
3333 case MINUS_EXPR:
3334 case BIT_IOR_EXPR:
3335 case BIT_XOR_EXPR:
3336 case TRUTH_OR_EXPR:
3337 case TRUTH_ORIF_EXPR:
3338 case TRUTH_XOR_EXPR:
3339 case NE_EXPR:
3340 return build_zero_cst (type);
3342 case MULT_EXPR:
3343 case TRUTH_AND_EXPR:
3344 case TRUTH_ANDIF_EXPR:
3345 case EQ_EXPR:
3346 return fold_convert_loc (loc, type, integer_one_node);
3348 case BIT_AND_EXPR:
3349 return fold_convert_loc (loc, type, integer_minus_one_node);
3351 case MAX_EXPR:
3352 if (SCALAR_FLOAT_TYPE_P (type))
3354 REAL_VALUE_TYPE max, min;
3355 if (HONOR_INFINITIES (type))
3357 real_inf (&max);
3358 real_arithmetic (&min, NEGATE_EXPR, &max, NULL);
3360 else
3361 real_maxval (&min, 1, TYPE_MODE (type));
3362 return build_real (type, min);
3364 else if (POINTER_TYPE_P (type))
3366 wide_int min
3367 = wi::min_value (TYPE_PRECISION (type), TYPE_SIGN (type));
3368 return wide_int_to_tree (type, min);
3370 else
3372 gcc_assert (INTEGRAL_TYPE_P (type));
3373 return TYPE_MIN_VALUE (type);
3376 case MIN_EXPR:
3377 if (SCALAR_FLOAT_TYPE_P (type))
3379 REAL_VALUE_TYPE max;
3380 if (HONOR_INFINITIES (type))
3381 real_inf (&max);
3382 else
3383 real_maxval (&max, 0, TYPE_MODE (type));
3384 return build_real (type, max);
3386 else if (POINTER_TYPE_P (type))
3388 wide_int max
3389 = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
3390 return wide_int_to_tree (type, max);
3392 else
3394 gcc_assert (INTEGRAL_TYPE_P (type));
3395 return TYPE_MAX_VALUE (type);
3398 default:
3399 gcc_unreachable ();
3403 /* Construct the initialization value for reduction CLAUSE. */
3405 tree
3406 omp_reduction_init (tree clause, tree type)
3408 return omp_reduction_init_op (OMP_CLAUSE_LOCATION (clause),
3409 OMP_CLAUSE_REDUCTION_CODE (clause), type);
3412 /* Return alignment to be assumed for var in CLAUSE, which should be
3413 OMP_CLAUSE_ALIGNED. */
3415 static tree
3416 omp_clause_aligned_alignment (tree clause)
3418 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
3419 return OMP_CLAUSE_ALIGNED_ALIGNMENT (clause);
3421 /* Otherwise return implementation defined alignment. */
3422 unsigned int al = 1;
3423 machine_mode mode, vmode;
3424 int vs = targetm.vectorize.autovectorize_vector_sizes ();
3425 if (vs)
3426 vs = 1 << floor_log2 (vs);
3427 static enum mode_class classes[]
3428 = { MODE_INT, MODE_VECTOR_INT, MODE_FLOAT, MODE_VECTOR_FLOAT };
3429 for (int i = 0; i < 4; i += 2)
3430 for (mode = GET_CLASS_NARROWEST_MODE (classes[i]);
3431 mode != VOIDmode;
3432 mode = GET_MODE_WIDER_MODE (mode))
3434 vmode = targetm.vectorize.preferred_simd_mode (mode);
3435 if (GET_MODE_CLASS (vmode) != classes[i + 1])
3436 continue;
3437 while (vs
3438 && GET_MODE_SIZE (vmode) < vs
3439 && GET_MODE_2XWIDER_MODE (vmode) != VOIDmode)
3440 vmode = GET_MODE_2XWIDER_MODE (vmode);
3442 tree type = lang_hooks.types.type_for_mode (mode, 1);
3443 if (type == NULL_TREE || TYPE_MODE (type) != mode)
3444 continue;
3445 type = build_vector_type (type, GET_MODE_SIZE (vmode)
3446 / GET_MODE_SIZE (mode));
3447 if (TYPE_MODE (type) != vmode)
3448 continue;
3449 if (TYPE_ALIGN_UNIT (type) > al)
3450 al = TYPE_ALIGN_UNIT (type);
3452 return build_int_cst (integer_type_node, al);
3456 /* This structure is part of the interface between lower_rec_simd_input_clauses
3457 and lower_rec_input_clauses. */
3459 struct omplow_simd_context {
3460 tree idx;
3461 tree lane;
3462 vec<tree, va_heap> simt_eargs;
3463 gimple_seq simt_dlist;
3464 int max_vf;
3465 bool is_simt;
3468 /* Helper function of lower_rec_input_clauses, used for #pragma omp simd
3469 privatization. */
3471 static bool
3472 lower_rec_simd_input_clauses (tree new_var, omp_context *ctx,
3473 omplow_simd_context *sctx, tree &ivar, tree &lvar)
3475 if (sctx->max_vf == 0)
3477 sctx->max_vf = sctx->is_simt ? omp_max_simt_vf () : omp_max_vf ();
3478 if (sctx->max_vf > 1)
3480 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3481 OMP_CLAUSE_SAFELEN);
3482 if (c
3483 && (TREE_CODE (OMP_CLAUSE_SAFELEN_EXPR (c)) != INTEGER_CST
3484 || tree_int_cst_sgn (OMP_CLAUSE_SAFELEN_EXPR (c)) != 1))
3485 sctx->max_vf = 1;
3486 else if (c && compare_tree_int (OMP_CLAUSE_SAFELEN_EXPR (c),
3487 sctx->max_vf) == -1)
3488 sctx->max_vf = tree_to_shwi (OMP_CLAUSE_SAFELEN_EXPR (c));
3490 if (sctx->max_vf > 1)
3492 sctx->idx = create_tmp_var (unsigned_type_node);
3493 sctx->lane = create_tmp_var (unsigned_type_node);
3496 if (sctx->max_vf == 1)
3497 return false;
3499 if (sctx->is_simt)
3501 if (is_gimple_reg (new_var))
3503 ivar = lvar = new_var;
3504 return true;
3506 tree type = TREE_TYPE (new_var), ptype = build_pointer_type (type);
3507 ivar = lvar = create_tmp_var (type);
3508 TREE_ADDRESSABLE (ivar) = 1;
3509 DECL_ATTRIBUTES (ivar) = tree_cons (get_identifier ("omp simt private"),
3510 NULL, DECL_ATTRIBUTES (ivar));
3511 sctx->simt_eargs.safe_push (build1 (ADDR_EXPR, ptype, ivar));
3512 tree clobber = build_constructor (type, NULL);
3513 TREE_THIS_VOLATILE (clobber) = 1;
3514 gimple *g = gimple_build_assign (ivar, clobber);
3515 gimple_seq_add_stmt (&sctx->simt_dlist, g);
3517 else
3519 tree atype = build_array_type_nelts (TREE_TYPE (new_var), sctx->max_vf);
3520 tree avar = create_tmp_var_raw (atype);
3521 if (TREE_ADDRESSABLE (new_var))
3522 TREE_ADDRESSABLE (avar) = 1;
3523 DECL_ATTRIBUTES (avar)
3524 = tree_cons (get_identifier ("omp simd array"), NULL,
3525 DECL_ATTRIBUTES (avar));
3526 gimple_add_tmp_var (avar);
3527 ivar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, sctx->idx,
3528 NULL_TREE, NULL_TREE);
3529 lvar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, sctx->lane,
3530 NULL_TREE, NULL_TREE);
3532 if (DECL_P (new_var))
3534 SET_DECL_VALUE_EXPR (new_var, lvar);
3535 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
3537 return true;
3540 /* Helper function of lower_rec_input_clauses. For a reference
3541 in simd reduction, add an underlying variable it will reference. */
3543 static void
3544 handle_simd_reference (location_t loc, tree new_vard, gimple_seq *ilist)
3546 tree z = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_vard)));
3547 if (TREE_CONSTANT (z))
3549 z = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_vard)),
3550 get_name (new_vard));
3551 gimple_add_tmp_var (z);
3552 TREE_ADDRESSABLE (z) = 1;
3553 z = build_fold_addr_expr_loc (loc, z);
3554 gimplify_assign (new_vard, z, ilist);
3558 /* Generate code to implement the input clauses, FIRSTPRIVATE and COPYIN,
3559 from the receiver (aka child) side and initializers for REFERENCE_TYPE
3560 private variables. Initialization statements go in ILIST, while calls
3561 to destructors go in DLIST. */
3563 static void
3564 lower_rec_input_clauses (tree clauses, gimple_seq *ilist, gimple_seq *dlist,
3565 omp_context *ctx, struct omp_for_data *fd)
3567 tree c, dtor, copyin_seq, x, ptr;
3568 bool copyin_by_ref = false;
3569 bool lastprivate_firstprivate = false;
3570 bool reduction_omp_orig_ref = false;
3571 int pass;
3572 bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
3573 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD);
3574 omplow_simd_context sctx = omplow_simd_context ();
3575 tree simt_lane = NULL_TREE, simtrec = NULL_TREE;
3576 tree ivar = NULL_TREE, lvar = NULL_TREE, uid = NULL_TREE;
3577 gimple_seq llist[3] = { };
3579 copyin_seq = NULL;
3580 sctx.is_simt = is_simd && omp_find_clause (clauses, OMP_CLAUSE__SIMT_);
3582 /* Set max_vf=1 (which will later enforce safelen=1) in simd loops
3583 with data sharing clauses referencing variable sized vars. That
3584 is unnecessarily hard to support and very unlikely to result in
3585 vectorized code anyway. */
3586 if (is_simd)
3587 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
3588 switch (OMP_CLAUSE_CODE (c))
3590 case OMP_CLAUSE_LINEAR:
3591 if (OMP_CLAUSE_LINEAR_ARRAY (c))
3592 sctx.max_vf = 1;
3593 /* FALLTHRU */
3594 case OMP_CLAUSE_PRIVATE:
3595 case OMP_CLAUSE_FIRSTPRIVATE:
3596 case OMP_CLAUSE_LASTPRIVATE:
3597 if (is_variable_sized (OMP_CLAUSE_DECL (c)))
3598 sctx.max_vf = 1;
3599 break;
3600 case OMP_CLAUSE_REDUCTION:
3601 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
3602 || is_variable_sized (OMP_CLAUSE_DECL (c)))
3603 sctx.max_vf = 1;
3604 break;
3605 default:
3606 continue;
3609 /* Add a placeholder for simduid. */
3610 if (sctx.is_simt && sctx.max_vf != 1)
3611 sctx.simt_eargs.safe_push (NULL_TREE);
3613 /* Do all the fixed sized types in the first pass, and the variable sized
3614 types in the second pass. This makes sure that the scalar arguments to
3615 the variable sized types are processed before we use them in the
3616 variable sized operations. */
3617 for (pass = 0; pass < 2; ++pass)
3619 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
3621 enum omp_clause_code c_kind = OMP_CLAUSE_CODE (c);
3622 tree var, new_var;
3623 bool by_ref;
3624 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
3626 switch (c_kind)
3628 case OMP_CLAUSE_PRIVATE:
3629 if (OMP_CLAUSE_PRIVATE_DEBUG (c))
3630 continue;
3631 break;
3632 case OMP_CLAUSE_SHARED:
3633 /* Ignore shared directives in teams construct. */
3634 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
3635 continue;
3636 if (maybe_lookup_decl (OMP_CLAUSE_DECL (c), ctx) == NULL)
3638 gcc_assert (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c)
3639 || is_global_var (OMP_CLAUSE_DECL (c)));
3640 continue;
3642 case OMP_CLAUSE_FIRSTPRIVATE:
3643 case OMP_CLAUSE_COPYIN:
3644 break;
3645 case OMP_CLAUSE_LINEAR:
3646 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
3647 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
3648 lastprivate_firstprivate = true;
3649 break;
3650 case OMP_CLAUSE_REDUCTION:
3651 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
3652 reduction_omp_orig_ref = true;
3653 break;
3654 case OMP_CLAUSE__LOOPTEMP_:
3655 /* Handle _looptemp_ clauses only on parallel/task. */
3656 if (fd)
3657 continue;
3658 break;
3659 case OMP_CLAUSE_LASTPRIVATE:
3660 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
3662 lastprivate_firstprivate = true;
3663 if (pass != 0 || is_taskloop_ctx (ctx))
3664 continue;
3666 /* Even without corresponding firstprivate, if
3667 decl is Fortran allocatable, it needs outer var
3668 reference. */
3669 else if (pass == 0
3670 && lang_hooks.decls.omp_private_outer_ref
3671 (OMP_CLAUSE_DECL (c)))
3672 lastprivate_firstprivate = true;
3673 break;
3674 case OMP_CLAUSE_ALIGNED:
3675 if (pass == 0)
3676 continue;
3677 var = OMP_CLAUSE_DECL (c);
3678 if (TREE_CODE (TREE_TYPE (var)) == POINTER_TYPE
3679 && !is_global_var (var))
3681 new_var = maybe_lookup_decl (var, ctx);
3682 if (new_var == NULL_TREE)
3683 new_var = maybe_lookup_decl_in_outer_ctx (var, ctx);
3684 x = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
3685 tree alarg = omp_clause_aligned_alignment (c);
3686 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
3687 x = build_call_expr_loc (clause_loc, x, 2, new_var, alarg);
3688 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
3689 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
3690 gimplify_and_add (x, ilist);
3692 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
3693 && is_global_var (var))
3695 tree ptype = build_pointer_type (TREE_TYPE (var)), t, t2;
3696 new_var = lookup_decl (var, ctx);
3697 t = maybe_lookup_decl_in_outer_ctx (var, ctx);
3698 t = build_fold_addr_expr_loc (clause_loc, t);
3699 t2 = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
3700 tree alarg = omp_clause_aligned_alignment (c);
3701 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
3702 t = build_call_expr_loc (clause_loc, t2, 2, t, alarg);
3703 t = fold_convert_loc (clause_loc, ptype, t);
3704 x = create_tmp_var (ptype);
3705 t = build2 (MODIFY_EXPR, ptype, x, t);
3706 gimplify_and_add (t, ilist);
3707 t = build_simple_mem_ref_loc (clause_loc, x);
3708 SET_DECL_VALUE_EXPR (new_var, t);
3709 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
3711 continue;
3712 default:
3713 continue;
3716 new_var = var = OMP_CLAUSE_DECL (c);
3717 if (c_kind == OMP_CLAUSE_REDUCTION && TREE_CODE (var) == MEM_REF)
3719 var = TREE_OPERAND (var, 0);
3720 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
3721 var = TREE_OPERAND (var, 0);
3722 if (TREE_CODE (var) == INDIRECT_REF
3723 || TREE_CODE (var) == ADDR_EXPR)
3724 var = TREE_OPERAND (var, 0);
3725 if (is_variable_sized (var))
3727 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
3728 var = DECL_VALUE_EXPR (var);
3729 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
3730 var = TREE_OPERAND (var, 0);
3731 gcc_assert (DECL_P (var));
3733 new_var = var;
3735 if (c_kind != OMP_CLAUSE_COPYIN)
3736 new_var = lookup_decl (var, ctx);
3738 if (c_kind == OMP_CLAUSE_SHARED || c_kind == OMP_CLAUSE_COPYIN)
3740 if (pass != 0)
3741 continue;
3743 /* C/C++ array section reductions. */
3744 else if (c_kind == OMP_CLAUSE_REDUCTION
3745 && var != OMP_CLAUSE_DECL (c))
3747 if (pass == 0)
3748 continue;
3750 tree bias = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
3751 tree orig_var = TREE_OPERAND (OMP_CLAUSE_DECL (c), 0);
3752 if (TREE_CODE (orig_var) == POINTER_PLUS_EXPR)
3754 tree b = TREE_OPERAND (orig_var, 1);
3755 b = maybe_lookup_decl (b, ctx);
3756 if (b == NULL)
3758 b = TREE_OPERAND (orig_var, 1);
3759 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
3761 if (integer_zerop (bias))
3762 bias = b;
3763 else
3765 bias = fold_convert_loc (clause_loc,
3766 TREE_TYPE (b), bias);
3767 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
3768 TREE_TYPE (b), b, bias);
3770 orig_var = TREE_OPERAND (orig_var, 0);
3772 if (TREE_CODE (orig_var) == INDIRECT_REF
3773 || TREE_CODE (orig_var) == ADDR_EXPR)
3774 orig_var = TREE_OPERAND (orig_var, 0);
3775 tree d = OMP_CLAUSE_DECL (c);
3776 tree type = TREE_TYPE (d);
3777 gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
3778 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
3779 const char *name = get_name (orig_var);
3780 if (TREE_CONSTANT (v))
3782 x = create_tmp_var_raw (type, name);
3783 gimple_add_tmp_var (x);
3784 TREE_ADDRESSABLE (x) = 1;
3785 x = build_fold_addr_expr_loc (clause_loc, x);
3787 else
3789 tree atmp
3790 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
3791 tree t = maybe_lookup_decl (v, ctx);
3792 if (t)
3793 v = t;
3794 else
3795 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
3796 gimplify_expr (&v, ilist, NULL, is_gimple_val, fb_rvalue);
3797 t = fold_build2_loc (clause_loc, PLUS_EXPR,
3798 TREE_TYPE (v), v,
3799 build_int_cst (TREE_TYPE (v), 1));
3800 t = fold_build2_loc (clause_loc, MULT_EXPR,
3801 TREE_TYPE (v), t,
3802 TYPE_SIZE_UNIT (TREE_TYPE (type)));
3803 tree al = size_int (TYPE_ALIGN (TREE_TYPE (type)));
3804 x = build_call_expr_loc (clause_loc, atmp, 2, t, al);
3807 tree ptype = build_pointer_type (TREE_TYPE (type));
3808 x = fold_convert_loc (clause_loc, ptype, x);
3809 tree y = create_tmp_var (ptype, name);
3810 gimplify_assign (y, x, ilist);
3811 x = y;
3812 tree yb = y;
3814 if (!integer_zerop (bias))
3816 bias = fold_convert_loc (clause_loc, pointer_sized_int_node,
3817 bias);
3818 yb = fold_convert_loc (clause_loc, pointer_sized_int_node,
3820 yb = fold_build2_loc (clause_loc, MINUS_EXPR,
3821 pointer_sized_int_node, yb, bias);
3822 x = fold_convert_loc (clause_loc, TREE_TYPE (x), yb);
3823 yb = create_tmp_var (ptype, name);
3824 gimplify_assign (yb, x, ilist);
3825 x = yb;
3828 d = TREE_OPERAND (d, 0);
3829 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
3830 d = TREE_OPERAND (d, 0);
3831 if (TREE_CODE (d) == ADDR_EXPR)
3833 if (orig_var != var)
3835 gcc_assert (is_variable_sized (orig_var));
3836 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var),
3838 gimplify_assign (new_var, x, ilist);
3839 tree new_orig_var = lookup_decl (orig_var, ctx);
3840 tree t = build_fold_indirect_ref (new_var);
3841 DECL_IGNORED_P (new_var) = 0;
3842 TREE_THIS_NOTRAP (t);
3843 SET_DECL_VALUE_EXPR (new_orig_var, t);
3844 DECL_HAS_VALUE_EXPR_P (new_orig_var) = 1;
3846 else
3848 x = build2 (MEM_REF, TREE_TYPE (new_var), x,
3849 build_int_cst (ptype, 0));
3850 SET_DECL_VALUE_EXPR (new_var, x);
3851 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
3854 else
3856 gcc_assert (orig_var == var);
3857 if (TREE_CODE (d) == INDIRECT_REF)
3859 x = create_tmp_var (ptype, name);
3860 TREE_ADDRESSABLE (x) = 1;
3861 gimplify_assign (x, yb, ilist);
3862 x = build_fold_addr_expr_loc (clause_loc, x);
3864 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
3865 gimplify_assign (new_var, x, ilist);
3867 tree y1 = create_tmp_var (ptype, NULL);
3868 gimplify_assign (y1, y, ilist);
3869 tree i2 = NULL_TREE, y2 = NULL_TREE;
3870 tree body2 = NULL_TREE, end2 = NULL_TREE;
3871 tree y3 = NULL_TREE, y4 = NULL_TREE;
3872 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) || is_simd)
3874 y2 = create_tmp_var (ptype, NULL);
3875 gimplify_assign (y2, y, ilist);
3876 tree ref = build_outer_var_ref (var, ctx);
3877 /* For ref build_outer_var_ref already performs this. */
3878 if (TREE_CODE (d) == INDIRECT_REF)
3879 gcc_assert (omp_is_reference (var));
3880 else if (TREE_CODE (d) == ADDR_EXPR)
3881 ref = build_fold_addr_expr (ref);
3882 else if (omp_is_reference (var))
3883 ref = build_fold_addr_expr (ref);
3884 ref = fold_convert_loc (clause_loc, ptype, ref);
3885 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
3886 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
3888 y3 = create_tmp_var (ptype, NULL);
3889 gimplify_assign (y3, unshare_expr (ref), ilist);
3891 if (is_simd)
3893 y4 = create_tmp_var (ptype, NULL);
3894 gimplify_assign (y4, ref, dlist);
3897 tree i = create_tmp_var (TREE_TYPE (v), NULL);
3898 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), ilist);
3899 tree body = create_artificial_label (UNKNOWN_LOCATION);
3900 tree end = create_artificial_label (UNKNOWN_LOCATION);
3901 gimple_seq_add_stmt (ilist, gimple_build_label (body));
3902 if (y2)
3904 i2 = create_tmp_var (TREE_TYPE (v), NULL);
3905 gimplify_assign (i2, build_int_cst (TREE_TYPE (v), 0), dlist);
3906 body2 = create_artificial_label (UNKNOWN_LOCATION);
3907 end2 = create_artificial_label (UNKNOWN_LOCATION);
3908 gimple_seq_add_stmt (dlist, gimple_build_label (body2));
3910 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
3912 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
3913 tree decl_placeholder
3914 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
3915 SET_DECL_VALUE_EXPR (decl_placeholder,
3916 build_simple_mem_ref (y1));
3917 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
3918 SET_DECL_VALUE_EXPR (placeholder,
3919 y3 ? build_simple_mem_ref (y3)
3920 : error_mark_node);
3921 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
3922 x = lang_hooks.decls.omp_clause_default_ctor
3923 (c, build_simple_mem_ref (y1),
3924 y3 ? build_simple_mem_ref (y3) : NULL_TREE);
3925 if (x)
3926 gimplify_and_add (x, ilist);
3927 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
3929 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
3930 lower_omp (&tseq, ctx);
3931 gimple_seq_add_seq (ilist, tseq);
3933 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
3934 if (is_simd)
3936 SET_DECL_VALUE_EXPR (decl_placeholder,
3937 build_simple_mem_ref (y2));
3938 SET_DECL_VALUE_EXPR (placeholder,
3939 build_simple_mem_ref (y4));
3940 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
3941 lower_omp (&tseq, ctx);
3942 gimple_seq_add_seq (dlist, tseq);
3943 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
3945 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
3946 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 0;
3947 x = lang_hooks.decls.omp_clause_dtor
3948 (c, build_simple_mem_ref (y2));
3949 if (x)
3951 gimple_seq tseq = NULL;
3952 dtor = x;
3953 gimplify_stmt (&dtor, &tseq);
3954 gimple_seq_add_seq (dlist, tseq);
3957 else
3959 x = omp_reduction_init (c, TREE_TYPE (type));
3960 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
3962 /* reduction(-:var) sums up the partial results, so it
3963 acts identically to reduction(+:var). */
3964 if (code == MINUS_EXPR)
3965 code = PLUS_EXPR;
3967 gimplify_assign (build_simple_mem_ref (y1), x, ilist);
3968 if (is_simd)
3970 x = build2 (code, TREE_TYPE (type),
3971 build_simple_mem_ref (y4),
3972 build_simple_mem_ref (y2));
3973 gimplify_assign (build_simple_mem_ref (y4), x, dlist);
3976 gimple *g
3977 = gimple_build_assign (y1, POINTER_PLUS_EXPR, y1,
3978 TYPE_SIZE_UNIT (TREE_TYPE (type)));
3979 gimple_seq_add_stmt (ilist, g);
3980 if (y3)
3982 g = gimple_build_assign (y3, POINTER_PLUS_EXPR, y3,
3983 TYPE_SIZE_UNIT (TREE_TYPE (type)));
3984 gimple_seq_add_stmt (ilist, g);
3986 g = gimple_build_assign (i, PLUS_EXPR, i,
3987 build_int_cst (TREE_TYPE (i), 1));
3988 gimple_seq_add_stmt (ilist, g);
3989 g = gimple_build_cond (LE_EXPR, i, v, body, end);
3990 gimple_seq_add_stmt (ilist, g);
3991 gimple_seq_add_stmt (ilist, gimple_build_label (end));
3992 if (y2)
3994 g = gimple_build_assign (y2, POINTER_PLUS_EXPR, y2,
3995 TYPE_SIZE_UNIT (TREE_TYPE (type)));
3996 gimple_seq_add_stmt (dlist, g);
3997 if (y4)
3999 g = gimple_build_assign
4000 (y4, POINTER_PLUS_EXPR, y4,
4001 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4002 gimple_seq_add_stmt (dlist, g);
4004 g = gimple_build_assign (i2, PLUS_EXPR, i2,
4005 build_int_cst (TREE_TYPE (i2), 1));
4006 gimple_seq_add_stmt (dlist, g);
4007 g = gimple_build_cond (LE_EXPR, i2, v, body2, end2);
4008 gimple_seq_add_stmt (dlist, g);
4009 gimple_seq_add_stmt (dlist, gimple_build_label (end2));
4011 continue;
4013 else if (is_variable_sized (var))
4015 /* For variable sized types, we need to allocate the
4016 actual storage here. Call alloca and store the
4017 result in the pointer decl that we created elsewhere. */
4018 if (pass == 0)
4019 continue;
4021 if (c_kind != OMP_CLAUSE_FIRSTPRIVATE || !is_task_ctx (ctx))
4023 gcall *stmt;
4024 tree tmp, atmp;
4026 ptr = DECL_VALUE_EXPR (new_var);
4027 gcc_assert (TREE_CODE (ptr) == INDIRECT_REF);
4028 ptr = TREE_OPERAND (ptr, 0);
4029 gcc_assert (DECL_P (ptr));
4030 x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
4032 /* void *tmp = __builtin_alloca */
4033 atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
4034 stmt = gimple_build_call (atmp, 2, x,
4035 size_int (DECL_ALIGN (var)));
4036 tmp = create_tmp_var_raw (ptr_type_node);
4037 gimple_add_tmp_var (tmp);
4038 gimple_call_set_lhs (stmt, tmp);
4040 gimple_seq_add_stmt (ilist, stmt);
4042 x = fold_convert_loc (clause_loc, TREE_TYPE (ptr), tmp);
4043 gimplify_assign (ptr, x, ilist);
4046 else if (omp_is_reference (var))
4048 /* For references that are being privatized for Fortran,
4049 allocate new backing storage for the new pointer
4050 variable. This allows us to avoid changing all the
4051 code that expects a pointer to something that expects
4052 a direct variable. */
4053 if (pass == 0)
4054 continue;
4056 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
4057 if (c_kind == OMP_CLAUSE_FIRSTPRIVATE && is_task_ctx (ctx))
4059 x = build_receiver_ref (var, false, ctx);
4060 x = build_fold_addr_expr_loc (clause_loc, x);
4062 else if (TREE_CONSTANT (x))
4064 /* For reduction in SIMD loop, defer adding the
4065 initialization of the reference, because if we decide
4066 to use SIMD array for it, the initilization could cause
4067 expansion ICE. */
4068 if (c_kind == OMP_CLAUSE_REDUCTION && is_simd)
4069 x = NULL_TREE;
4070 else
4072 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
4073 get_name (var));
4074 gimple_add_tmp_var (x);
4075 TREE_ADDRESSABLE (x) = 1;
4076 x = build_fold_addr_expr_loc (clause_loc, x);
4079 else
4081 tree atmp
4082 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
4083 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
4084 tree al = size_int (TYPE_ALIGN (rtype));
4085 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
4088 if (x)
4090 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
4091 gimplify_assign (new_var, x, ilist);
4094 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
4096 else if (c_kind == OMP_CLAUSE_REDUCTION
4097 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4099 if (pass == 0)
4100 continue;
4102 else if (pass != 0)
4103 continue;
4105 switch (OMP_CLAUSE_CODE (c))
4107 case OMP_CLAUSE_SHARED:
4108 /* Ignore shared directives in teams construct. */
4109 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
4110 continue;
4111 /* Shared global vars are just accessed directly. */
4112 if (is_global_var (new_var))
4113 break;
4114 /* For taskloop firstprivate/lastprivate, represented
4115 as firstprivate and shared clause on the task, new_var
4116 is the firstprivate var. */
4117 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
4118 break;
4119 /* Set up the DECL_VALUE_EXPR for shared variables now. This
4120 needs to be delayed until after fixup_child_record_type so
4121 that we get the correct type during the dereference. */
4122 by_ref = use_pointer_for_field (var, ctx);
4123 x = build_receiver_ref (var, by_ref, ctx);
4124 SET_DECL_VALUE_EXPR (new_var, x);
4125 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4127 /* ??? If VAR is not passed by reference, and the variable
4128 hasn't been initialized yet, then we'll get a warning for
4129 the store into the omp_data_s structure. Ideally, we'd be
4130 able to notice this and not store anything at all, but
4131 we're generating code too early. Suppress the warning. */
4132 if (!by_ref)
4133 TREE_NO_WARNING (var) = 1;
4134 break;
4136 case OMP_CLAUSE_LASTPRIVATE:
4137 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
4138 break;
4139 /* FALLTHRU */
4141 case OMP_CLAUSE_PRIVATE:
4142 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE)
4143 x = build_outer_var_ref (var, ctx);
4144 else if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
4146 if (is_task_ctx (ctx))
4147 x = build_receiver_ref (var, false, ctx);
4148 else
4149 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_PRIVATE);
4151 else
4152 x = NULL;
4153 do_private:
4154 tree nx;
4155 nx = lang_hooks.decls.omp_clause_default_ctor
4156 (c, unshare_expr (new_var), x);
4157 if (is_simd)
4159 tree y = lang_hooks.decls.omp_clause_dtor (c, new_var);
4160 if ((TREE_ADDRESSABLE (new_var) || nx || y
4161 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
4162 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4163 ivar, lvar))
4165 if (nx)
4166 x = lang_hooks.decls.omp_clause_default_ctor
4167 (c, unshare_expr (ivar), x);
4168 if (nx && x)
4169 gimplify_and_add (x, &llist[0]);
4170 if (y)
4172 y = lang_hooks.decls.omp_clause_dtor (c, ivar);
4173 if (y)
4175 gimple_seq tseq = NULL;
4177 dtor = y;
4178 gimplify_stmt (&dtor, &tseq);
4179 gimple_seq_add_seq (&llist[1], tseq);
4182 break;
4185 if (nx)
4186 gimplify_and_add (nx, ilist);
4187 /* FALLTHRU */
4189 do_dtor:
4190 x = lang_hooks.decls.omp_clause_dtor (c, new_var);
4191 if (x)
4193 gimple_seq tseq = NULL;
4195 dtor = x;
4196 gimplify_stmt (&dtor, &tseq);
4197 gimple_seq_add_seq (dlist, tseq);
4199 break;
4201 case OMP_CLAUSE_LINEAR:
4202 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
4203 goto do_firstprivate;
4204 if (OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
4205 x = NULL;
4206 else
4207 x = build_outer_var_ref (var, ctx);
4208 goto do_private;
4210 case OMP_CLAUSE_FIRSTPRIVATE:
4211 if (is_task_ctx (ctx))
4213 if (omp_is_reference (var) || is_variable_sized (var))
4214 goto do_dtor;
4215 else if (is_global_var (maybe_lookup_decl_in_outer_ctx (var,
4216 ctx))
4217 || use_pointer_for_field (var, NULL))
4219 x = build_receiver_ref (var, false, ctx);
4220 SET_DECL_VALUE_EXPR (new_var, x);
4221 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4222 goto do_dtor;
4225 do_firstprivate:
4226 x = build_outer_var_ref (var, ctx);
4227 if (is_simd)
4229 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
4230 && gimple_omp_for_combined_into_p (ctx->stmt))
4232 tree t = OMP_CLAUSE_LINEAR_STEP (c);
4233 tree stept = TREE_TYPE (t);
4234 tree ct = omp_find_clause (clauses,
4235 OMP_CLAUSE__LOOPTEMP_);
4236 gcc_assert (ct);
4237 tree l = OMP_CLAUSE_DECL (ct);
4238 tree n1 = fd->loop.n1;
4239 tree step = fd->loop.step;
4240 tree itype = TREE_TYPE (l);
4241 if (POINTER_TYPE_P (itype))
4242 itype = signed_type_for (itype);
4243 l = fold_build2 (MINUS_EXPR, itype, l, n1);
4244 if (TYPE_UNSIGNED (itype)
4245 && fd->loop.cond_code == GT_EXPR)
4246 l = fold_build2 (TRUNC_DIV_EXPR, itype,
4247 fold_build1 (NEGATE_EXPR, itype, l),
4248 fold_build1 (NEGATE_EXPR,
4249 itype, step));
4250 else
4251 l = fold_build2 (TRUNC_DIV_EXPR, itype, l, step);
4252 t = fold_build2 (MULT_EXPR, stept,
4253 fold_convert (stept, l), t);
4255 if (OMP_CLAUSE_LINEAR_ARRAY (c))
4257 x = lang_hooks.decls.omp_clause_linear_ctor
4258 (c, new_var, x, t);
4259 gimplify_and_add (x, ilist);
4260 goto do_dtor;
4263 if (POINTER_TYPE_P (TREE_TYPE (x)))
4264 x = fold_build2 (POINTER_PLUS_EXPR,
4265 TREE_TYPE (x), x, t);
4266 else
4267 x = fold_build2 (PLUS_EXPR, TREE_TYPE (x), x, t);
4270 if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LINEAR
4271 || TREE_ADDRESSABLE (new_var))
4272 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4273 ivar, lvar))
4275 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR)
4277 tree iv = create_tmp_var (TREE_TYPE (new_var));
4278 x = lang_hooks.decls.omp_clause_copy_ctor (c, iv, x);
4279 gimplify_and_add (x, ilist);
4280 gimple_stmt_iterator gsi
4281 = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
4282 gassign *g
4283 = gimple_build_assign (unshare_expr (lvar), iv);
4284 gsi_insert_before_without_update (&gsi, g,
4285 GSI_SAME_STMT);
4286 tree t = OMP_CLAUSE_LINEAR_STEP (c);
4287 enum tree_code code = PLUS_EXPR;
4288 if (POINTER_TYPE_P (TREE_TYPE (new_var)))
4289 code = POINTER_PLUS_EXPR;
4290 g = gimple_build_assign (iv, code, iv, t);
4291 gsi_insert_before_without_update (&gsi, g,
4292 GSI_SAME_STMT);
4293 break;
4295 x = lang_hooks.decls.omp_clause_copy_ctor
4296 (c, unshare_expr (ivar), x);
4297 gimplify_and_add (x, &llist[0]);
4298 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
4299 if (x)
4301 gimple_seq tseq = NULL;
4303 dtor = x;
4304 gimplify_stmt (&dtor, &tseq);
4305 gimple_seq_add_seq (&llist[1], tseq);
4307 break;
4310 x = lang_hooks.decls.omp_clause_copy_ctor
4311 (c, unshare_expr (new_var), x);
4312 gimplify_and_add (x, ilist);
4313 goto do_dtor;
4315 case OMP_CLAUSE__LOOPTEMP_:
4316 gcc_assert (is_taskreg_ctx (ctx));
4317 x = build_outer_var_ref (var, ctx);
4318 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
4319 gimplify_and_add (x, ilist);
4320 break;
4322 case OMP_CLAUSE_COPYIN:
4323 by_ref = use_pointer_for_field (var, NULL);
4324 x = build_receiver_ref (var, by_ref, ctx);
4325 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, x);
4326 append_to_statement_list (x, &copyin_seq);
4327 copyin_by_ref |= by_ref;
4328 break;
4330 case OMP_CLAUSE_REDUCTION:
4331 /* OpenACC reductions are initialized using the
4332 GOACC_REDUCTION internal function. */
4333 if (is_gimple_omp_oacc (ctx->stmt))
4334 break;
4335 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4337 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
4338 gimple *tseq;
4339 x = build_outer_var_ref (var, ctx);
4341 if (omp_is_reference (var)
4342 && !useless_type_conversion_p (TREE_TYPE (placeholder),
4343 TREE_TYPE (x)))
4344 x = build_fold_addr_expr_loc (clause_loc, x);
4345 SET_DECL_VALUE_EXPR (placeholder, x);
4346 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
4347 tree new_vard = new_var;
4348 if (omp_is_reference (var))
4350 gcc_assert (TREE_CODE (new_var) == MEM_REF);
4351 new_vard = TREE_OPERAND (new_var, 0);
4352 gcc_assert (DECL_P (new_vard));
4354 if (is_simd
4355 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4356 ivar, lvar))
4358 if (new_vard == new_var)
4360 gcc_assert (DECL_VALUE_EXPR (new_var) == lvar);
4361 SET_DECL_VALUE_EXPR (new_var, ivar);
4363 else
4365 SET_DECL_VALUE_EXPR (new_vard,
4366 build_fold_addr_expr (ivar));
4367 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
4369 x = lang_hooks.decls.omp_clause_default_ctor
4370 (c, unshare_expr (ivar),
4371 build_outer_var_ref (var, ctx));
4372 if (x)
4373 gimplify_and_add (x, &llist[0]);
4374 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
4376 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
4377 lower_omp (&tseq, ctx);
4378 gimple_seq_add_seq (&llist[0], tseq);
4380 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
4381 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
4382 lower_omp (&tseq, ctx);
4383 gimple_seq_add_seq (&llist[1], tseq);
4384 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
4385 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
4386 if (new_vard == new_var)
4387 SET_DECL_VALUE_EXPR (new_var, lvar);
4388 else
4389 SET_DECL_VALUE_EXPR (new_vard,
4390 build_fold_addr_expr (lvar));
4391 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
4392 if (x)
4394 tseq = NULL;
4395 dtor = x;
4396 gimplify_stmt (&dtor, &tseq);
4397 gimple_seq_add_seq (&llist[1], tseq);
4399 break;
4401 /* If this is a reference to constant size reduction var
4402 with placeholder, we haven't emitted the initializer
4403 for it because it is undesirable if SIMD arrays are used.
4404 But if they aren't used, we need to emit the deferred
4405 initialization now. */
4406 else if (omp_is_reference (var) && is_simd)
4407 handle_simd_reference (clause_loc, new_vard, ilist);
4408 x = lang_hooks.decls.omp_clause_default_ctor
4409 (c, unshare_expr (new_var),
4410 build_outer_var_ref (var, ctx));
4411 if (x)
4412 gimplify_and_add (x, ilist);
4413 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
4415 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
4416 lower_omp (&tseq, ctx);
4417 gimple_seq_add_seq (ilist, tseq);
4419 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
4420 if (is_simd)
4422 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
4423 lower_omp (&tseq, ctx);
4424 gimple_seq_add_seq (dlist, tseq);
4425 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
4427 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
4428 goto do_dtor;
4430 else
4432 x = omp_reduction_init (c, TREE_TYPE (new_var));
4433 gcc_assert (TREE_CODE (TREE_TYPE (new_var)) != ARRAY_TYPE);
4434 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
4436 /* reduction(-:var) sums up the partial results, so it
4437 acts identically to reduction(+:var). */
4438 if (code == MINUS_EXPR)
4439 code = PLUS_EXPR;
4441 tree new_vard = new_var;
4442 if (is_simd && omp_is_reference (var))
4444 gcc_assert (TREE_CODE (new_var) == MEM_REF);
4445 new_vard = TREE_OPERAND (new_var, 0);
4446 gcc_assert (DECL_P (new_vard));
4448 if (is_simd
4449 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4450 ivar, lvar))
4452 tree ref = build_outer_var_ref (var, ctx);
4454 gimplify_assign (unshare_expr (ivar), x, &llist[0]);
4456 if (sctx.is_simt)
4458 if (!simt_lane)
4459 simt_lane = create_tmp_var (unsigned_type_node);
4460 x = build_call_expr_internal_loc
4461 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_BFLY,
4462 TREE_TYPE (ivar), 2, ivar, simt_lane);
4463 x = build2 (code, TREE_TYPE (ivar), ivar, x);
4464 gimplify_assign (ivar, x, &llist[2]);
4466 x = build2 (code, TREE_TYPE (ref), ref, ivar);
4467 ref = build_outer_var_ref (var, ctx);
4468 gimplify_assign (ref, x, &llist[1]);
4470 if (new_vard != new_var)
4472 SET_DECL_VALUE_EXPR (new_vard,
4473 build_fold_addr_expr (lvar));
4474 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
4477 else
4479 if (omp_is_reference (var) && is_simd)
4480 handle_simd_reference (clause_loc, new_vard, ilist);
4481 gimplify_assign (new_var, x, ilist);
4482 if (is_simd)
4484 tree ref = build_outer_var_ref (var, ctx);
4486 x = build2 (code, TREE_TYPE (ref), ref, new_var);
4487 ref = build_outer_var_ref (var, ctx);
4488 gimplify_assign (ref, x, dlist);
4492 break;
4494 default:
4495 gcc_unreachable ();
4500 if (sctx.max_vf == 1)
4501 sctx.is_simt = false;
4503 if (sctx.lane || sctx.is_simt)
4505 uid = create_tmp_var (ptr_type_node, "simduid");
4506 /* Don't want uninit warnings on simduid, it is always uninitialized,
4507 but we use it not for the value, but for the DECL_UID only. */
4508 TREE_NO_WARNING (uid) = 1;
4509 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SIMDUID_);
4510 OMP_CLAUSE__SIMDUID__DECL (c) = uid;
4511 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
4512 gimple_omp_for_set_clauses (ctx->stmt, c);
4514 /* Emit calls denoting privatized variables and initializing a pointer to
4515 structure that holds private variables as fields after ompdevlow pass. */
4516 if (sctx.is_simt)
4518 sctx.simt_eargs[0] = uid;
4519 gimple *g
4520 = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, sctx.simt_eargs);
4521 gimple_call_set_lhs (g, uid);
4522 gimple_seq_add_stmt (ilist, g);
4523 sctx.simt_eargs.release ();
4525 simtrec = create_tmp_var (ptr_type_node, ".omp_simt");
4526 g = gimple_build_call_internal (IFN_GOMP_SIMT_ENTER_ALLOC, 1, uid);
4527 gimple_call_set_lhs (g, simtrec);
4528 gimple_seq_add_stmt (ilist, g);
4530 if (sctx.lane)
4532 gimple *g
4533 = gimple_build_call_internal (IFN_GOMP_SIMD_LANE, 1, uid);
4534 gimple_call_set_lhs (g, sctx.lane);
4535 gimple_stmt_iterator gsi = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
4536 gsi_insert_before_without_update (&gsi, g, GSI_SAME_STMT);
4537 g = gimple_build_assign (sctx.lane, INTEGER_CST,
4538 build_int_cst (unsigned_type_node, 0));
4539 gimple_seq_add_stmt (ilist, g);
4540 /* Emit reductions across SIMT lanes in log_2(simt_vf) steps. */
4541 if (llist[2])
4543 tree simt_vf = create_tmp_var (unsigned_type_node);
4544 g = gimple_build_call_internal (IFN_GOMP_SIMT_VF, 0);
4545 gimple_call_set_lhs (g, simt_vf);
4546 gimple_seq_add_stmt (dlist, g);
4548 tree t = build_int_cst (unsigned_type_node, 1);
4549 g = gimple_build_assign (simt_lane, INTEGER_CST, t);
4550 gimple_seq_add_stmt (dlist, g);
4552 t = build_int_cst (unsigned_type_node, 0);
4553 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
4554 gimple_seq_add_stmt (dlist, g);
4556 tree body = create_artificial_label (UNKNOWN_LOCATION);
4557 tree header = create_artificial_label (UNKNOWN_LOCATION);
4558 tree end = create_artificial_label (UNKNOWN_LOCATION);
4559 gimple_seq_add_stmt (dlist, gimple_build_goto (header));
4560 gimple_seq_add_stmt (dlist, gimple_build_label (body));
4562 gimple_seq_add_seq (dlist, llist[2]);
4564 g = gimple_build_assign (simt_lane, LSHIFT_EXPR, simt_lane, integer_one_node);
4565 gimple_seq_add_stmt (dlist, g);
4567 gimple_seq_add_stmt (dlist, gimple_build_label (header));
4568 g = gimple_build_cond (LT_EXPR, simt_lane, simt_vf, body, end);
4569 gimple_seq_add_stmt (dlist, g);
4571 gimple_seq_add_stmt (dlist, gimple_build_label (end));
4573 for (int i = 0; i < 2; i++)
4574 if (llist[i])
4576 tree vf = create_tmp_var (unsigned_type_node);
4577 g = gimple_build_call_internal (IFN_GOMP_SIMD_VF, 1, uid);
4578 gimple_call_set_lhs (g, vf);
4579 gimple_seq *seq = i == 0 ? ilist : dlist;
4580 gimple_seq_add_stmt (seq, g);
4581 tree t = build_int_cst (unsigned_type_node, 0);
4582 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
4583 gimple_seq_add_stmt (seq, g);
4584 tree body = create_artificial_label (UNKNOWN_LOCATION);
4585 tree header = create_artificial_label (UNKNOWN_LOCATION);
4586 tree end = create_artificial_label (UNKNOWN_LOCATION);
4587 gimple_seq_add_stmt (seq, gimple_build_goto (header));
4588 gimple_seq_add_stmt (seq, gimple_build_label (body));
4589 gimple_seq_add_seq (seq, llist[i]);
4590 t = build_int_cst (unsigned_type_node, 1);
4591 g = gimple_build_assign (sctx.idx, PLUS_EXPR, sctx.idx, t);
4592 gimple_seq_add_stmt (seq, g);
4593 gimple_seq_add_stmt (seq, gimple_build_label (header));
4594 g = gimple_build_cond (LT_EXPR, sctx.idx, vf, body, end);
4595 gimple_seq_add_stmt (seq, g);
4596 gimple_seq_add_stmt (seq, gimple_build_label (end));
4599 if (sctx.is_simt)
4601 gimple_seq_add_seq (dlist, sctx.simt_dlist);
4602 gimple *g
4603 = gimple_build_call_internal (IFN_GOMP_SIMT_EXIT, 1, simtrec);
4604 gimple_seq_add_stmt (dlist, g);
4607 /* The copyin sequence is not to be executed by the main thread, since
4608 that would result in self-copies. Perhaps not visible to scalars,
4609 but it certainly is to C++ operator=. */
4610 if (copyin_seq)
4612 x = build_call_expr (builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM),
4614 x = build2 (NE_EXPR, boolean_type_node, x,
4615 build_int_cst (TREE_TYPE (x), 0));
4616 x = build3 (COND_EXPR, void_type_node, x, copyin_seq, NULL);
4617 gimplify_and_add (x, ilist);
4620 /* If any copyin variable is passed by reference, we must ensure the
4621 master thread doesn't modify it before it is copied over in all
4622 threads. Similarly for variables in both firstprivate and
4623 lastprivate clauses we need to ensure the lastprivate copying
4624 happens after firstprivate copying in all threads. And similarly
4625 for UDRs if initializer expression refers to omp_orig. */
4626 if (copyin_by_ref || lastprivate_firstprivate || reduction_omp_orig_ref)
4628 /* Don't add any barrier for #pragma omp simd or
4629 #pragma omp distribute. */
4630 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
4631 || gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_FOR)
4632 gimple_seq_add_stmt (ilist, omp_build_barrier (NULL_TREE));
4635 /* If max_vf is non-zero, then we can use only a vectorization factor
4636 up to the max_vf we chose. So stick it into the safelen clause. */
4637 if (sctx.max_vf)
4639 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
4640 OMP_CLAUSE_SAFELEN);
4641 if (c == NULL_TREE
4642 || (TREE_CODE (OMP_CLAUSE_SAFELEN_EXPR (c)) == INTEGER_CST
4643 && compare_tree_int (OMP_CLAUSE_SAFELEN_EXPR (c),
4644 sctx.max_vf) == 1))
4646 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN);
4647 OMP_CLAUSE_SAFELEN_EXPR (c) = build_int_cst (integer_type_node,
4648 sctx.max_vf);
4649 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
4650 gimple_omp_for_set_clauses (ctx->stmt, c);
4656 /* Generate code to implement the LASTPRIVATE clauses. This is used for
4657 both parallel and workshare constructs. PREDICATE may be NULL if it's
4658 always true. */
4660 static void
4661 lower_lastprivate_clauses (tree clauses, tree predicate, gimple_seq *stmt_list,
4662 omp_context *ctx)
4664 tree x, c, label = NULL, orig_clauses = clauses;
4665 bool par_clauses = false;
4666 tree simduid = NULL, lastlane = NULL, simtcond = NULL, simtlast = NULL;
4668 /* Early exit if there are no lastprivate or linear clauses. */
4669 for (; clauses ; clauses = OMP_CLAUSE_CHAIN (clauses))
4670 if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LASTPRIVATE
4671 || (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LINEAR
4672 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (clauses)))
4673 break;
4674 if (clauses == NULL)
4676 /* If this was a workshare clause, see if it had been combined
4677 with its parallel. In that case, look for the clauses on the
4678 parallel statement itself. */
4679 if (is_parallel_ctx (ctx))
4680 return;
4682 ctx = ctx->outer;
4683 if (ctx == NULL || !is_parallel_ctx (ctx))
4684 return;
4686 clauses = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
4687 OMP_CLAUSE_LASTPRIVATE);
4688 if (clauses == NULL)
4689 return;
4690 par_clauses = true;
/* For SIMD loops pick up the _simduid_ clause (used below to index
   "omp simd array" privatized copies) and note whether SIMT lowering
   may apply.  */
4693 bool maybe_simt = false;
4694 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
4695 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
4697 maybe_simt = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMT_);
4698 simduid = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMDUID_);
4699 if (simduid)
4700 simduid = OMP_CLAUSE__SIMDUID__DECL (simduid);
/* Emit the guard so the copy-out code below only runs when PREDICATE
   holds (e.g. in the thread that executed the last iteration).  */
4703 if (predicate)
4705 gcond *stmt;
4706 tree label_true, arm1, arm2;
4707 enum tree_code pred_code = TREE_CODE (predicate);
4709 label = create_artificial_label (UNKNOWN_LOCATION);
4710 label_true = create_artificial_label (UNKNOWN_LOCATION);
4711 if (TREE_CODE_CLASS (pred_code) == tcc_comparison)
4713 arm1 = TREE_OPERAND (predicate, 0);
4714 arm2 = TREE_OPERAND (predicate, 1);
4715 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
4716 gimplify_expr (&arm2, stmt_list, NULL, is_gimple_val, fb_rvalue);
4718 else
4720 arm1 = predicate;
4721 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
4722 arm2 = boolean_false_node;
4723 pred_code = NE_EXPR;
/* Under SIMT, vote across the warp so every lane takes the same
   branch even though only one lane satisfied the predicate.  */
4725 if (maybe_simt)
4727 c = build2 (pred_code, boolean_type_node, arm1, arm2);
4728 c = fold_convert (integer_type_node, c);
4729 simtcond = create_tmp_var (integer_type_node);
4730 gimplify_assign (simtcond, c, stmt_list);
4731 gcall *g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY,
4732 1, simtcond);
4733 c = create_tmp_var (integer_type_node);
4734 gimple_call_set_lhs (g, c);
4735 gimple_seq_add_stmt (stmt_list, g);
4736 stmt = gimple_build_cond (NE_EXPR, c, integer_zero_node,
4737 label_true, label);
4739 else
4740 stmt = gimple_build_cond (pred_code, arm1, arm2, label_true, label);
4741 gimple_seq_add_stmt (stmt_list, stmt);
4742 gimple_seq_add_stmt (stmt_list, gimple_build_label (label_true));
/* Main loop: emit one copy-out assignment (private copy -> outer var)
   per lastprivate / copy-out linear clause.  */
4745 for (c = clauses; c ;)
4747 tree var, new_var;
4748 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
4750 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4751 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
4752 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
4754 var = OMP_CLAUSE_DECL (c);
/* For taskloop firstprivate+lastprivate the private copy lives in
   the enclosing task context, not in CTX itself.  */
4755 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4756 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
4757 && is_taskloop_ctx (ctx))
4759 gcc_checking_assert (ctx->outer && is_task_ctx (ctx->outer));
4760 new_var = lookup_decl (var, ctx->outer);
4762 else
4764 new_var = lookup_decl (var, ctx);
4765 /* Avoid uninitialized warnings for lastprivate and
4766 for linear iterators. */
4767 if (predicate
4768 && (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4769 || OMP_CLAUSE_LINEAR_NO_COPYIN (c)))
4770 TREE_NO_WARNING (new_var) = 1;
/* If the privatized copy was widened into an "omp simd array",
   read the element written by the last executed lane.  */
4773 if (simduid && DECL_HAS_VALUE_EXPR_P (new_var))
4775 tree val = DECL_VALUE_EXPR (new_var);
4776 if (!maybe_simt
4777 && TREE_CODE (val) == ARRAY_REF
4778 && VAR_P (TREE_OPERAND (val, 0))
4779 && lookup_attribute ("omp simd array",
4780 DECL_ATTRIBUTES (TREE_OPERAND (val,
4781 0))))
4783 if (lastlane == NULL)
4785 lastlane = create_tmp_var (unsigned_type_node);
4786 gcall *g
4787 = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
4788 2, simduid,
4789 TREE_OPERAND (val, 1));
4790 gimple_call_set_lhs (g, lastlane);
4791 gimple_seq_add_stmt (stmt_list, g);
4793 new_var = build4 (ARRAY_REF, TREE_TYPE (val),
4794 TREE_OPERAND (val, 0), lastlane,
4795 NULL_TREE, NULL_TREE);
/* SIMT private variables: shuffle the value from the last lane
   into the current lane's copy before assigning outward.  */
4797 else if (maybe_simt
4798 && VAR_P (val)
4799 && lookup_attribute ("omp simt private",
4800 DECL_ATTRIBUTES (val)))
4802 if (simtlast == NULL)
4804 simtlast = create_tmp_var (unsigned_type_node);
4805 gcall *g = gimple_build_call_internal
4806 (IFN_GOMP_SIMT_LAST_LANE, 1, simtcond);
4807 gimple_call_set_lhs (g, simtlast);
4808 gimple_seq_add_stmt (stmt_list, g);
4810 x = build_call_expr_internal_loc
4811 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_IDX,
4812 TREE_TYPE (val), 2, val, simtlast);
4813 new_var = unshare_expr (new_var);
4814 gimplify_assign (new_var, x, stmt_list);
4815 new_var = unshare_expr (new_var);
/* Deferred per-clause statement sequences (e.g. C++ assignment
   helpers) are lowered and emitted here, then cleared.  */
4819 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4820 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
4822 lower_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
4823 gimple_seq_add_seq (stmt_list,
4824 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
4825 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) = NULL;
4827 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
4828 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
4830 lower_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
4831 gimple_seq_add_seq (stmt_list,
4832 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
4833 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) = NULL;
4836 x = NULL_TREE;
/* Taskloop IV: prefer the global var directly when visible two
   contexts out; otherwise fall back to the outer var ref.  */
4837 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4838 && OMP_CLAUSE_LASTPRIVATE_TASKLOOP_IV (c))
4840 gcc_checking_assert (is_taskloop_ctx (ctx));
4841 tree ovar = maybe_lookup_decl_in_outer_ctx (var,
4842 ctx->outer->outer);
4843 if (is_global_var (ovar))
4844 x = ovar;
4846 if (!x)
4847 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_LASTPRIVATE);
4848 if (omp_is_reference (var))
4849 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
4850 x = lang_hooks.decls.omp_clause_assign_op (c, x, new_var);
4851 gimplify_and_add (x, stmt_list);
4853 c = OMP_CLAUSE_CHAIN (c);
4854 if (c == NULL && !par_clauses)
4856 /* If this was a workshare clause, see if it had been combined
4857 with its parallel. In that case, continue looking for the
4858 clauses also on the parallel statement itself. */
4859 if (is_parallel_ctx (ctx))
4860 break;
4862 ctx = ctx->outer;
4863 if (ctx == NULL || !is_parallel_ctx (ctx))
4864 break;
4866 c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
4867 OMP_CLAUSE_LASTPRIVATE);
4868 par_clauses = true;
/* Close the predicate guard opened above, if any.  */
4872 if (label)
4873 gimple_seq_add_stmt (stmt_list, gimple_build_label (label));
4876 /* Lower the OpenACC reductions of CLAUSES for compute axis LEVEL
4877 (which might be a placeholder). INNER is true if this is an inner
4878 axis of a multi-axis loop. FORK and JOIN are (optional) fork and
4879 join markers. Generate the before-loop forking sequence in
4880 FORK_SEQ and the after-loop joining sequence to JOIN_SEQ. The
4881 general form of these sequences is
4883 GOACC_REDUCTION_SETUP
4884 GOACC_FORK
4885 GOACC_REDUCTION_INIT
4887 GOACC_REDUCTION_FINI
4888 GOACC_JOIN
4889 GOACC_REDUCTION_TEARDOWN. */
4891 static void
4892 lower_oacc_reductions (location_t loc, tree clauses, tree level, bool inner,
4893 gcall *fork, gcall *join, gimple_seq *fork_seq,
4894 gimple_seq *join_seq, omp_context *ctx)
/* Four staging sequences, stitched around FORK and JOIN at the end.  */
4896 gimple_seq before_fork = NULL;
4897 gimple_seq after_fork = NULL;
4898 gimple_seq before_join = NULL;
4899 gimple_seq after_join = NULL;
4900 tree init_code = NULL_TREE, fini_code = NULL_TREE,
4901 setup_code = NULL_TREE, teardown_code = NULL_TREE;
/* Running byte offset into the per-gang reduction buffer.  */
4902 unsigned offset = 0;
4904 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
4905 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
4907 tree orig = OMP_CLAUSE_DECL (c);
4908 tree var = maybe_lookup_decl (orig, ctx);
4909 tree ref_to_res = NULL_TREE;
4910 tree incoming, outgoing, v1, v2, v3;
4911 bool is_private = false;
/* Canonicalize the reduction operator: '-' sums partials like '+',
   and the short-circuit logical ops map to their bitwise forms.  */
4913 enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
4914 if (rcode == MINUS_EXPR)
4915 rcode = PLUS_EXPR;
4916 else if (rcode == TRUTH_ANDIF_EXPR)
4917 rcode = BIT_AND_EXPR;
4918 else if (rcode == TRUTH_ORIF_EXPR)
4919 rcode = BIT_IOR_EXPR;
4920 tree op = build_int_cst (unsigned_type_node, rcode);
4922 if (!var)
4923 var = orig;
4925 incoming = outgoing = var;
4927 if (!inner)
4929 /* See if an outer construct also reduces this variable. */
4930 omp_context *outer = ctx;
4932 while (omp_context *probe = outer->outer)
4934 enum gimple_code type = gimple_code (probe->stmt);
4935 tree cls;
4937 switch (type)
4939 case GIMPLE_OMP_FOR:
4940 cls = gimple_omp_for_clauses (probe->stmt);
4941 break;
4943 case GIMPLE_OMP_TARGET:
4944 if (gimple_omp_target_kind (probe->stmt)
4945 != GF_OMP_TARGET_KIND_OACC_PARALLEL)
4946 goto do_lookup;
4948 cls = gimple_omp_target_clauses (probe->stmt);
4949 break;
4951 default:
4952 goto do_lookup;
4955 outer = probe;
4956 for (; cls; cls = OMP_CLAUSE_CHAIN (cls))
4957 if (OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_REDUCTION
4958 && orig == OMP_CLAUSE_DECL (cls))
4960 incoming = outgoing = lookup_decl (orig, probe);
4961 goto has_outer_reduction;
4963 else if ((OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_FIRSTPRIVATE
4964 || OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_PRIVATE)
4965 && orig == OMP_CLAUSE_DECL (cls))
4967 is_private = true;
4968 goto do_lookup;
4972 do_lookup:
4973 /* This is the outermost construct with this reduction,
4974 see if there's a mapping for it. */
4975 if (gimple_code (outer->stmt) == GIMPLE_OMP_TARGET
4976 && maybe_lookup_field (orig, outer) && !is_private)
4978 ref_to_res = build_receiver_ref (orig, false, outer);
4979 if (omp_is_reference (orig))
4980 ref_to_res = build_simple_mem_ref (ref_to_res);
4982 tree type = TREE_TYPE (var);
4983 if (POINTER_TYPE_P (type))
4984 type = TREE_TYPE (type);
/* Incoming value is the operator's identity element; the final
   result flows out through VAR / REF_TO_RES.  */
4986 outgoing = var;
4987 incoming = omp_reduction_init_op (loc, rcode, type);
4989 else
4991 /* Try to look at enclosing contexts for reduction var,
4992 use original if no mapping found. */
4993 tree t = NULL_TREE;
4994 omp_context *c = ctx->outer;
4995 while (c && !t)
4997 t = maybe_lookup_decl (orig, c);
4998 c = c->outer;
5000 incoming = outgoing = (t ? t : orig);
5003 has_outer_reduction:;
/* A zero REF_TO_RES tells the GOACC_REDUCTION expander there is no
   receiver object to write back into.  */
5006 if (!ref_to_res)
5007 ref_to_res = integer_zero_node;
/* Reference-typed reductions: work on fresh pointer temps V1..V3
   and dereference everything before building the calls.  */
5009 if (omp_is_reference (orig))
5011 tree type = TREE_TYPE (var);
5012 const char *id = IDENTIFIER_POINTER (DECL_NAME (var));
5014 if (!inner)
5016 tree x = create_tmp_var (TREE_TYPE (type), id);
5017 gimplify_assign (var, build_fold_addr_expr (x), fork_seq);
5020 v1 = create_tmp_var (type, id);
5021 v2 = create_tmp_var (type, id);
5022 v3 = create_tmp_var (type, id);
5024 gimplify_assign (v1, var, fork_seq);
5025 gimplify_assign (v2, var, fork_seq);
5026 gimplify_assign (v3, var, fork_seq);
5028 var = build_simple_mem_ref (var);
5029 v1 = build_simple_mem_ref (v1);
5030 v2 = build_simple_mem_ref (v2);
5031 v3 = build_simple_mem_ref (v3);
5032 outgoing = build_simple_mem_ref (outgoing);
5034 if (!TREE_CONSTANT (incoming))
5035 incoming = build_simple_mem_ref (incoming);
5037 else
5038 v1 = v2 = v3 = var;
5040 /* Determine position in reduction buffer, which may be used
5041 by target. */
5042 enum machine_mode mode = TYPE_MODE (TREE_TYPE (var));
5043 unsigned align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
5044 offset = (offset + align - 1) & ~(align - 1);
5045 tree off = build_int_cst (sizetype, offset);
5046 offset += GET_MODE_SIZE (mode);
/* Lazily build the four GOACC_REDUCTION sub-opcode constants once
   for all clauses.  */
5048 if (!init_code)
5050 init_code = build_int_cst (integer_type_node,
5051 IFN_GOACC_REDUCTION_INIT);
5052 fini_code = build_int_cst (integer_type_node,
5053 IFN_GOACC_REDUCTION_FINI);
5054 setup_code = build_int_cst (integer_type_node,
5055 IFN_GOACC_REDUCTION_SETUP);
5056 teardown_code = build_int_cst (integer_type_node,
5057 IFN_GOACC_REDUCTION_TEARDOWN);
/* Emit the four calls: SETUP before fork, INIT after fork, FINI
   before join, TEARDOWN (writing OUTGOING) after join.  */
5060 tree setup_call
5061 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5062 TREE_TYPE (var), 6, setup_code,
5063 unshare_expr (ref_to_res),
5064 incoming, level, op, off);
5065 tree init_call
5066 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5067 TREE_TYPE (var), 6, init_code,
5068 unshare_expr (ref_to_res),
5069 v1, level, op, off);
5070 tree fini_call
5071 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5072 TREE_TYPE (var), 6, fini_code,
5073 unshare_expr (ref_to_res),
5074 v2, level, op, off);
5075 tree teardown_call
5076 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5077 TREE_TYPE (var), 6, teardown_code,
5078 ref_to_res, v3, level, op, off);
5080 gimplify_assign (v1, setup_call, &before_fork);
5081 gimplify_assign (v2, init_call, &after_fork);
5082 gimplify_assign (v3, fini_call, &before_join);
5083 gimplify_assign (outgoing, teardown_call, &after_join);
5086 /* Now stitch things together. */
5087 gimple_seq_add_seq (fork_seq, before_fork);
5088 if (fork)
5089 gimple_seq_add_stmt (fork_seq, fork);
5090 gimple_seq_add_seq (fork_seq, after_fork);
5092 gimple_seq_add_seq (join_seq, before_join);
5093 if (join)
5094 gimple_seq_add_stmt (join_seq, join);
5095 gimple_seq_add_seq (join_seq, after_join);
5098 /* Generate code to implement the REDUCTION clauses. */
5100 static void
5101 lower_reduction_clauses (tree clauses, gimple_seq *stmt_seqp, omp_context *ctx)
5103 gimple_seq sub_seq = NULL;
5104 gimple *stmt;
5105 tree x, c;
5106 int count = 0;
5108 /* OpenACC loop reductions are handled elsewhere. */
5109 if (is_gimple_omp_oacc (ctx->stmt))
5110 return;
5112 /* SIMD reductions are handled in lower_rec_input_clauses. */
5113 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
5114 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
5115 return;
5117 /* First see if there is exactly one reduction clause. Use OMP_ATOMIC
5118 update in that case, otherwise use a lock. */
5119 for (c = clauses; c && count < 2; c = OMP_CLAUSE_CHAIN (c))
5120 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
5122 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
5123 || TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
5125 /* Never use OMP_ATOMIC for array reductions or UDRs. */
5126 count = -1;
5127 break;
5129 count++;
5132 if (count == 0)
5133 return;
/* Build SUB_SEQ: one merge (private copy into shared var) per
   reduction clause; it is wrapped in atomic-start/end below.  */
5135 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
5137 tree var, ref, new_var, orig_var;
5138 enum tree_code code;
5139 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
5141 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5142 continue;
/* Array-section reductions are represented as a MEM_REF; strip the
   address arithmetic down to the underlying decl.  */
5144 orig_var = var = OMP_CLAUSE_DECL (c);
5145 if (TREE_CODE (var) == MEM_REF)
5147 var = TREE_OPERAND (var, 0);
5148 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
5149 var = TREE_OPERAND (var, 0);
5150 if (TREE_CODE (var) == INDIRECT_REF
5151 || TREE_CODE (var) == ADDR_EXPR)
5152 var = TREE_OPERAND (var, 0);
5153 orig_var = var;
5154 if (is_variable_sized (var))
5156 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
5157 var = DECL_VALUE_EXPR (var);
5158 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
5159 var = TREE_OPERAND (var, 0);
5160 gcc_assert (DECL_P (var));
5163 new_var = lookup_decl (var, ctx);
5164 if (var == OMP_CLAUSE_DECL (c) && omp_is_reference (var))
5165 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
5166 ref = build_outer_var_ref (var, ctx);
5167 code = OMP_CLAUSE_REDUCTION_CODE (c);
5169 /* reduction(-:var) sums up the partial results, so it acts
5170 identically to reduction(+:var). */
5171 if (code == MINUS_EXPR)
5172 code = PLUS_EXPR;
/* Single scalar reduction: emit a lone OMP_ATOMIC update and return
   without the GOMP_atomic lock pair.  */
5174 if (count == 1)
5176 tree addr = build_fold_addr_expr_loc (clause_loc, ref);
5178 addr = save_expr (addr);
5179 ref = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (addr)), addr);
5180 x = fold_build2_loc (clause_loc, code, TREE_TYPE (ref), ref, new_var);
5181 x = build2 (OMP_ATOMIC, void_type_node, addr, x);
5182 gimplify_and_add (x, stmt_seqp);
5183 return;
/* Array-section reduction: loop element-wise over the section,
   merging private into shared one element at a time.  */
5185 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
5187 tree d = OMP_CLAUSE_DECL (c);
5188 tree type = TREE_TYPE (d);
5189 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
5190 tree i = create_tmp_var (TREE_TYPE (v), NULL);
5191 tree ptype = build_pointer_type (TREE_TYPE (type));
5192 tree bias = TREE_OPERAND (d, 1);
5193 d = TREE_OPERAND (d, 0);
5194 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
5196 tree b = TREE_OPERAND (d, 1);
/* Remap the bias variable into this context if privatized here,
   else resolve it in an outer context.  */
5197 b = maybe_lookup_decl (b, ctx);
5198 if (b == NULL)
5200 b = TREE_OPERAND (d, 1);
5201 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
5203 if (integer_zerop (bias))
5204 bias = b;
5205 else
5207 bias = fold_convert_loc (clause_loc, TREE_TYPE (b), bias);
5208 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
5209 TREE_TYPE (b), b, bias);
5211 d = TREE_OPERAND (d, 0);
5213 /* For ref build_outer_var_ref already performs this, so
5214 only new_var needs a dereference. */
5215 if (TREE_CODE (d) == INDIRECT_REF)
5217 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
5218 gcc_assert (omp_is_reference (var) && var == orig_var)
5220 else if (TREE_CODE (d) == ADDR_EXPR)
5222 if (orig_var == var)
5224 new_var = build_fold_addr_expr (new_var);
5225 ref = build_fold_addr_expr (ref);
5228 else
5230 gcc_assert (orig_var == var);
5231 if (omp_is_reference (var))
5232 ref = build_fold_addr_expr (ref);
5234 if (DECL_P (v))
5236 tree t = maybe_lookup_decl (v, ctx);
5237 if (t)
5238 v = t;
5239 else
5240 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
5241 gimplify_expr (&v, stmt_seqp, NULL, is_gimple_val, fb_rvalue);
5243 if (!integer_zerop (bias))
5245 bias = fold_convert_loc (clause_loc, sizetype, bias);
5246 new_var = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
5247 TREE_TYPE (new_var), new_var,
5248 unshare_expr (bias));
5249 ref = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
5250 TREE_TYPE (ref), ref, bias);
/* M and a second temp hold roving pointers into the private and
   shared arrays; both are advanced each iteration below.  */
5252 new_var = fold_convert_loc (clause_loc, ptype, new_var);
5253 ref = fold_convert_loc (clause_loc, ptype, ref);
5254 tree m = create_tmp_var (ptype, NULL);
5255 gimplify_assign (m, new_var, stmt_seqp);
5256 new_var = m;
5257 m = create_tmp_var (ptype, NULL);
5258 gimplify_assign (m, ref, stmt_seqp);
5259 ref = m;
5260 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), stmt_seqp);
5261 tree body = create_artificial_label (UNKNOWN_LOCATION);
5262 tree end = create_artificial_label (UNKNOWN_LOCATION);
5263 gimple_seq_add_stmt (&sub_seq, gimple_build_label (body));
5264 tree priv = build_simple_mem_ref_loc (clause_loc, new_var);
5265 tree out = build_simple_mem_ref_loc (clause_loc, ref);
/* UDR: splice the user's combiner, binding omp_out/omp_in
   placeholders to the current elements.  */
5266 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5268 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
5269 tree decl_placeholder
5270 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
5271 SET_DECL_VALUE_EXPR (placeholder, out);
5272 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
5273 SET_DECL_VALUE_EXPR (decl_placeholder, priv);
5274 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
5275 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
5276 gimple_seq_add_seq (&sub_seq,
5277 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
5278 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5279 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
5280 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
5282 else
5284 x = build2 (code, TREE_TYPE (out), out, priv);
5285 out = unshare_expr (out);
5286 gimplify_assign (out, x, &sub_seq);
/* Advance both pointers and the index, loop while i <= max.  */
5288 gimple *g = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
5289 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5290 gimple_seq_add_stmt (&sub_seq, g);
5291 g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
5292 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5293 gimple_seq_add_stmt (&sub_seq, g);
5294 g = gimple_build_assign (i, PLUS_EXPR, i,
5295 build_int_cst (TREE_TYPE (i), 1));
5296 gimple_seq_add_stmt (&sub_seq, g);
5297 g = gimple_build_cond (LE_EXPR, i, v, body, end);
5298 gimple_seq_add_stmt (&sub_seq, g);
5299 gimple_seq_add_stmt (&sub_seq, gimple_build_label (end));
/* Scalar UDR: lower the user's combiner with omp_out bound to the
   shared variable reference.  */
5301 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5303 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
5305 if (omp_is_reference (var)
5306 && !useless_type_conversion_p (TREE_TYPE (placeholder),
5307 TREE_TYPE (ref)))
5308 ref = build_fold_addr_expr_loc (clause_loc, ref);
5309 SET_DECL_VALUE_EXPR (placeholder, ref);
5310 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
5311 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
5312 gimple_seq_add_seq (&sub_seq, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
5313 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5314 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
5316 else
5318 x = build2 (code, TREE_TYPE (ref), ref, new_var);
5319 ref = build_outer_var_ref (var, ctx);
5320 gimplify_assign (ref, x, &sub_seq);
/* Multiple reductions: serialize all merges inside a single
   GOMP_atomic_start/GOMP_atomic_end critical region.  */
5324 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START),
5326 gimple_seq_add_stmt (stmt_seqp, stmt);
5328 gimple_seq_add_seq (stmt_seqp, sub_seq);
5330 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END),
5332 gimple_seq_add_stmt (stmt_seqp, stmt);
5336 /* Generate code to implement the COPYPRIVATE clauses. */
5338 static void
5339 lower_copyprivate_clauses (tree clauses, gimple_seq *slist, gimple_seq *rlist,
5340 omp_context *ctx)
5342 tree c;
5344 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
5346 tree var, new_var, ref, x;
5347 bool by_ref;
5348 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
5350 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYPRIVATE)
5351 continue;
5353 var = OMP_CLAUSE_DECL (c);
5354 by_ref = use_pointer_for_field (var, NULL);
/* Sender side (SLIST): store the value -- or its address when
   passed by reference -- into the broadcast record field.  */
5356 ref = build_sender_ref (var, ctx);
5357 x = new_var = lookup_decl_in_outer_ctx (var, ctx);
5358 if (by_ref)
5360 x = build_fold_addr_expr_loc (clause_loc, new_var);
5361 x = fold_convert_loc (clause_loc, TREE_TYPE (ref), x);
5363 gimplify_assign (ref, x, slist);
/* Receiver side (RLIST): read back from the record, undoing the
   by-ref indirection, and assign into the local copy.  */
5365 ref = build_receiver_ref (var, false, ctx);
5366 if (by_ref)
5368 ref = fold_convert_loc (clause_loc,
5369 build_pointer_type (TREE_TYPE (new_var)),
5370 ref);
5371 ref = build_fold_indirect_ref_loc (clause_loc, ref);
/* Reference-typed variables need one more dereference on both
   sides before the assignment.  */
5373 if (omp_is_reference (var))
5375 ref = fold_convert_loc (clause_loc, TREE_TYPE (new_var), ref);
5376 ref = build_simple_mem_ref_loc (clause_loc, ref);
5377 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
/* Use the language hook so C++ copy-assignment operators and
   similar are honored.  */
5379 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, ref);
5380 gimplify_and_add (x, rlist);
5385 /* Generate code to implement the clauses, FIRSTPRIVATE, COPYIN, LASTPRIVATE,
5386 and REDUCTION from the sender (aka parent) side. */
5388 static void
5389 lower_send_clauses (tree clauses, gimple_seq *ilist, gimple_seq *olist,
5390 omp_context *ctx)
5392 tree c, t;
5393 int ignored_looptemp = 0;
5394 bool is_taskloop = false;
5396 /* For taskloop, ignore first two _looptemp_ clauses, those are initialized
5397 by GOMP_taskloop. */
5398 if (is_task_ctx (ctx) && gimple_omp_task_taskloop_p (ctx->stmt))
5400 ignored_looptemp = 2;
5401 is_taskloop = true;
5404 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
5406 tree val, ref, x, var;
5407 bool by_ref, do_in = false, do_out = false;
5408 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
5410 switch (OMP_CLAUSE_CODE (c))
5412 case OMP_CLAUSE_PRIVATE:
5413 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
5414 break;
5415 continue;
5416 case OMP_CLAUSE_FIRSTPRIVATE:
5417 case OMP_CLAUSE_COPYIN:
5418 case OMP_CLAUSE_LASTPRIVATE:
5419 case OMP_CLAUSE_REDUCTION:
5420 break;
5421 case OMP_CLAUSE_SHARED:
5422 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
5423 break;
5424 continue;
5425 case OMP_CLAUSE__LOOPTEMP_:
5426 if (ignored_looptemp)
5428 ignored_looptemp--;
5429 continue;
5431 break;
5432 default:
5433 continue;
5436 val = OMP_CLAUSE_DECL (c);
5437 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
5438 && TREE_CODE (val) == MEM_REF)
5440 val = TREE_OPERAND (val, 0);
5441 if (TREE_CODE (val) == POINTER_PLUS_EXPR)
5442 val = TREE_OPERAND (val, 0);
5443 if (TREE_CODE (val) == INDIRECT_REF
5444 || TREE_CODE (val) == ADDR_EXPR)
5445 val = TREE_OPERAND (val, 0);
5446 if (is_variable_sized (val))
5447 continue;
5450 /* For OMP_CLAUSE_SHARED_FIRSTPRIVATE, look beyond the
5451 outer taskloop region. */
5452 omp_context *ctx_for_o = ctx;
5453 if (is_taskloop
5454 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
5455 && OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
5456 ctx_for_o = ctx->outer;
5458 var = lookup_decl_in_outer_ctx (val, ctx_for_o);
5460 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYIN
5461 && is_global_var (var))
5462 continue;
5464 t = omp_member_access_dummy_var (var);
5465 if (t)
5467 var = DECL_VALUE_EXPR (var);
5468 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx_for_o);
5469 if (o != t)
5470 var = unshare_and_remap (var, t, o);
5471 else
5472 var = unshare_expr (var);
5475 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
5477 /* Handle taskloop firstprivate/lastprivate, where the
5478 lastprivate on GIMPLE_OMP_TASK is represented as
5479 OMP_CLAUSE_SHARED_FIRSTPRIVATE. */
5480 tree f = lookup_sfield ((splay_tree_key) &DECL_UID (val), ctx);
5481 x = omp_build_component_ref (ctx->sender_decl, f);
5482 if (use_pointer_for_field (val, ctx))
5483 var = build_fold_addr_expr (var);
5484 gimplify_assign (x, var, ilist);
5485 DECL_ABSTRACT_ORIGIN (f) = NULL;
5486 continue;
5489 if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
5490 || val == OMP_CLAUSE_DECL (c))
5491 && is_variable_sized (val))
5492 continue;
5493 by_ref = use_pointer_for_field (val, NULL);
5495 switch (OMP_CLAUSE_CODE (c))
5497 case OMP_CLAUSE_FIRSTPRIVATE:
5498 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c)
5499 && !by_ref
5500 && is_task_ctx (ctx))
5501 TREE_NO_WARNING (var) = 1;
5502 do_in = true;
5503 break;
5505 case OMP_CLAUSE_PRIVATE:
5506 case OMP_CLAUSE_COPYIN:
5507 case OMP_CLAUSE__LOOPTEMP_:
5508 do_in = true;
5509 break;
5511 case OMP_CLAUSE_LASTPRIVATE:
5512 if (by_ref || omp_is_reference (val))
5514 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
5515 continue;
5516 do_in = true;
5518 else
5520 do_out = true;
5521 if (lang_hooks.decls.omp_private_outer_ref (val))
5522 do_in = true;
5524 break;
5526 case OMP_CLAUSE_REDUCTION:
5527 do_in = true;
5528 if (val == OMP_CLAUSE_DECL (c))
5529 do_out = !(by_ref || omp_is_reference (val));
5530 else
5531 by_ref = TREE_CODE (TREE_TYPE (val)) == ARRAY_TYPE;
5532 break;
5534 default:
5535 gcc_unreachable ();
5538 if (do_in)
5540 ref = build_sender_ref (val, ctx);
5541 x = by_ref ? build_fold_addr_expr_loc (clause_loc, var) : var;
5542 gimplify_assign (ref, x, ilist);
5543 if (is_task_ctx (ctx))
5544 DECL_ABSTRACT_ORIGIN (TREE_OPERAND (ref, 1)) = NULL;
5547 if (do_out)
5549 ref = build_sender_ref (val, ctx);
5550 gimplify_assign (var, ref, olist);
5555 /* Generate code to implement SHARED from the sender (aka parent)
5556 side. This is trickier, since GIMPLE_OMP_PARALLEL_CLAUSES doesn't
5557 list things that got automatically shared. */
5559 static void
5560 lower_send_shared_vars (gimple_seq *ilist, gimple_seq *olist, omp_context *ctx)
5562 tree var, ovar, nvar, t, f, x, record_type;
5564 if (ctx->record_type == NULL)
5565 return;
5567 record_type = ctx->srecord_type ? ctx->srecord_type : ctx->record_type;
5568 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
5570 ovar = DECL_ABSTRACT_ORIGIN (f);
5571 if (!ovar || TREE_CODE (ovar) == FIELD_DECL)
5572 continue;
5574 nvar = maybe_lookup_decl (ovar, ctx);
5575 if (!nvar || !DECL_HAS_VALUE_EXPR_P (nvar))
5576 continue;
5578 /* If CTX is a nested parallel directive. Find the immediately
5579 enclosing parallel or workshare construct that contains a
5580 mapping for OVAR. */
5581 var = lookup_decl_in_outer_ctx (ovar, ctx);
5583 t = omp_member_access_dummy_var (var);
5584 if (t)
5586 var = DECL_VALUE_EXPR (var);
5587 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
5588 if (o != t)
5589 var = unshare_and_remap (var, t, o);
5590 else
5591 var = unshare_expr (var);
5594 if (use_pointer_for_field (ovar, ctx))
5596 x = build_sender_ref (ovar, ctx);
5597 var = build_fold_addr_expr (var);
5598 gimplify_assign (x, var, ilist);
5600 else
5602 x = build_sender_ref (ovar, ctx);
5603 gimplify_assign (x, var, ilist);
5605 if (!TREE_READONLY (var)
5606 /* We don't need to receive a new reference to a result
5607 or parm decl. In fact we may not store to it as we will
5608 invalidate any pending RSO and generate wrong gimple
5609 during inlining. */
5610 && !((TREE_CODE (var) == RESULT_DECL
5611 || TREE_CODE (var) == PARM_DECL)
5612 && DECL_BY_REFERENCE (var)))
5614 x = build_sender_ref (ovar, ctx);
5615 gimplify_assign (var, x, olist);
5621 /* Emit an OpenACC head marker call, encapulating the partitioning and
5622 other information that must be processed by the target compiler.
5623 Return the maximum number of dimensions the associated loop might
5624 be partitioned over. */
5626 static unsigned
5627 lower_oacc_head_mark (location_t loc, tree ddvar, tree clauses,
5628 gimple_seq *seq, omp_context *ctx)
5630 unsigned levels = 0;
5631 unsigned tag = 0;
5632 tree gang_static = NULL_TREE;
5633 auto_vec<tree, 5> args;
5635 args.quick_push (build_int_cst
5636 (integer_type_node, IFN_UNIQUE_OACC_HEAD_MARK));
5637 args.quick_push (ddvar);
5638 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
5640 switch (OMP_CLAUSE_CODE (c))
5642 case OMP_CLAUSE_GANG:
5643 tag |= OLF_DIM_GANG;
5644 gang_static = OMP_CLAUSE_GANG_STATIC_EXPR (c);
5645 /* static:* is represented by -1, and we can ignore it, as
5646 scheduling is always static. */
5647 if (gang_static && integer_minus_onep (gang_static))
5648 gang_static = NULL_TREE;
5649 levels++;
5650 break;
5652 case OMP_CLAUSE_WORKER:
5653 tag |= OLF_DIM_WORKER;
5654 levels++;
5655 break;
5657 case OMP_CLAUSE_VECTOR:
5658 tag |= OLF_DIM_VECTOR;
5659 levels++;
5660 break;
5662 case OMP_CLAUSE_SEQ:
5663 tag |= OLF_SEQ;
5664 break;
5666 case OMP_CLAUSE_AUTO:
5667 tag |= OLF_AUTO;
5668 break;
5670 case OMP_CLAUSE_INDEPENDENT:
5671 tag |= OLF_INDEPENDENT;
5672 break;
5674 case OMP_CLAUSE_TILE:
5675 tag |= OLF_TILE;
5676 break;
5678 default:
5679 continue;
5683 if (gang_static)
5685 if (DECL_P (gang_static))
5686 gang_static = build_outer_var_ref (gang_static, ctx);
5687 tag |= OLF_GANG_STATIC;
5690 /* In a parallel region, loops are implicitly INDEPENDENT. */
5691 omp_context *tgt = enclosing_target_ctx (ctx);
5692 if (!tgt || is_oacc_parallel (tgt))
5693 tag |= OLF_INDEPENDENT;
5695 if (tag & OLF_TILE)
5696 /* Tiling could use all 3 levels. */
5697 levels = 3;
5698 else
5700 /* A loop lacking SEQ, GANG, WORKER and/or VECTOR could be AUTO.
5701 Ensure at least one level, or 2 for possible auto
5702 partitioning */
5703 bool maybe_auto = !(tag & (((GOMP_DIM_MASK (GOMP_DIM_MAX) - 1)
5704 << OLF_DIM_BASE) | OLF_SEQ));
5706 if (levels < 1u + maybe_auto)
5707 levels = 1u + maybe_auto;
5710 args.quick_push (build_int_cst (integer_type_node, levels));
5711 args.quick_push (build_int_cst (integer_type_node, tag));
5712 if (gang_static)
5713 args.quick_push (gang_static);
5715 gcall *call = gimple_build_call_internal_vec (IFN_UNIQUE, args);
5716 gimple_set_location (call, loc);
5717 gimple_set_lhs (call, ddvar);
5718 gimple_seq_add_stmt (seq, call);
5720 return levels;
5723 /* Emit an OpenACC lopp head or tail marker to SEQ. LEVEL is the
5724 partitioning level of the enclosed region. */
5726 static void
5727 lower_oacc_loop_marker (location_t loc, tree ddvar, bool head,
5728 tree tofollow, gimple_seq *seq)
5730 int marker_kind = (head ? IFN_UNIQUE_OACC_HEAD_MARK
5731 : IFN_UNIQUE_OACC_TAIL_MARK);
5732 tree marker = build_int_cst (integer_type_node, marker_kind);
5733 int nargs = 2 + (tofollow != NULL_TREE);
5734 gcall *call = gimple_build_call_internal (IFN_UNIQUE, nargs,
5735 marker, ddvar, tofollow);
5736 gimple_set_location (call, loc);
5737 gimple_set_lhs (call, ddvar);
5738 gimple_seq_add_stmt (seq, call);
5741 /* Generate the before and after OpenACC loop sequences. CLAUSES are
5742 the loop clauses, from which we extract reductions. Initialize
5743 HEAD and TAIL. */
5745 static void
5746 lower_oacc_head_tail (location_t loc, tree clauses,
5747 gimple_seq *head, gimple_seq *tail, omp_context *ctx)
5749 bool inner = false;
5750 tree ddvar = create_tmp_var (integer_type_node, ".data_dep");
5751 gimple_seq_add_stmt (head, gimple_build_assign (ddvar, integer_zero_node));
5753 unsigned count = lower_oacc_head_mark (loc, ddvar, clauses, head, ctx);
5754 tree fork_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_FORK);
5755 tree join_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_JOIN);
5757 gcc_assert (count);
5758 for (unsigned done = 1; count; count--, done++)
5760 gimple_seq fork_seq = NULL;
5761 gimple_seq join_seq = NULL;
5763 tree place = build_int_cst (integer_type_node, -1);
5764 gcall *fork = gimple_build_call_internal (IFN_UNIQUE, 3,
5765 fork_kind, ddvar, place);
5766 gimple_set_location (fork, loc);
5767 gimple_set_lhs (fork, ddvar);
5769 gcall *join = gimple_build_call_internal (IFN_UNIQUE, 3,
5770 join_kind, ddvar, place);
5771 gimple_set_location (join, loc);
5772 gimple_set_lhs (join, ddvar);
5774 /* Mark the beginning of this level sequence. */
5775 if (inner)
5776 lower_oacc_loop_marker (loc, ddvar, true,
5777 build_int_cst (integer_type_node, count),
5778 &fork_seq);
5779 lower_oacc_loop_marker (loc, ddvar, false,
5780 build_int_cst (integer_type_node, done),
5781 &join_seq);
5783 lower_oacc_reductions (loc, clauses, place, inner,
5784 fork, join, &fork_seq, &join_seq, ctx);
5786 /* Append this level to head. */
5787 gimple_seq_add_seq (head, fork_seq);
5788 /* Prepend it to tail. */
5789 gimple_seq_add_seq (&join_seq, *tail);
5790 *tail = join_seq;
5792 inner = true;
5795 /* Mark the end of the sequence. */
5796 lower_oacc_loop_marker (loc, ddvar, true, NULL_TREE, head);
5797 lower_oacc_loop_marker (loc, ddvar, false, NULL_TREE, tail);
5800 /* If exceptions are enabled, wrap the statements in BODY in a MUST_NOT_THROW
5801 catch handler and return it. This prevents programs from violating the
5802 structured block semantics with throws. */
5804 static gimple_seq
5805 maybe_catch_exception (gimple_seq body)
5807 gimple *g;
5808 tree decl;
5810 if (!flag_exceptions)
5811 return body;
5813 if (lang_hooks.eh_protect_cleanup_actions != NULL)
5814 decl = lang_hooks.eh_protect_cleanup_actions ();
5815 else
5816 decl = builtin_decl_explicit (BUILT_IN_TRAP);
5818 g = gimple_build_eh_must_not_throw (decl);
5819 g = gimple_build_try (body, gimple_seq_alloc_with_stmt (g),
5820 GIMPLE_TRY_CATCH);
5822 return gimple_seq_alloc_with_stmt (g);
5826 /* Routines to lower OMP directives into OMP-GIMPLE. */
5828 /* If ctx is a worksharing context inside of a cancellable parallel
5829 region and it isn't nowait, add lhs to its GIMPLE_OMP_RETURN
5830 and conditional branch to parallel's cancel_label to handle
5831 cancellation in the implicit barrier. */
5833 static void
5834 maybe_add_implicit_barrier_cancel (omp_context *ctx, gimple_seq *body)
5836 gimple *omp_return = gimple_seq_last_stmt (*body);
5837 gcc_assert (gimple_code (omp_return) == GIMPLE_OMP_RETURN);
5838 if (gimple_omp_return_nowait_p (omp_return))
5839 return;
5840 if (ctx->outer
5841 && gimple_code (ctx->outer->stmt) == GIMPLE_OMP_PARALLEL
5842 && ctx->outer->cancellable)
5844 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_CANCEL);
5845 tree c_bool_type = TREE_TYPE (TREE_TYPE (fndecl));
5846 tree lhs = create_tmp_var (c_bool_type);
5847 gimple_omp_return_set_lhs (omp_return, lhs);
5848 tree fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
5849 gimple *g = gimple_build_cond (NE_EXPR, lhs,
5850 fold_convert (c_bool_type,
5851 boolean_false_node),
5852 ctx->outer->cancel_label, fallthru_label);
5853 gimple_seq_add_stmt (body, g);
5854 gimple_seq_add_stmt (body, gimple_build_label (fallthru_label));
5858 /* Lower the OpenMP sections directive in the current statement in GSI_P.
5859 CTX is the enclosing OMP context for the current statement. */
5861 static void
5862 lower_omp_sections (gimple_stmt_iterator *gsi_p, omp_context *ctx)
5864 tree block, control;
5865 gimple_stmt_iterator tgsi;
5866 gomp_sections *stmt;
5867 gimple *t;
5868 gbind *new_stmt, *bind;
5869 gimple_seq ilist, dlist, olist, new_body;
5871 stmt = as_a <gomp_sections *> (gsi_stmt (*gsi_p));
5873 push_gimplify_context ();
5875 dlist = NULL;
5876 ilist = NULL;
5877 lower_rec_input_clauses (gimple_omp_sections_clauses (stmt),
5878 &ilist, &dlist, ctx, NULL);
5880 new_body = gimple_omp_body (stmt);
5881 gimple_omp_set_body (stmt, NULL);
5882 tgsi = gsi_start (new_body);
5883 for (; !gsi_end_p (tgsi); gsi_next (&tgsi))
5885 omp_context *sctx;
5886 gimple *sec_start;
5888 sec_start = gsi_stmt (tgsi);
5889 sctx = maybe_lookup_ctx (sec_start);
5890 gcc_assert (sctx);
5892 lower_omp (gimple_omp_body_ptr (sec_start), sctx);
5893 gsi_insert_seq_after (&tgsi, gimple_omp_body (sec_start),
5894 GSI_CONTINUE_LINKING);
5895 gimple_omp_set_body (sec_start, NULL);
5897 if (gsi_one_before_end_p (tgsi))
5899 gimple_seq l = NULL;
5900 lower_lastprivate_clauses (gimple_omp_sections_clauses (stmt), NULL,
5901 &l, ctx);
5902 gsi_insert_seq_after (&tgsi, l, GSI_CONTINUE_LINKING);
5903 gimple_omp_section_set_last (sec_start);
5906 gsi_insert_after (&tgsi, gimple_build_omp_return (false),
5907 GSI_CONTINUE_LINKING);
5910 block = make_node (BLOCK);
5911 bind = gimple_build_bind (NULL, new_body, block);
5913 olist = NULL;
5914 lower_reduction_clauses (gimple_omp_sections_clauses (stmt), &olist, ctx);
5916 block = make_node (BLOCK);
5917 new_stmt = gimple_build_bind (NULL, NULL, block);
5918 gsi_replace (gsi_p, new_stmt, true);
5920 pop_gimplify_context (new_stmt);
5921 gimple_bind_append_vars (new_stmt, ctx->block_vars);
5922 BLOCK_VARS (block) = gimple_bind_vars (bind);
5923 if (BLOCK_VARS (block))
5924 TREE_USED (block) = 1;
5926 new_body = NULL;
5927 gimple_seq_add_seq (&new_body, ilist);
5928 gimple_seq_add_stmt (&new_body, stmt);
5929 gimple_seq_add_stmt (&new_body, gimple_build_omp_sections_switch ());
5930 gimple_seq_add_stmt (&new_body, bind);
5932 control = create_tmp_var (unsigned_type_node, ".section");
5933 t = gimple_build_omp_continue (control, control);
5934 gimple_omp_sections_set_control (stmt, control);
5935 gimple_seq_add_stmt (&new_body, t);
5937 gimple_seq_add_seq (&new_body, olist);
5938 if (ctx->cancellable)
5939 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
5940 gimple_seq_add_seq (&new_body, dlist);
5942 new_body = maybe_catch_exception (new_body);
5944 bool nowait = omp_find_clause (gimple_omp_sections_clauses (stmt),
5945 OMP_CLAUSE_NOWAIT) != NULL_TREE;
5946 t = gimple_build_omp_return (nowait);
5947 gimple_seq_add_stmt (&new_body, t);
5948 maybe_add_implicit_barrier_cancel (ctx, &new_body);
5950 gimple_bind_set_body (new_stmt, new_body);
5954 /* A subroutine of lower_omp_single. Expand the simple form of
5955 a GIMPLE_OMP_SINGLE, without a copyprivate clause:
5957 if (GOMP_single_start ())
5958 BODY;
5959 [ GOMP_barrier (); ] -> unless 'nowait' is present.
5961 FIXME. It may be better to delay expanding the logic of this until
5962 pass_expand_omp. The expanded logic may make the job more difficult
5963 to a synchronization analysis pass. */
5965 static void
5966 lower_omp_single_simple (gomp_single *single_stmt, gimple_seq *pre_p)
5968 location_t loc = gimple_location (single_stmt);
5969 tree tlabel = create_artificial_label (loc);
5970 tree flabel = create_artificial_label (loc);
5971 gimple *call, *cond;
5972 tree lhs, decl;
5974 decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_START);
5975 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (decl)));
5976 call = gimple_build_call (decl, 0);
5977 gimple_call_set_lhs (call, lhs);
5978 gimple_seq_add_stmt (pre_p, call);
5980 cond = gimple_build_cond (EQ_EXPR, lhs,
5981 fold_convert_loc (loc, TREE_TYPE (lhs),
5982 boolean_true_node),
5983 tlabel, flabel);
5984 gimple_seq_add_stmt (pre_p, cond);
5985 gimple_seq_add_stmt (pre_p, gimple_build_label (tlabel));
5986 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
5987 gimple_seq_add_stmt (pre_p, gimple_build_label (flabel));
5991 /* A subroutine of lower_omp_single. Expand the simple form of
5992 a GIMPLE_OMP_SINGLE, with a copyprivate clause:
5994 #pragma omp single copyprivate (a, b, c)
5996 Create a new structure to hold copies of 'a', 'b' and 'c' and emit:
5999 if ((copyout_p = GOMP_single_copy_start ()) == NULL)
6001 BODY;
6002 copyout.a = a;
6003 copyout.b = b;
6004 copyout.c = c;
6005 GOMP_single_copy_end (&copyout);
6007 else
6009 a = copyout_p->a;
6010 b = copyout_p->b;
6011 c = copyout_p->c;
6013 GOMP_barrier ();
6016 FIXME. It may be better to delay expanding the logic of this until
6017 pass_expand_omp. The expanded logic may make the job more difficult
6018 to a synchronization analysis pass. */
6020 static void
6021 lower_omp_single_copy (gomp_single *single_stmt, gimple_seq *pre_p,
6022 omp_context *ctx)
6024 tree ptr_type, t, l0, l1, l2, bfn_decl;
6025 gimple_seq copyin_seq;
6026 location_t loc = gimple_location (single_stmt);
6028 ctx->sender_decl = create_tmp_var (ctx->record_type, ".omp_copy_o");
6030 ptr_type = build_pointer_type (ctx->record_type);
6031 ctx->receiver_decl = create_tmp_var (ptr_type, ".omp_copy_i");
6033 l0 = create_artificial_label (loc);
6034 l1 = create_artificial_label (loc);
6035 l2 = create_artificial_label (loc);
6037 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_START);
6038 t = build_call_expr_loc (loc, bfn_decl, 0);
6039 t = fold_convert_loc (loc, ptr_type, t);
6040 gimplify_assign (ctx->receiver_decl, t, pre_p);
6042 t = build2 (EQ_EXPR, boolean_type_node, ctx->receiver_decl,
6043 build_int_cst (ptr_type, 0));
6044 t = build3 (COND_EXPR, void_type_node, t,
6045 build_and_jump (&l0), build_and_jump (&l1));
6046 gimplify_and_add (t, pre_p);
6048 gimple_seq_add_stmt (pre_p, gimple_build_label (l0));
6050 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
6052 copyin_seq = NULL;
6053 lower_copyprivate_clauses (gimple_omp_single_clauses (single_stmt), pre_p,
6054 &copyin_seq, ctx);
6056 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
6057 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_END);
6058 t = build_call_expr_loc (loc, bfn_decl, 1, t);
6059 gimplify_and_add (t, pre_p);
6061 t = build_and_jump (&l2);
6062 gimplify_and_add (t, pre_p);
6064 gimple_seq_add_stmt (pre_p, gimple_build_label (l1));
6066 gimple_seq_add_seq (pre_p, copyin_seq);
6068 gimple_seq_add_stmt (pre_p, gimple_build_label (l2));
6072 /* Expand code for an OpenMP single directive. */
6074 static void
6075 lower_omp_single (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6077 tree block;
6078 gomp_single *single_stmt = as_a <gomp_single *> (gsi_stmt (*gsi_p));
6079 gbind *bind;
6080 gimple_seq bind_body, bind_body_tail = NULL, dlist;
6082 push_gimplify_context ();
6084 block = make_node (BLOCK);
6085 bind = gimple_build_bind (NULL, NULL, block);
6086 gsi_replace (gsi_p, bind, true);
6087 bind_body = NULL;
6088 dlist = NULL;
6089 lower_rec_input_clauses (gimple_omp_single_clauses (single_stmt),
6090 &bind_body, &dlist, ctx, NULL);
6091 lower_omp (gimple_omp_body_ptr (single_stmt), ctx);
6093 gimple_seq_add_stmt (&bind_body, single_stmt);
6095 if (ctx->record_type)
6096 lower_omp_single_copy (single_stmt, &bind_body, ctx);
6097 else
6098 lower_omp_single_simple (single_stmt, &bind_body);
6100 gimple_omp_set_body (single_stmt, NULL);
6102 gimple_seq_add_seq (&bind_body, dlist);
6104 bind_body = maybe_catch_exception (bind_body);
6106 bool nowait = omp_find_clause (gimple_omp_single_clauses (single_stmt),
6107 OMP_CLAUSE_NOWAIT) != NULL_TREE;
6108 gimple *g = gimple_build_omp_return (nowait);
6109 gimple_seq_add_stmt (&bind_body_tail, g);
6110 maybe_add_implicit_barrier_cancel (ctx, &bind_body_tail);
6111 if (ctx->record_type)
6113 gimple_stmt_iterator gsi = gsi_start (bind_body_tail);
6114 tree clobber = build_constructor (ctx->record_type, NULL);
6115 TREE_THIS_VOLATILE (clobber) = 1;
6116 gsi_insert_after (&gsi, gimple_build_assign (ctx->sender_decl,
6117 clobber), GSI_SAME_STMT);
6119 gimple_seq_add_seq (&bind_body, bind_body_tail);
6120 gimple_bind_set_body (bind, bind_body);
6122 pop_gimplify_context (bind);
6124 gimple_bind_append_vars (bind, ctx->block_vars);
6125 BLOCK_VARS (block) = ctx->block_vars;
6126 if (BLOCK_VARS (block))
6127 TREE_USED (block) = 1;
6131 /* Expand code for an OpenMP master directive. */
6133 static void
6134 lower_omp_master (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6136 tree block, lab = NULL, x, bfn_decl;
6137 gimple *stmt = gsi_stmt (*gsi_p);
6138 gbind *bind;
6139 location_t loc = gimple_location (stmt);
6140 gimple_seq tseq;
6142 push_gimplify_context ();
6144 block = make_node (BLOCK);
6145 bind = gimple_build_bind (NULL, NULL, block);
6146 gsi_replace (gsi_p, bind, true);
6147 gimple_bind_add_stmt (bind, stmt);
6149 bfn_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
6150 x = build_call_expr_loc (loc, bfn_decl, 0);
6151 x = build2 (EQ_EXPR, boolean_type_node, x, integer_zero_node);
6152 x = build3 (COND_EXPR, void_type_node, x, NULL, build_and_jump (&lab));
6153 tseq = NULL;
6154 gimplify_and_add (x, &tseq);
6155 gimple_bind_add_seq (bind, tseq);
6157 lower_omp (gimple_omp_body_ptr (stmt), ctx);
6158 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
6159 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
6160 gimple_omp_set_body (stmt, NULL);
6162 gimple_bind_add_stmt (bind, gimple_build_label (lab));
6164 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
6166 pop_gimplify_context (bind);
6168 gimple_bind_append_vars (bind, ctx->block_vars);
6169 BLOCK_VARS (block) = ctx->block_vars;
6173 /* Expand code for an OpenMP taskgroup directive. */
6175 static void
6176 lower_omp_taskgroup (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6178 gimple *stmt = gsi_stmt (*gsi_p);
6179 gcall *x;
6180 gbind *bind;
6181 tree block = make_node (BLOCK);
6183 bind = gimple_build_bind (NULL, NULL, block);
6184 gsi_replace (gsi_p, bind, true);
6185 gimple_bind_add_stmt (bind, stmt);
6187 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_START),
6189 gimple_bind_add_stmt (bind, x);
6191 lower_omp (gimple_omp_body_ptr (stmt), ctx);
6192 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
6193 gimple_omp_set_body (stmt, NULL);
6195 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
6197 gimple_bind_append_vars (bind, ctx->block_vars);
6198 BLOCK_VARS (block) = ctx->block_vars;
6202 /* Fold the OMP_ORDERED_CLAUSES for the OMP_ORDERED in STMT if possible. */
6204 static void
6205 lower_omp_ordered_clauses (gimple_stmt_iterator *gsi_p, gomp_ordered *ord_stmt,
6206 omp_context *ctx)
6208 struct omp_for_data fd;
6209 if (!ctx->outer || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR)
6210 return;
6212 unsigned int len = gimple_omp_for_collapse (ctx->outer->stmt);
6213 struct omp_for_data_loop *loops = XALLOCAVEC (struct omp_for_data_loop, len);
6214 omp_extract_for_data (as_a <gomp_for *> (ctx->outer->stmt), &fd, loops);
6215 if (!fd.ordered)
6216 return;
6218 tree *list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
6219 tree c = gimple_omp_ordered_clauses (ord_stmt);
6220 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
6221 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
6223 /* Merge depend clauses from multiple adjacent
6224 #pragma omp ordered depend(sink:...) constructs
6225 into one #pragma omp ordered depend(sink:...), so that
6226 we can optimize them together. */
6227 gimple_stmt_iterator gsi = *gsi_p;
6228 gsi_next (&gsi);
6229 while (!gsi_end_p (gsi))
6231 gimple *stmt = gsi_stmt (gsi);
6232 if (is_gimple_debug (stmt)
6233 || gimple_code (stmt) == GIMPLE_NOP)
6235 gsi_next (&gsi);
6236 continue;
6238 if (gimple_code (stmt) != GIMPLE_OMP_ORDERED)
6239 break;
6240 gomp_ordered *ord_stmt2 = as_a <gomp_ordered *> (stmt);
6241 c = gimple_omp_ordered_clauses (ord_stmt2);
6242 if (c == NULL_TREE
6243 || OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND
6244 || OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
6245 break;
6246 while (*list_p)
6247 list_p = &OMP_CLAUSE_CHAIN (*list_p);
6248 *list_p = c;
6249 gsi_remove (&gsi, true);
6253 /* Canonicalize sink dependence clauses into one folded clause if
6254 possible.
6256 The basic algorithm is to create a sink vector whose first
6257 element is the GCD of all the first elements, and whose remaining
6258 elements are the minimum of the subsequent columns.
6260 We ignore dependence vectors whose first element is zero because
6261 such dependencies are known to be executed by the same thread.
6263 We take into account the direction of the loop, so a minimum
6264 becomes a maximum if the loop is iterating forwards. We also
6265 ignore sink clauses where the loop direction is unknown, or where
6266 the offsets are clearly invalid because they are not a multiple
6267 of the loop increment.
6269 For example:
6271 #pragma omp for ordered(2)
6272 for (i=0; i < N; ++i)
6273 for (j=0; j < M; ++j)
6275 #pragma omp ordered \
6276 depend(sink:i-8,j-2) \
6277 depend(sink:i,j-1) \ // Completely ignored because i+0.
6278 depend(sink:i-4,j-3) \
6279 depend(sink:i-6,j-4)
6280 #pragma omp ordered depend(source)
6283 Folded clause is:
6285 depend(sink:-gcd(8,4,6),-min(2,3,4))
6286 -or-
6287 depend(sink:-2,-2)
6290 /* FIXME: Computing GCD's where the first element is zero is
6291 non-trivial in the presence of collapsed loops. Do this later. */
6292 if (fd.collapse > 1)
6293 return;
6295 wide_int *folded_deps = XALLOCAVEC (wide_int, 2 * len - 1);
6296 memset (folded_deps, 0, sizeof (*folded_deps) * (2 * len - 1));
6297 tree folded_dep = NULL_TREE;
6298 /* TRUE if the first dimension's offset is negative. */
6299 bool neg_offset_p = false;
6301 list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
6302 unsigned int i;
6303 while ((c = *list_p) != NULL)
6305 bool remove = false;
6307 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND);
6308 if (OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
6309 goto next_ordered_clause;
6311 tree vec;
6312 for (vec = OMP_CLAUSE_DECL (c), i = 0;
6313 vec && TREE_CODE (vec) == TREE_LIST;
6314 vec = TREE_CHAIN (vec), ++i)
6316 gcc_assert (i < len);
6318 /* omp_extract_for_data has canonicalized the condition. */
6319 gcc_assert (fd.loops[i].cond_code == LT_EXPR
6320 || fd.loops[i].cond_code == GT_EXPR);
6321 bool forward = fd.loops[i].cond_code == LT_EXPR;
6322 bool maybe_lexically_later = true;
6324 /* While the committee makes up its mind, bail if we have any
6325 non-constant steps. */
6326 if (TREE_CODE (fd.loops[i].step) != INTEGER_CST)
6327 goto lower_omp_ordered_ret;
6329 tree itype = TREE_TYPE (TREE_VALUE (vec));
6330 if (POINTER_TYPE_P (itype))
6331 itype = sizetype;
6332 wide_int offset = wide_int::from (TREE_PURPOSE (vec),
6333 TYPE_PRECISION (itype),
6334 TYPE_SIGN (itype));
6336 /* Ignore invalid offsets that are not multiples of the step. */
6337 if (!wi::multiple_of_p
6338 (wi::abs (offset), wi::abs ((wide_int) fd.loops[i].step),
6339 UNSIGNED))
6341 warning_at (OMP_CLAUSE_LOCATION (c), 0,
6342 "ignoring sink clause with offset that is not "
6343 "a multiple of the loop step");
6344 remove = true;
6345 goto next_ordered_clause;
6348 /* Calculate the first dimension. The first dimension of
6349 the folded dependency vector is the GCD of the first
6350 elements, while ignoring any first elements whose offset
6351 is 0. */
6352 if (i == 0)
6354 /* Ignore dependence vectors whose first dimension is 0. */
6355 if (offset == 0)
6357 remove = true;
6358 goto next_ordered_clause;
6360 else
6362 if (!TYPE_UNSIGNED (itype) && (forward ^ wi::neg_p (offset)))
6364 error_at (OMP_CLAUSE_LOCATION (c),
6365 "first offset must be in opposite direction "
6366 "of loop iterations");
6367 goto lower_omp_ordered_ret;
6369 if (forward)
6370 offset = -offset;
6371 neg_offset_p = forward;
6372 /* Initialize the first time around. */
6373 if (folded_dep == NULL_TREE)
6375 folded_dep = c;
6376 folded_deps[0] = offset;
6378 else
6379 folded_deps[0] = wi::gcd (folded_deps[0],
6380 offset, UNSIGNED);
6383 /* Calculate minimum for the remaining dimensions. */
6384 else
6386 folded_deps[len + i - 1] = offset;
6387 if (folded_dep == c)
6388 folded_deps[i] = offset;
6389 else if (maybe_lexically_later
6390 && !wi::eq_p (folded_deps[i], offset))
6392 if (forward ^ wi::gts_p (folded_deps[i], offset))
6394 unsigned int j;
6395 folded_dep = c;
6396 for (j = 1; j <= i; j++)
6397 folded_deps[j] = folded_deps[len + j - 1];
6399 else
6400 maybe_lexically_later = false;
6404 gcc_assert (i == len);
6406 remove = true;
6408 next_ordered_clause:
6409 if (remove)
6410 *list_p = OMP_CLAUSE_CHAIN (c);
6411 else
6412 list_p = &OMP_CLAUSE_CHAIN (c);
6415 if (folded_dep)
6417 if (neg_offset_p)
6418 folded_deps[0] = -folded_deps[0];
6420 tree itype = TREE_TYPE (TREE_VALUE (OMP_CLAUSE_DECL (folded_dep)));
6421 if (POINTER_TYPE_P (itype))
6422 itype = sizetype;
6424 TREE_PURPOSE (OMP_CLAUSE_DECL (folded_dep))
6425 = wide_int_to_tree (itype, folded_deps[0]);
6426 OMP_CLAUSE_CHAIN (folded_dep) = gimple_omp_ordered_clauses (ord_stmt);
6427 *gimple_omp_ordered_clauses_ptr (ord_stmt) = folded_dep;
6430 lower_omp_ordered_ret:
6432 /* Ordered without clauses is #pragma omp threads, while we want
6433 a nop instead if we remove all clauses. */
6434 if (gimple_omp_ordered_clauses (ord_stmt) == NULL_TREE)
6435 gsi_replace (gsi_p, gimple_build_nop (), true);
6439 /* Expand code for an OpenMP ordered directive.  */
/* Replaces the GIMPLE_OMP_ORDERED at *GSI_P with a GIMPLE_BIND that
   brackets the lowered body with runtime entry/exit calls:
   IFN_GOMP_SIMD_ORDERED_START/END internal fns when a 'simd' clause is
   present, GOMP_ordered_start/GOMP_ordered_end libgomp calls otherwise.
   Ordered constructs carrying only 'depend' clauses are left in place
   here (their folding/verification is deferred to expansion).  CTX is
   the enclosing omp_context.  */
6441 static void
6442 lower_omp_ordered (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6444 tree block;
6445 gimple *stmt = gsi_stmt (*gsi_p), *g;
6446 gomp_ordered *ord_stmt = as_a <gomp_ordered *> (stmt);
6447 gcall *x;
6448 gbind *bind;
6449 bool simd = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
6450 OMP_CLAUSE_SIMD);
6451 /* FIXME: this should check presence of OMP_CLAUSE__SIMT_ on the enclosing
6452 loop.  */
6453 bool maybe_simt
6454 = simd && omp_maybe_offloaded_ctx (ctx) && omp_max_simt_vf () > 1;
6455 bool threads = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
6456 OMP_CLAUSE_THREADS);
6458 if (omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
6459 OMP_CLAUSE_DEPEND))
6461 /* FIXME: This needs to be moved to the expansion to verify various
6462 conditions only testable on cfg with dominators computed, and also
6463 all the depend clauses to be merged still might need to be available
6464 for the runtime checks.  */
6465 if (0)
6466 lower_omp_ordered_clauses (gsi_p, ord_stmt, ctx);
6467 return;
6470 push_gimplify_context ();
6472 block = make_node (BLOCK);
6473 bind = gimple_build_bind (NULL, NULL, block);
/* Replace the ordered stmt at *GSI_P first so STMT is detached from its
   sequence before being re-added inside BIND.  */
6474 gsi_replace (gsi_p, bind, true);
6475 gimple_bind_add_stmt (bind, stmt);
6477 if (simd)
6479 x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_START, 1,
6480 build_int_cst (NULL_TREE, threads));
6481 cfun->has_simduid_loops = true;
6483 else
6484 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_START),
6486 gimple_bind_add_stmt (bind, x);
6488 tree counter = NULL_TREE, test = NULL_TREE, body = NULL_TREE;
/* Under SIMT execution the lanes must run the ordered body one at a
   time: obtain this lane's number, then loop so only the lane whose
   IFN_GOMP_SIMT_ORDERED_PRED predicate fires executes the body on a
   given pass.  */
6489 if (maybe_simt)
6491 counter = create_tmp_var (integer_type_node);
6492 g = gimple_build_call_internal (IFN_GOMP_SIMT_LANE, 0);
6493 gimple_call_set_lhs (g, counter);
6494 gimple_bind_add_stmt (bind, g);
6496 body = create_artificial_label (UNKNOWN_LOCATION);
6497 test = create_artificial_label (UNKNOWN_LOCATION);
6498 gimple_bind_add_stmt (bind, gimple_build_label (body));
6500 tree simt_pred = create_tmp_var (integer_type_node);
6501 g = gimple_build_call_internal (IFN_GOMP_SIMT_ORDERED_PRED, 1, counter);
6502 gimple_call_set_lhs (g, simt_pred);
6503 gimple_bind_add_stmt (bind, g);
6505 tree t = create_artificial_label (UNKNOWN_LOCATION);
6506 g = gimple_build_cond (EQ_EXPR, simt_pred, integer_zero_node, t, test);
6507 gimple_bind_add_stmt (bind, g);
6509 gimple_bind_add_stmt (bind, gimple_build_label (t));
6511 lower_omp (gimple_omp_body_ptr (stmt), ctx);
6512 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
6513 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
6514 gimple_omp_set_body (stmt, NULL);
/* SIMT loop epilogue: decrement the lane counter and use a SIMT vote
   to branch back to the body while any lane's counter is still
   non-negative.  */
6516 if (maybe_simt)
6518 gimple_bind_add_stmt (bind, gimple_build_label (test));
6519 g = gimple_build_assign (counter, MINUS_EXPR, counter, integer_one_node);
6520 gimple_bind_add_stmt (bind, g);
6522 tree c = build2 (GE_EXPR, boolean_type_node, counter, integer_zero_node);
6523 tree nonneg = create_tmp_var (integer_type_node);
6524 gimple_seq tseq = NULL;
6525 gimplify_assign (nonneg, fold_convert (integer_type_node, c), &tseq);
6526 gimple_bind_add_seq (bind, tseq);
6528 g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY, 1, nonneg);
6529 gimple_call_set_lhs (g, nonneg);
6530 gimple_bind_add_stmt (bind, g);
6532 tree end = create_artificial_label (UNKNOWN_LOCATION);
6533 g = gimple_build_cond (NE_EXPR, nonneg, integer_zero_node, body, end);
6534 gimple_bind_add_stmt (bind, g);
6536 gimple_bind_add_stmt (bind, gimple_build_label (end));
6538 if (simd)
6539 x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_END, 1,
6540 build_int_cst (NULL_TREE, threads));
6541 else
6542 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_END),
6544 gimple_bind_add_stmt (bind, x);
6546 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
6548 pop_gimplify_context (bind);
6550 gimple_bind_append_vars (bind, ctx->block_vars);
6551 BLOCK_VARS (block) = gimple_bind_vars (bind);
6555 /* Gimplify a GIMPLE_OMP_CRITICAL statement.  This is a relatively simple
6556 substitution of a couple of function calls.  But in the NAMED case,
6557 it requires that languages coordinate a symbol name.  It is therefore
6558 best put here in common code.  */
/* Map from a critical section's name (an IDENTIFIER_NODE) to the global
   mutex variable synthesized for it; shared across the translation unit
   and registered with the garbage collector via GTY.  */
6560 static GTY(()) hash_map<tree, tree> *critical_name_mutexes;
/* Lower the GIMPLE_OMP_CRITICAL at *GSI_P: wrap its lowered body in
   GOMP_critical_start/end (unnamed) or GOMP_critical_name_start/end
   (named) calls inside a new GIMPLE_BIND, creating the per-name mutex
   symbol on first use.  CTX is the enclosing omp_context.  */
6562 static void
6563 lower_omp_critical (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6565 tree block;
6566 tree name, lock, unlock;
6567 gomp_critical *stmt = as_a <gomp_critical *> (gsi_stmt (*gsi_p));
6568 gbind *bind;
6569 location_t loc = gimple_location (stmt);
6570 gimple_seq tbody;
6572 name = gimple_omp_critical_name (stmt);
6573 if (name)
6575 tree decl;
6577 if (!critical_name_mutexes)
6578 critical_name_mutexes = hash_map<tree, tree>::create_ggc (10);
6580 tree *n = critical_name_mutexes->get (name);
6581 if (n == NULL)
6583 char *new_str;
6585 decl = create_tmp_var_raw (ptr_type_node);
/* Public + common so every translation unit that uses the same
   critical name shares a single mutex object at link time.  */
6587 new_str = ACONCAT ((".gomp_critical_user_",
6588 IDENTIFIER_POINTER (name), NULL));
6589 DECL_NAME (decl) = get_identifier (new_str);
6590 TREE_PUBLIC (decl) = 1;
6591 TREE_STATIC (decl) = 1;
6592 DECL_COMMON (decl) = 1;
6593 DECL_ARTIFICIAL (decl) = 1;
6594 DECL_IGNORED_P (decl) = 1;
6596 varpool_node::finalize_decl (decl);
6598 critical_name_mutexes->put (name, decl);
6600 else
6601 decl = *n;
6603 /* If '#pragma omp critical' is inside offloaded region or
6604 inside function marked as offloadable, the symbol must be
6605 marked as offloadable too.  */
6606 omp_context *octx;
6607 if (cgraph_node::get (current_function_decl)->offloadable)
6608 varpool_node::get_create (decl)->offloadable = 1;
6609 else
6610 for (octx = ctx->outer; octx; octx = octx->outer)
6611 if (is_gimple_omp_offloaded (octx->stmt))
6613 varpool_node::get_create (decl)->offloadable = 1;
6614 break;
6617 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_START);
6618 lock = build_call_expr_loc (loc, lock, 1,
6619 build_fold_addr_expr_loc (loc, decl));
6621 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_END);
6622 unlock = build_call_expr_loc (loc, unlock, 1,
6623 build_fold_addr_expr_loc (loc, decl));
6625 else
6627 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_START);
6628 lock = build_call_expr_loc (loc, lock, 0);
6630 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_END);
6631 unlock = build_call_expr_loc (loc, unlock, 0);
6634 push_gimplify_context ();
6636 block = make_node (BLOCK);
6637 bind = gimple_build_bind (NULL, NULL, block);
6638 gsi_replace (gsi_p, bind, true);
6639 gimple_bind_add_stmt (bind, stmt);
/* Emit the lock call, then the lowered body, then the unlock call.  */
6641 tbody = gimple_bind_body (bind);
6642 gimplify_and_add (lock, &tbody);
6643 gimple_bind_set_body (bind, tbody);
6645 lower_omp (gimple_omp_body_ptr (stmt), ctx);
6646 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
6647 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
6648 gimple_omp_set_body (stmt, NULL);
6650 tbody = gimple_bind_body (bind);
6651 gimplify_and_add (unlock, &tbody);
6652 gimple_bind_set_body (bind, tbody);
6654 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
6656 pop_gimplify_context (bind);
6657 gimple_bind_append_vars (bind, ctx->block_vars);
6658 BLOCK_VARS (block) = gimple_bind_vars (bind);
6661 /* A subroutine of lower_omp_for. Generate code to emit the predicate
6662 for a lastprivate clause. Given a loop control predicate of (V
6663 cond N2), we gate the clause on (!(V cond N2)). The lowered form
6664 is appended to *DLIST, iterator initialization is appended to
6665 *BODY_P. */
6667 static void
6668 lower_omp_for_lastprivate (struct omp_for_data *fd, gimple_seq *body_p,
6669 gimple_seq *dlist, struct omp_context *ctx)
6671 tree clauses, cond, vinit;
6672 enum tree_code cond_code;
6673 gimple_seq stmts;
6675 cond_code = fd->loop.cond_code;
/* Invert the loop condition: the lastprivate copy-out must run only
   once the loop predicate has become false.  */
6676 cond_code = cond_code == LT_EXPR ? GE_EXPR : LE_EXPR;
6678 /* When possible, use a strict equality expression.  This can let VRP
6679 type optimizations deduce the value and remove a copy.  */
6680 if (tree_fits_shwi_p (fd->loop.step))
6682 HOST_WIDE_INT step = tree_to_shwi (fd->loop.step);
6683 if (step == 1 || step == -1)
6684 cond_code = EQ_EXPR;
6687 if (gimple_omp_for_kind (fd->for_stmt) == GF_OMP_FOR_KIND_GRID_LOOP
6688 || gimple_omp_for_grid_phony (fd->for_stmt))
6689 cond = omp_grid_lastprivate_predicate (fd);
6690 else
6692 tree n2 = fd->loop.n2;
/* For collapsed loops combined into an outer construct with a
   non-constant end value, the real bound lives in a _LOOPTEMP_
   clause on the enclosing parallel/task (or in the outer loop's
   extracted data); dig it out so the predicate compares the
   iteration variable against the correct bound.  */
6693 if (fd->collapse > 1
6694 && TREE_CODE (n2) != INTEGER_CST
6695 && gimple_omp_for_combined_into_p (fd->for_stmt))
6697 struct omp_context *taskreg_ctx = NULL;
6698 if (gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
6700 gomp_for *gfor = as_a <gomp_for *> (ctx->outer->stmt);
6701 if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_FOR
6702 || gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_DISTRIBUTE)
6704 if (gimple_omp_for_combined_into_p (gfor))
6706 gcc_assert (ctx->outer->outer
6707 && is_parallel_ctx (ctx->outer->outer));
6708 taskreg_ctx = ctx->outer->outer;
6710 else
6712 struct omp_for_data outer_fd;
6713 omp_extract_for_data (gfor, &outer_fd, NULL);
6714 n2 = fold_convert (TREE_TYPE (n2), outer_fd.loop.n2);
6717 else if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_TASKLOOP)
6718 taskreg_ctx = ctx->outer->outer;
6720 else if (is_taskreg_ctx (ctx->outer))
6721 taskreg_ctx = ctx->outer;
6722 if (taskreg_ctx)
6724 int i;
6725 tree taskreg_clauses
6726 = gimple_omp_taskreg_clauses (taskreg_ctx->stmt);
6727 tree innerc = omp_find_clause (taskreg_clauses,
6728 OMP_CLAUSE__LOOPTEMP_);
6729 gcc_assert (innerc);
/* Skip the first fd->collapse _LOOPTEMP_ clauses (istart/iend
   and the count temporaries); the next one, if present, holds
   the end value to compare against.  */
6730 for (i = 0; i < fd->collapse; i++)
6732 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
6733 OMP_CLAUSE__LOOPTEMP_);
6734 gcc_assert (innerc);
6736 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
6737 OMP_CLAUSE__LOOPTEMP_);
6738 if (innerc)
6739 n2 = fold_convert (TREE_TYPE (n2),
6740 lookup_decl (OMP_CLAUSE_DECL (innerc),
6741 taskreg_ctx));
6744 cond = build2 (cond_code, boolean_type_node, fd->loop.v, n2);
6747 clauses = gimple_omp_for_clauses (fd->for_stmt);
6748 stmts = NULL;
6749 lower_lastprivate_clauses (clauses, cond, &stmts, ctx);
6750 if (!gimple_seq_empty_p (stmts))
6752 gimple_seq_add_seq (&stmts, *dlist);
6753 *dlist = stmts;
6755 /* Optimize: v = 0; is usually cheaper than v = some_other_constant.  */
6756 vinit = fd->loop.n1;
6757 if (cond_code == EQ_EXPR
6758 && tree_fits_shwi_p (fd->loop.n2)
6759 && ! integer_zerop (fd->loop.n2))
6760 vinit = build_int_cst (TREE_TYPE (fd->loop.v), 0);
6761 else
6762 vinit = unshare_expr (vinit);
6764 /* Initialize the iterator variable, so that threads that don't execute
6765 any iterations don't execute the lastprivate clauses by accident.  */
6766 gimplify_assign (fd->loop.v, vinit, body_p);
6771 /* Lower code for an OMP loop directive. */
/* Lower the GIMPLE_OMP_FOR at *GSI_P: create _LOOPTEMP_ clauses for
   loops combined into an enclosing construct, lower the data-sharing
   clauses and the loop header expressions, emit lastprivate and
   reduction handling plus the GIMPLE_OMP_CONTINUE/GIMPLE_OMP_RETURN
   markers, all inside a new GIMPLE_BIND that replaces the original
   statement.  CTX is the loop's omp_context.  */
6773 static void
6774 lower_omp_for (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6776 tree *rhs_p, block;
6777 struct omp_for_data fd, *fdp = NULL;
6778 gomp_for *stmt = as_a <gomp_for *> (gsi_stmt (*gsi_p));
6779 gbind *new_stmt;
6780 gimple_seq omp_for_body, body, dlist;
6781 gimple_seq oacc_head = NULL, oacc_tail = NULL;
6782 size_t i;
6784 push_gimplify_context ();
6786 lower_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
6788 block = make_node (BLOCK);
6789 new_stmt = gimple_build_bind (NULL, NULL, block);
6790 /* Replace at gsi right away, so that 'stmt' is no member
6791 of a sequence anymore as we're going to add to a different
6792 one below.  */
6793 gsi_replace (gsi_p, new_stmt, true);
6795 /* Move declaration of temporaries in the loop body before we make
6796 it go away.  */
6797 omp_for_body = gimple_omp_body (stmt);
6798 if (!gimple_seq_empty_p (omp_for_body)
6799 && gimple_code (gimple_seq_first_stmt (omp_for_body)) == GIMPLE_BIND)
6801 gbind *inner_bind
6802 = as_a <gbind *> (gimple_seq_first_stmt (omp_for_body));
6803 tree vars = gimple_bind_vars (inner_bind);
6804 gimple_bind_append_vars (new_stmt, vars);
6805 /* bind_vars/BLOCK_VARS are being moved to new_stmt/block, don't
6806 keep them on the inner_bind and its block.  */
6807 gimple_bind_set_vars (inner_bind, NULL_TREE);
6808 if (gimple_bind_block (inner_bind))
6809 BLOCK_VARS (gimple_bind_block (inner_bind)) = NULL_TREE;
/* When this loop was combined into an enclosing construct, publish
   _LOOPTEMP_ clauses (istart/iend plus count temporaries) so the outer
   construct can communicate iteration bounds to the loop.  */
6812 if (gimple_omp_for_combined_into_p (stmt))
6814 omp_extract_for_data (stmt, &fd, NULL);
6815 fdp = &fd;
6817 /* We need two temporaries with fd.loop.v type (istart/iend)
6818 and then (fd.collapse - 1) temporaries with the same
6819 type for count2 ... countN-1 vars if not constant.  */
6820 size_t count = 2;
6821 tree type = fd.iter_type;
6822 if (fd.collapse > 1
6823 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
6824 count += fd.collapse - 1;
6825 bool taskreg_for
6826 = (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR
6827 || gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP);
6828 tree outerc = NULL, *pc = gimple_omp_for_clauses_ptr (stmt);
6829 tree simtc = NULL;
6830 tree clauses = *pc;
6831 if (taskreg_for)
6832 outerc
6833 = omp_find_clause (gimple_omp_taskreg_clauses (ctx->outer->stmt),
6834 OMP_CLAUSE__LOOPTEMP_);
6835 if (ctx->simt_stmt)
6836 simtc = omp_find_clause (gimple_omp_for_clauses (ctx->simt_stmt),
6837 OMP_CLAUSE__LOOPTEMP_);
6838 for (i = 0; i < count; i++)
6840 tree temp;
6841 if (taskreg_for)
6843 gcc_assert (outerc);
6844 temp = lookup_decl (OMP_CLAUSE_DECL (outerc), ctx->outer);
6845 outerc = omp_find_clause (OMP_CLAUSE_CHAIN (outerc),
6846 OMP_CLAUSE__LOOPTEMP_);
6848 else
6850 /* If there are 2 adjacent SIMD stmts, one with _simt_
6851 clause, another without, make sure they have the same
6852 decls in _looptemp_ clauses, because the outer stmt
6853 they are combined into will look up just one inner_stmt.  */
6854 if (ctx->simt_stmt)
6855 temp = OMP_CLAUSE_DECL (simtc);
6856 else
6857 temp = create_tmp_var (type);
6858 insert_decl_map (&ctx->outer->cb, temp, temp);
6860 *pc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
6861 OMP_CLAUSE_DECL (*pc) = temp;
6862 pc = &OMP_CLAUSE_CHAIN (*pc);
6863 if (ctx->simt_stmt)
6864 simtc = omp_find_clause (OMP_CLAUSE_CHAIN (simtc),
6865 OMP_CLAUSE__LOOPTEMP_);
6867 *pc = clauses;
6870 /* The pre-body and input clauses go before the lowered GIMPLE_OMP_FOR.  */
6871 dlist = NULL;
6872 body = NULL;
6873 lower_rec_input_clauses (gimple_omp_for_clauses (stmt), &body, &dlist, ctx,
6874 fdp);
6875 gimple_seq_add_seq (&body, gimple_omp_for_pre_body (stmt));
6877 lower_omp (gimple_omp_body_ptr (stmt), ctx);
6879 /* Lower the header expressions.  At this point, we can assume that
6880 the header is of the form:
6882 #pragma omp for (V = VAL1; V {<|>|<=|>=} VAL2; V = V [+-] VAL3)
6884 We just need to make sure that VAL1, VAL2 and VAL3 are lowered
6885 using the .omp_data_s mapping, if needed.  */
6886 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
6888 rhs_p = gimple_omp_for_initial_ptr (stmt, i);
6889 if (!is_gimple_min_invariant (*rhs_p))
6890 *rhs_p = get_formal_tmp_var (*rhs_p, &body);
6892 rhs_p = gimple_omp_for_final_ptr (stmt, i);
6893 if (!is_gimple_min_invariant (*rhs_p))
6894 *rhs_p = get_formal_tmp_var (*rhs_p, &body);
6896 rhs_p = &TREE_OPERAND (gimple_omp_for_incr (stmt, i), 1);
6897 if (!is_gimple_min_invariant (*rhs_p))
6898 *rhs_p = get_formal_tmp_var (*rhs_p, &body);
6901 /* Once lowered, extract the bounds and clauses.  */
6902 omp_extract_for_data (stmt, &fd, NULL);
6904 if (is_gimple_omp_oacc (ctx->stmt)
6905 && !ctx_in_oacc_kernels_region (ctx))
6906 lower_oacc_head_tail (gimple_location (stmt),
6907 gimple_omp_for_clauses (stmt),
6908 &oacc_head, &oacc_tail, ctx);
6910 /* Add OpenACC partitioning and reduction markers just before the loop.  */
6911 if (oacc_head)
6912 gimple_seq_add_seq (&body, oacc_head);
6914 lower_omp_for_lastprivate (&fd, &body, &dlist, ctx);
/* Remap linear clause decls and steps into this context for worksharing
   loops that copy the linear value in.  */
6916 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
6917 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
6918 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
6919 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
6921 OMP_CLAUSE_DECL (c) = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
6922 if (DECL_P (OMP_CLAUSE_LINEAR_STEP (c)))
6923 OMP_CLAUSE_LINEAR_STEP (c)
6924 = maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_LINEAR_STEP (c),
6925 ctx);
/* A "phony" gridified loop keeps its body but drops the OMP_FOR stmt
   and the continue/return region markers.  */
6928 bool phony_loop = (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_GRID_LOOP
6929 && gimple_omp_for_grid_phony (stmt));
6930 if (!phony_loop)
6931 gimple_seq_add_stmt (&body, stmt);
6932 gimple_seq_add_seq (&body, gimple_omp_body (stmt));
6934 if (!phony_loop)
6935 gimple_seq_add_stmt (&body, gimple_build_omp_continue (fd.loop.v,
6936 fd.loop.v));
6938 /* After the loop, add exit clauses.  */
6939 lower_reduction_clauses (gimple_omp_for_clauses (stmt), &body, ctx);
6941 if (ctx->cancellable)
6942 gimple_seq_add_stmt (&body, gimple_build_label (ctx->cancel_label));
6944 gimple_seq_add_seq (&body, dlist);
6946 body = maybe_catch_exception (body);
6948 if (!phony_loop)
6950 /* Region exit marker goes at the end of the loop body.  */
6951 gimple_seq_add_stmt (&body, gimple_build_omp_return (fd.have_nowait));
6952 maybe_add_implicit_barrier_cancel (ctx, &body);
6955 /* Add OpenACC joining and reduction markers just after the loop.  */
6956 if (oacc_tail)
6957 gimple_seq_add_seq (&body, oacc_tail);
6959 pop_gimplify_context (new_stmt);
6961 gimple_bind_append_vars (new_stmt, ctx->block_vars);
6962 BLOCK_VARS (block) = gimple_bind_vars (new_stmt);
6963 if (BLOCK_VARS (block))
6964 TREE_USED (block) = 1;
6966 gimple_bind_set_body (new_stmt, body);
6967 gimple_omp_set_body (stmt, NULL);
6968 gimple_omp_for_set_pre_body (stmt, NULL);
6971 /* Callback for walk_stmts. Check if the current statement only contains
6972 GIMPLE_OMP_FOR or GIMPLE_OMP_SECTIONS. */
6974 static tree
6975 check_combined_parallel (gimple_stmt_iterator *gsi_p,
6976 bool *handled_ops_p,
6977 struct walk_stmt_info *wi)
/* WI->info points at an int used as a tri-state accumulator:
   0 = nothing seen yet, 1 = exactly one worksharing construct seen,
   -1 = the parallel body cannot be treated as combined.  */
6979 int *info = (int *) wi->info;
6980 gimple *stmt = gsi_stmt (*gsi_p);
6982 *handled_ops_p = true;
6983 switch (gimple_code (stmt))
6985 WALK_SUBSTMTS;
6987 case GIMPLE_OMP_FOR:
6988 case GIMPLE_OMP_SECTIONS:
/* The first worksharing construct makes this a candidate; a second
   one disqualifies it.  */
6989 *info = *info == 0 ? 1 : -1;
6990 break;
6991 default:
/* Any other statement in the body disqualifies the parallel.  */
6992 *info = -1;
6993 break;
6995 return NULL;
/* State threaded through the task copy-function builder; extends
   copy_body_data so tree-inline.c callbacks can up-cast it.  */
6998 struct omp_taskcopy_context
7000 /* This field must be at the beginning, as we do "inheritance": Some
7001 callback functions for tree-inline.c (e.g., omp_copy_decl)
7002 receive a copy_body_data pointer that is up-casted to an
7003 omp_context pointer.  */
7004 copy_body_data cb;
/* The OMP task region whose copy function is being built.  */
7005 omp_context *ctx;
/* copy_body_data::copy_decl hook used while building a task copyfn:
   a variable present in the sender field map (sfield_map) gets a fresh
   temporary in the destination function; any other decl is returned
   unchanged.  */
7008 static tree
7009 task_copyfn_copy_decl (tree var, copy_body_data *cb)
7011 struct omp_taskcopy_context *tcctx = (struct omp_taskcopy_context *) cb;
7013 if (splay_tree_lookup (tcctx->ctx->sfield_map, (splay_tree_key) var))
7014 return create_tmp_var (TREE_TYPE (var));
7016 return var;
/* Return a fresh RECORD_TYPE mirroring ORIG_TYPE with every field's
   type, size and offset remapped through TCCTX's copy_body_data.  This
   is needed when the record contains variably-modified types whose size
   expressions reference decls of the source function.  Each original
   field is entered into the decl map so later lookups find its copy.  */
7019 static tree
7020 task_copyfn_remap_type (struct omp_taskcopy_context *tcctx, tree orig_type)
7022 tree name, new_fields = NULL, type, f;
7024 type = lang_hooks.types.make_type (RECORD_TYPE);
7025 name = DECL_NAME (TYPE_NAME (orig_type));
7026 name = build_decl (gimple_location (tcctx->ctx->stmt),
7027 TYPE_DECL, name, type);
7028 TYPE_NAME (type) = name;
7030 for (f = TYPE_FIELDS (orig_type); f ; f = TREE_CHAIN (f))
7032 tree new_f = copy_node (f);
7033 DECL_CONTEXT (new_f) = type;
7034 TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &tcctx->cb);
/* Fields are pushed in reverse; nreverse below restores order.  */
7035 TREE_CHAIN (new_f) = new_fields;
7036 walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &tcctx->cb, NULL);
7037 walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r, &tcctx->cb, NULL);
7038 walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
7039 &tcctx->cb, NULL);
7040 new_fields = new_f;
7041 tcctx->cb.decl_map->put (f, new_f);
7043 TYPE_FIELDS (type) = nreverse (new_fields);
7044 layout_type (type);
7045 return type;
7048 /* Create task copyfn.  */
/* Build the body of the task copy function for TASK_STMT — the function
   libgomp invokes to copy firstprivate data from the sender record into
   the task's own record.  CTX is the task's omp_context; its
   record_type/srecord_type describe the destination and sender records.
   The child function's two arguments point at the destination record
   (ARG) and the sender record (SARG) respectively.  */
7050 static void
7051 create_task_copyfn (gomp_task *task_stmt, omp_context *ctx)
7053 struct function *child_cfun;
7054 tree child_fn, t, c, src, dst, f, sf, arg, sarg, decl;
7055 tree record_type, srecord_type, bind, list;
7056 bool record_needs_remap = false, srecord_needs_remap = false;
7057 splay_tree_node n;
7058 struct omp_taskcopy_context tcctx;
7059 location_t loc = gimple_location (task_stmt);
7061 child_fn = gimple_omp_task_copy_fn (task_stmt);
7062 child_cfun = DECL_STRUCT_FUNCTION (child_fn);
7063 gcc_assert (child_cfun->cfg == NULL);
7064 DECL_SAVED_TREE (child_fn) = alloc_stmt_list ();
7066 /* Reset DECL_CONTEXT on function arguments.  */
7067 for (t = DECL_ARGUMENTS (child_fn); t; t = DECL_CHAIN (t))
7068 DECL_CONTEXT (t) = child_fn;
7070 /* Populate the function.  */
7071 push_gimplify_context ();
7072 push_cfun (child_cfun);
7074 bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
7075 TREE_SIDE_EFFECTS (bind) = 1;
7076 list = NULL;
7077 DECL_SAVED_TREE (child_fn) = bind;
7078 DECL_SOURCE_LOCATION (child_fn) = gimple_location (task_stmt);
7080 /* Remap src and dst argument types if needed.  */
7081 record_type = ctx->record_type;
7082 srecord_type = ctx->srecord_type;
7083 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
7084 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
7086 record_needs_remap = true;
7087 break;
7089 for (f = TYPE_FIELDS (srecord_type); f ; f = DECL_CHAIN (f))
7090 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
7092 srecord_needs_remap = true;
7093 break;
/* Variably-modified field types must be remapped into the child
   function; set up a copy_body_data for that and remap the records.  */
7096 if (record_needs_remap || srecord_needs_remap)
7098 memset (&tcctx, '\0', sizeof (tcctx));
7099 tcctx.cb.src_fn = ctx->cb.src_fn;
7100 tcctx.cb.dst_fn = child_fn;
7101 tcctx.cb.src_node = cgraph_node::get (tcctx.cb.src_fn);
7102 gcc_checking_assert (tcctx.cb.src_node);
7103 tcctx.cb.dst_node = tcctx.cb.src_node;
7104 tcctx.cb.src_cfun = ctx->cb.src_cfun;
7105 tcctx.cb.copy_decl = task_copyfn_copy_decl;
7106 tcctx.cb.eh_lp_nr = 0;
7107 tcctx.cb.transform_call_graph_edges = CB_CGE_MOVE;
7108 tcctx.cb.decl_map = new hash_map<tree, tree>;
7109 tcctx.ctx = ctx;
7111 if (record_needs_remap)
7112 record_type = task_copyfn_remap_type (&tcctx, record_type);
7113 if (srecord_needs_remap)
7114 srecord_type = task_copyfn_remap_type (&tcctx, srecord_type);
7116 else
7117 tcctx.cb.decl_map = NULL;
7119 arg = DECL_ARGUMENTS (child_fn);
7120 TREE_TYPE (arg) = build_pointer_type (record_type);
7121 sarg = DECL_CHAIN (arg);
7122 TREE_TYPE (sarg) = build_pointer_type (srecord_type);
7124 /* First pass: initialize temporaries used in record_type and srecord_type
7125 sizes and field offsets.  */
7126 if (tcctx.cb.decl_map)
7127 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
7128 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7130 tree *p;
7132 decl = OMP_CLAUSE_DECL (c);
7133 p = tcctx.cb.decl_map->get (decl);
7134 if (p == NULL)
7135 continue;
7136 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
7137 sf = (tree) n->value;
7138 sf = *tcctx.cb.decl_map->get (sf);
7139 src = build_simple_mem_ref_loc (loc, sarg);
7140 src = omp_build_component_ref (src, sf);
7141 t = build2 (MODIFY_EXPR, TREE_TYPE (*p), *p, src);
7142 append_to_statement_list (t, &list);
7145 /* Second pass: copy shared var pointers and copy construct non-VLA
7146 firstprivate vars.  */
7147 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
7148 switch (OMP_CLAUSE_CODE (c))
7150 splay_tree_key key;
7151 case OMP_CLAUSE_SHARED:
7152 decl = OMP_CLAUSE_DECL (c);
/* SHARED_FIRSTPRIVATE vars are keyed by &DECL_UID rather than the
   decl itself in the field maps.  */
7153 key = (splay_tree_key) decl;
7154 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
7155 key = (splay_tree_key) &DECL_UID (decl);
7156 n = splay_tree_lookup (ctx->field_map, key);
7157 if (n == NULL)
7158 break;
7159 f = (tree) n->value;
7160 if (tcctx.cb.decl_map)
7161 f = *tcctx.cb.decl_map->get (f);
7162 n = splay_tree_lookup (ctx->sfield_map, key);
7163 sf = (tree) n->value;
7164 if (tcctx.cb.decl_map)
7165 sf = *tcctx.cb.decl_map->get (sf);
7166 src = build_simple_mem_ref_loc (loc, sarg);
7167 src = omp_build_component_ref (src, sf);
7168 dst = build_simple_mem_ref_loc (loc, arg);
7169 dst = omp_build_component_ref (dst, f);
7170 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
7171 append_to_statement_list (t, &list);
7172 break;
7173 case OMP_CLAUSE_FIRSTPRIVATE:
7174 decl = OMP_CLAUSE_DECL (c);
/* Variable-sized firstprivates are handled in the last pass.  */
7175 if (is_variable_sized (decl))
7176 break;
7177 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
7178 if (n == NULL)
7179 break;
7180 f = (tree) n->value;
7181 if (tcctx.cb.decl_map)
7182 f = *tcctx.cb.decl_map->get (f);
7183 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
7184 if (n != NULL)
7186 sf = (tree) n->value;
7187 if (tcctx.cb.decl_map)
7188 sf = *tcctx.cb.decl_map->get (sf);
7189 src = build_simple_mem_ref_loc (loc, sarg);
7190 src = omp_build_component_ref (src, sf);
7191 if (use_pointer_for_field (decl, NULL) || omp_is_reference (decl))
7192 src = build_simple_mem_ref_loc (loc, src);
7194 else
7195 src = decl;
7196 dst = build_simple_mem_ref_loc (loc, arg);
7197 dst = omp_build_component_ref (dst, f);
/* Use the language's copy constructor hook so C++ copy ctors etc.
   run for the firstprivate copy.  */
7198 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
7199 append_to_statement_list (t, &list);
7200 break;
7201 case OMP_CLAUSE_PRIVATE:
7202 if (! OMP_CLAUSE_PRIVATE_OUTER_REF (c))
7203 break;
7204 decl = OMP_CLAUSE_DECL (c);
7205 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
7206 f = (tree) n->value;
7207 if (tcctx.cb.decl_map)
7208 f = *tcctx.cb.decl_map->get (f);
7209 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
7210 if (n != NULL)
7212 sf = (tree) n->value;
7213 if (tcctx.cb.decl_map)
7214 sf = *tcctx.cb.decl_map->get (sf);
7215 src = build_simple_mem_ref_loc (loc, sarg);
7216 src = omp_build_component_ref (src, sf);
7217 if (use_pointer_for_field (decl, NULL))
7218 src = build_simple_mem_ref_loc (loc, src);
7220 else
7221 src = decl;
7222 dst = build_simple_mem_ref_loc (loc, arg);
7223 dst = omp_build_component_ref (dst, f);
7224 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
7225 append_to_statement_list (t, &list);
7226 break;
7227 default:
7228 break;
7231 /* Last pass: handle VLA firstprivates.  */
7232 if (tcctx.cb.decl_map)
7233 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
7234 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7236 tree ind, ptr, df;
7238 decl = OMP_CLAUSE_DECL (c);
7239 if (!is_variable_sized (decl))
7240 continue;
7241 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
7242 if (n == NULL)
7243 continue;
7244 f = (tree) n->value;
7245 f = *tcctx.cb.decl_map->get (f);
/* A VLA firstprivate is represented as *ptr via DECL_VALUE_EXPR;
   copy the pointed-to data and then store the address of the new
   copy into the destination record's pointer field.  */
7246 gcc_assert (DECL_HAS_VALUE_EXPR_P (decl));
7247 ind = DECL_VALUE_EXPR (decl);
7248 gcc_assert (TREE_CODE (ind) == INDIRECT_REF);
7249 gcc_assert (DECL_P (TREE_OPERAND (ind, 0)));
7250 n = splay_tree_lookup (ctx->sfield_map,
7251 (splay_tree_key) TREE_OPERAND (ind, 0));
7252 sf = (tree) n->value;
7253 sf = *tcctx.cb.decl_map->get (sf);
7254 src = build_simple_mem_ref_loc (loc, sarg);
7255 src = omp_build_component_ref (src, sf);
7256 src = build_simple_mem_ref_loc (loc, src);
7257 dst = build_simple_mem_ref_loc (loc, arg);
7258 dst = omp_build_component_ref (dst, f);
7259 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
7260 append_to_statement_list (t, &list);
7261 n = splay_tree_lookup (ctx->field_map,
7262 (splay_tree_key) TREE_OPERAND (ind, 0));
7263 df = (tree) n->value;
7264 df = *tcctx.cb.decl_map->get (df);
7265 ptr = build_simple_mem_ref_loc (loc, arg);
7266 ptr = omp_build_component_ref (ptr, df);
7267 t = build2 (MODIFY_EXPR, TREE_TYPE (ptr), ptr,
7268 build_fold_addr_expr_loc (loc, dst));
7269 append_to_statement_list (t, &list);
7272 t = build1 (RETURN_EXPR, void_type_node, NULL);
7273 append_to_statement_list (t, &list);
7275 if (tcctx.cb.decl_map)
7276 delete tcctx.cb.decl_map;
7277 pop_gimplify_context (NULL);
7278 BIND_EXPR_BODY (bind) = list;
7279 pop_cfun ();
/* Lower the OMP_CLAUSE_DEPEND clauses in *PCLAUSES into the address
   array libgomp expects: element 0 holds the total number of depend
   addresses, element 1 the number of out/inout ones, followed by the
   out/inout addresses and then the in addresses.  Initialization code
   is appended to *ISEQ and a clobber marking the array's end of life
   to *OSEQ.  An artificial depend clause pointing at the array is
   prepended to *PCLAUSES.  */
7282 static void
7283 lower_depend_clauses (tree *pclauses, gimple_seq *iseq, gimple_seq *oseq)
7285 tree c, clauses;
7286 gimple *g;
7287 size_t n_in = 0, n_out = 0, idx = 2, i;
7289 clauses = omp_find_clause (*pclauses, OMP_CLAUSE_DEPEND);
7290 gcc_assert (clauses);
/* Count in vs. out/inout dependences to size and fill the array.  */
7291 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
7292 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
7293 switch (OMP_CLAUSE_DEPEND_KIND (c))
7295 case OMP_CLAUSE_DEPEND_IN:
7296 n_in++;
7297 break;
7298 case OMP_CLAUSE_DEPEND_OUT:
7299 case OMP_CLAUSE_DEPEND_INOUT:
7300 n_out++;
7301 break;
7302 case OMP_CLAUSE_DEPEND_SOURCE:
7303 case OMP_CLAUSE_DEPEND_SINK:
7304 /* FALLTHRU */
7305 default:
7306 gcc_unreachable ();
7308 tree type = build_array_type_nelts (ptr_type_node, n_in + n_out + 2);
7309 tree array = create_tmp_var (type);
7310 TREE_ADDRESSABLE (array) = 1;
7311 tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
7312 NULL_TREE);
7313 g = gimple_build_assign (r, build_int_cst (ptr_type_node, n_in + n_out));
7314 gimple_seq_add_stmt (iseq, g);
7315 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
7316 NULL_TREE);
7317 g = gimple_build_assign (r, build_int_cst (ptr_type_node, n_out));
7318 gimple_seq_add_stmt (iseq, g);
/* Two passes over the clauses: i == 0 stores the out/inout addresses,
   i == 1 the in addresses, preserving the required array layout.  */
7319 for (i = 0; i < 2; i++)
7321 if ((i ? n_in : n_out) == 0)
7322 continue;
7323 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
7324 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
7325 && ((OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_IN) ^ i))
7327 tree t = OMP_CLAUSE_DECL (c);
7328 t = fold_convert (ptr_type_node, t);
7329 gimplify_expr (&t, iseq, NULL, is_gimple_val, fb_rvalue);
7330 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (idx++),
7331 NULL_TREE, NULL_TREE);
7332 g = gimple_build_assign (r, t);
7333 gimple_seq_add_stmt (iseq, g);
7336 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
7337 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
7338 OMP_CLAUSE_CHAIN (c) = *pclauses;
7339 *pclauses = c;
/* Clobber the array after the construct so its stack slot can be
   reused once the dependences have been registered.  */
7340 tree clobber = build_constructor (type, NULL);
7341 TREE_THIS_VOLATILE (clobber) = 1;
7342 g = gimple_build_assign (array, clobber);
7343 gimple_seq_add_stmt (oseq, g);
7346 /* Lower the OpenMP parallel or task directive in the current statement
7347 in GSI_P. CTX holds context information for the directive. */
7349 static void
7350 lower_omp_taskreg (gimple_stmt_iterator *gsi_p, omp_context *ctx)
/* NOTE(review): this rendered view has dropped the blank and brace-only
   lines of the original file (the embedded numbering skips, e.g.
   7351/7358/7366); all original code tokens below are untouched --
   only comments have been added.  */
7352 tree clauses;
7353 tree child_fn, t;
7354 gimple *stmt = gsi_stmt (*gsi_p);
7355 gbind *par_bind, *bind, *dep_bind = NULL;
7356 gimple_seq par_body, olist, ilist, par_olist, par_rlist, par_ilist, new_body;
7357 location_t loc = gimple_location (stmt);
7359 clauses = gimple_omp_taskreg_clauses (stmt);
/* The region body is expected to start with a single GIMPLE_BIND
   (the as_a cast checks this); pull out its statement sequence.  */
7360 par_bind
7361 = as_a <gbind *> (gimple_seq_first_stmt (gimple_omp_body (stmt)));
7362 par_body = gimple_bind_body (par_bind);
7363 child_fn = ctx->cb.dst_fn;
/* For a parallel not already marked combined, walk the body counting
   worksharing constructs; exactly one means it can be treated as a
   combined parallel-worksharing construct.  */
7364 if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL
7365 && !gimple_omp_parallel_combined_p (stmt))
7367 struct walk_stmt_info wi;
7368 int ws_num = 0;
7370 memset (&wi, 0, sizeof (wi));
7371 wi.info = &ws_num;
7372 wi.val_only = true;
7373 walk_gimple_seq (par_body, check_combined_parallel, NULL, &wi);
7374 if (ws_num == 1)
7375 gimple_omp_parallel_set_combined_p (stmt, true);
/* Lower depend clauses on a task up front; DEP_BIND will later wrap
   the whole construct between dep_ilist and dep_olist.  */
7377 gimple_seq dep_ilist = NULL;
7378 gimple_seq dep_olist = NULL;
7379 if (gimple_code (stmt) == GIMPLE_OMP_TASK
7380 && omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
7382 push_gimplify_context ();
7383 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
7384 lower_depend_clauses (gimple_omp_task_clauses_ptr (stmt),
7385 &dep_ilist, &dep_olist);
/* A task with a separate sender record type needs a copy function.  */
7388 if (ctx->srecord_type)
7389 create_task_copyfn (as_a <gomp_task *> (stmt), ctx);
7391 push_gimplify_context ();
7393 par_olist = NULL;
7394 par_ilist = NULL;
7395 par_rlist = NULL;
/* A grid-phony parallel is not outlined: its lowered body is emitted
   inline (see the !phony_construct tests below).  It may still need a
   receiver decl for its data record.  */
7396 bool phony_construct = gimple_code (stmt) == GIMPLE_OMP_PARALLEL
7397 && gimple_omp_parallel_grid_phony (as_a <gomp_parallel *> (stmt));
7398 if (phony_construct && ctx->record_type)
7400 gcc_checking_assert (!ctx->receiver_decl);
7401 ctx->receiver_decl = create_tmp_var
7402 (build_reference_type (ctx->record_type), ".omp_rec");
/* Lower the data-sharing clauses and then the region body itself;
   reductions only apply to the parallel form here.  */
7404 lower_rec_input_clauses (clauses, &par_ilist, &par_olist, ctx, NULL);
7405 lower_omp (&par_body, ctx);
7406 if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL)
7407 lower_reduction_clauses (clauses, &par_rlist, ctx);
7409 /* Declare all the variables created by mapping and the variables
7410 declared in the scope of the parallel body. */
7411 record_vars_into (ctx->block_vars, child_fn);
7412 record_vars_into (gimple_bind_vars (par_bind), child_fn);
/* Build the sender record (.omp_data_o) used to marshal shared data
   to the outlined child function, and register it as the data arg.  */
7414 if (ctx->record_type)
7416 ctx->sender_decl
7417 = create_tmp_var (ctx->srecord_type ? ctx->srecord_type
7418 : ctx->record_type, ".omp_data_o")
7419 DECL_NAMELESS (ctx->sender_decl) = 1;
7420 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
7421 gimple_omp_taskreg_set_data_arg (stmt, ctx->sender_decl);
7424 olist = NULL;
7425 ilist = NULL;
7426 lower_send_clauses (clauses, &ilist, &olist, ctx);
7427 lower_send_shared_vars (&ilist, &olist, ctx);
/* Clobber the sender record after the region so its storage can be
   reused (empty-CONSTRUCTOR assignment marked volatile).  */
7429 if (ctx->record_type)
7431 tree clobber = build_constructor (TREE_TYPE (ctx->sender_decl), NULL);
7432 TREE_THIS_VOLATILE (clobber) = 1;
7433 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
7434 clobber));
7437 /* Once all the expansions are done, sequence all the different
7438 fragments inside gimple_omp_body. */
7440 new_body = NULL;
/* Inside the child: receiver_decl = &sender record.  */
7442 if (ctx->record_type)
7444 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
7445 /* fixup_child_record_type might have changed receiver_decl's type. */
7446 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
7447 gimple_seq_add_stmt (&new_body,
7448 gimple_build_assign (ctx->receiver_decl, t));
/* Order: receive/init (par_ilist), body, reductions, cancel label,
   copy-out (par_olist).  */
7451 gimple_seq_add_seq (&new_body, par_ilist);
7452 gimple_seq_add_seq (&new_body, par_body);
7453 gimple_seq_add_seq (&new_body, par_rlist);
/* Cancellation jumps land here, after the body but before copy-out.  */
7454 if (ctx->cancellable)
7455 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
7456 gimple_seq_add_seq (&new_body, par_olist);
7457 new_body = maybe_catch_exception (new_body);
7458 if (gimple_code (stmt) == GIMPLE_OMP_TASK)
7459 gimple_seq_add_stmt (&new_body,
7460 gimple_build_omp_continue (integer_zero_node,
7461 integer_zero_node));
7462 if (!phony_construct)
7464 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
7465 gimple_omp_set_body (stmt, new_body);
/* Replace the original statement with a bind containing the send-side
   setup (ilist), the construct itself (or its lowered body when
   phony), then the send-side teardown (olist).  */
7468 bind = gimple_build_bind (NULL, NULL, gimple_bind_block (par_bind));
7469 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
7470 gimple_bind_add_seq (bind, ilist);
7471 if (!phony_construct)
7472 gimple_bind_add_stmt (bind, stmt);
7473 else
7474 gimple_bind_add_seq (bind, new_body);
7475 gimple_bind_add_seq (bind, olist);
7477 pop_gimplify_context (NULL);
/* If depend clauses were lowered, wrap everything in DEP_BIND and pop
   the extra gimplify context pushed for it above.  */
7479 if (dep_bind)
7481 gimple_bind_add_seq (dep_bind, dep_ilist);
7482 gimple_bind_add_stmt (dep_bind, bind);
7483 gimple_bind_add_seq (dep_bind, dep_olist);
7484 pop_gimplify_context (dep_bind);
7488 /* Lower the GIMPLE_OMP_TARGET in the current statement
7489 in GSI_P. CTX holds context information for the directive. */
7491 static void
7492 lower_omp_target (gimple_stmt_iterator *gsi_p, omp_context *ctx)
7494 tree clauses;
7495 tree child_fn, t, c;
7496 gomp_target *stmt = as_a <gomp_target *> (gsi_stmt (*gsi_p));
7497 gbind *tgt_bind, *bind, *dep_bind = NULL;
7498 gimple_seq tgt_body, olist, ilist, fplist, new_body;
7499 location_t loc = gimple_location (stmt);
7500 bool offloaded, data_region;
7501 unsigned int map_cnt = 0;
7503 offloaded = is_gimple_omp_offloaded (stmt);
7504 switch (gimple_omp_target_kind (stmt))
7506 case GF_OMP_TARGET_KIND_REGION:
7507 case GF_OMP_TARGET_KIND_UPDATE:
7508 case GF_OMP_TARGET_KIND_ENTER_DATA:
7509 case GF_OMP_TARGET_KIND_EXIT_DATA:
7510 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
7511 case GF_OMP_TARGET_KIND_OACC_KERNELS:
7512 case GF_OMP_TARGET_KIND_OACC_UPDATE:
7513 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
7514 case GF_OMP_TARGET_KIND_OACC_DECLARE:
7515 data_region = false;
7516 break;
7517 case GF_OMP_TARGET_KIND_DATA:
7518 case GF_OMP_TARGET_KIND_OACC_DATA:
7519 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
7520 data_region = true;
7521 break;
7522 default:
7523 gcc_unreachable ();
7526 clauses = gimple_omp_target_clauses (stmt);
7528 gimple_seq dep_ilist = NULL;
7529 gimple_seq dep_olist = NULL;
7530 if (omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
7532 push_gimplify_context ();
7533 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
7534 lower_depend_clauses (gimple_omp_target_clauses_ptr (stmt),
7535 &dep_ilist, &dep_olist);
7538 tgt_bind = NULL;
7539 tgt_body = NULL;
7540 if (offloaded)
7542 tgt_bind = gimple_seq_first_stmt_as_a_bind (gimple_omp_body (stmt));
7543 tgt_body = gimple_bind_body (tgt_bind);
7545 else if (data_region)
7546 tgt_body = gimple_omp_body (stmt);
7547 child_fn = ctx->cb.dst_fn;
7549 push_gimplify_context ();
7550 fplist = NULL;
7552 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7553 switch (OMP_CLAUSE_CODE (c))
7555 tree var, x;
7557 default:
7558 break;
7559 case OMP_CLAUSE_MAP:
7560 #if CHECKING_P
7561 /* First check what we're prepared to handle in the following. */
7562 switch (OMP_CLAUSE_MAP_KIND (c))
7564 case GOMP_MAP_ALLOC:
7565 case GOMP_MAP_TO:
7566 case GOMP_MAP_FROM:
7567 case GOMP_MAP_TOFROM:
7568 case GOMP_MAP_POINTER:
7569 case GOMP_MAP_TO_PSET:
7570 case GOMP_MAP_DELETE:
7571 case GOMP_MAP_RELEASE:
7572 case GOMP_MAP_ALWAYS_TO:
7573 case GOMP_MAP_ALWAYS_FROM:
7574 case GOMP_MAP_ALWAYS_TOFROM:
7575 case GOMP_MAP_FIRSTPRIVATE_POINTER:
7576 case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
7577 case GOMP_MAP_STRUCT:
7578 case GOMP_MAP_ALWAYS_POINTER:
7579 break;
7580 case GOMP_MAP_FORCE_ALLOC:
7581 case GOMP_MAP_FORCE_TO:
7582 case GOMP_MAP_FORCE_FROM:
7583 case GOMP_MAP_FORCE_TOFROM:
7584 case GOMP_MAP_FORCE_PRESENT:
7585 case GOMP_MAP_FORCE_DEVICEPTR:
7586 case GOMP_MAP_DEVICE_RESIDENT:
7587 case GOMP_MAP_LINK:
7588 gcc_assert (is_gimple_omp_oacc (stmt));
7589 break;
7590 default:
7591 gcc_unreachable ();
7593 #endif
7594 /* FALLTHRU */
7595 case OMP_CLAUSE_TO:
7596 case OMP_CLAUSE_FROM:
7597 oacc_firstprivate:
7598 var = OMP_CLAUSE_DECL (c);
7599 if (!DECL_P (var))
7601 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP
7602 || (!OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
7603 && (OMP_CLAUSE_MAP_KIND (c)
7604 != GOMP_MAP_FIRSTPRIVATE_POINTER)))
7605 map_cnt++;
7606 continue;
7609 if (DECL_SIZE (var)
7610 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
7612 tree var2 = DECL_VALUE_EXPR (var);
7613 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
7614 var2 = TREE_OPERAND (var2, 0);
7615 gcc_assert (DECL_P (var2));
7616 var = var2;
7619 if (offloaded
7620 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7621 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
7622 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
7624 if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
7626 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx))
7627 && varpool_node::get_create (var)->offloadable)
7628 continue;
7630 tree type = build_pointer_type (TREE_TYPE (var));
7631 tree new_var = lookup_decl (var, ctx);
7632 x = create_tmp_var_raw (type, get_name (new_var));
7633 gimple_add_tmp_var (x);
7634 x = build_simple_mem_ref (x);
7635 SET_DECL_VALUE_EXPR (new_var, x);
7636 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7638 continue;
7641 if (!maybe_lookup_field (var, ctx))
7642 continue;
7644 /* Don't remap oacc parallel reduction variables, because the
7645 intermediate result must be local to each gang. */
7646 if (offloaded && !(OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7647 && OMP_CLAUSE_MAP_IN_REDUCTION (c)))
7649 x = build_receiver_ref (var, true, ctx);
7650 tree new_var = lookup_decl (var, ctx);
7652 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7653 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
7654 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
7655 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
7656 x = build_simple_mem_ref (x);
7657 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7659 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
7660 if (omp_is_reference (new_var))
7662 /* Create a local object to hold the instance
7663 value. */
7664 tree type = TREE_TYPE (TREE_TYPE (new_var));
7665 const char *id = IDENTIFIER_POINTER (DECL_NAME (new_var));
7666 tree inst = create_tmp_var (type, id);
7667 gimplify_assign (inst, fold_indirect_ref (x), &fplist);
7668 x = build_fold_addr_expr (inst);
7670 gimplify_assign (new_var, x, &fplist);
7672 else if (DECL_P (new_var))
7674 SET_DECL_VALUE_EXPR (new_var, x);
7675 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7677 else
7678 gcc_unreachable ();
7680 map_cnt++;
7681 break;
7683 case OMP_CLAUSE_FIRSTPRIVATE:
7684 if (is_oacc_parallel (ctx))
7685 goto oacc_firstprivate;
7686 map_cnt++;
7687 var = OMP_CLAUSE_DECL (c);
7688 if (!omp_is_reference (var)
7689 && !is_gimple_reg_type (TREE_TYPE (var)))
7691 tree new_var = lookup_decl (var, ctx);
7692 if (is_variable_sized (var))
7694 tree pvar = DECL_VALUE_EXPR (var);
7695 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
7696 pvar = TREE_OPERAND (pvar, 0);
7697 gcc_assert (DECL_P (pvar));
7698 tree new_pvar = lookup_decl (pvar, ctx);
7699 x = build_fold_indirect_ref (new_pvar);
7700 TREE_THIS_NOTRAP (x) = 1;
7702 else
7703 x = build_receiver_ref (var, true, ctx);
7704 SET_DECL_VALUE_EXPR (new_var, x);
7705 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7707 break;
7709 case OMP_CLAUSE_PRIVATE:
7710 if (is_gimple_omp_oacc (ctx->stmt))
7711 break;
7712 var = OMP_CLAUSE_DECL (c);
7713 if (is_variable_sized (var))
7715 tree new_var = lookup_decl (var, ctx);
7716 tree pvar = DECL_VALUE_EXPR (var);
7717 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
7718 pvar = TREE_OPERAND (pvar, 0);
7719 gcc_assert (DECL_P (pvar));
7720 tree new_pvar = lookup_decl (pvar, ctx);
7721 x = build_fold_indirect_ref (new_pvar);
7722 TREE_THIS_NOTRAP (x) = 1;
7723 SET_DECL_VALUE_EXPR (new_var, x);
7724 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7726 break;
7728 case OMP_CLAUSE_USE_DEVICE_PTR:
7729 case OMP_CLAUSE_IS_DEVICE_PTR:
7730 var = OMP_CLAUSE_DECL (c);
7731 map_cnt++;
7732 if (is_variable_sized (var))
7734 tree new_var = lookup_decl (var, ctx);
7735 tree pvar = DECL_VALUE_EXPR (var);
7736 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
7737 pvar = TREE_OPERAND (pvar, 0);
7738 gcc_assert (DECL_P (pvar));
7739 tree new_pvar = lookup_decl (pvar, ctx);
7740 x = build_fold_indirect_ref (new_pvar);
7741 TREE_THIS_NOTRAP (x) = 1;
7742 SET_DECL_VALUE_EXPR (new_var, x);
7743 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7745 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
7747 tree new_var = lookup_decl (var, ctx);
7748 tree type = build_pointer_type (TREE_TYPE (var));
7749 x = create_tmp_var_raw (type, get_name (new_var));
7750 gimple_add_tmp_var (x);
7751 x = build_simple_mem_ref (x);
7752 SET_DECL_VALUE_EXPR (new_var, x);
7753 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7755 else
7757 tree new_var = lookup_decl (var, ctx);
7758 x = create_tmp_var_raw (TREE_TYPE (new_var), get_name (new_var));
7759 gimple_add_tmp_var (x);
7760 SET_DECL_VALUE_EXPR (new_var, x);
7761 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7763 break;
7766 if (offloaded)
7768 target_nesting_level++;
7769 lower_omp (&tgt_body, ctx);
7770 target_nesting_level--;
7772 else if (data_region)
7773 lower_omp (&tgt_body, ctx);
7775 if (offloaded)
7777 /* Declare all the variables created by mapping and the variables
7778 declared in the scope of the target body. */
7779 record_vars_into (ctx->block_vars, child_fn);
7780 record_vars_into (gimple_bind_vars (tgt_bind), child_fn);
7783 olist = NULL;
7784 ilist = NULL;
7785 if (ctx->record_type)
7787 ctx->sender_decl
7788 = create_tmp_var (ctx->record_type, ".omp_data_arr");
7789 DECL_NAMELESS (ctx->sender_decl) = 1;
7790 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
7791 t = make_tree_vec (3);
7792 TREE_VEC_ELT (t, 0) = ctx->sender_decl;
7793 TREE_VEC_ELT (t, 1)
7794 = create_tmp_var (build_array_type_nelts (size_type_node, map_cnt),
7795 ".omp_data_sizes");
7796 DECL_NAMELESS (TREE_VEC_ELT (t, 1)) = 1;
7797 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 1)) = 1;
7798 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 1;
7799 tree tkind_type = short_unsigned_type_node;
7800 int talign_shift = 8;
7801 TREE_VEC_ELT (t, 2)
7802 = create_tmp_var (build_array_type_nelts (tkind_type, map_cnt),
7803 ".omp_data_kinds");
7804 DECL_NAMELESS (TREE_VEC_ELT (t, 2)) = 1;
7805 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 2)) = 1;
7806 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 1;
7807 gimple_omp_target_set_data_arg (stmt, t);
7809 vec<constructor_elt, va_gc> *vsize;
7810 vec<constructor_elt, va_gc> *vkind;
7811 vec_alloc (vsize, map_cnt);
7812 vec_alloc (vkind, map_cnt);
7813 unsigned int map_idx = 0;
7815 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7816 switch (OMP_CLAUSE_CODE (c))
7818 tree ovar, nc, s, purpose, var, x, type;
7819 unsigned int talign;
7821 default:
7822 break;
7824 case OMP_CLAUSE_MAP:
7825 case OMP_CLAUSE_TO:
7826 case OMP_CLAUSE_FROM:
7827 oacc_firstprivate_map:
7828 nc = c;
7829 ovar = OMP_CLAUSE_DECL (c);
7830 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7831 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
7832 || (OMP_CLAUSE_MAP_KIND (c)
7833 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
7834 break;
7835 if (!DECL_P (ovar))
7837 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7838 && OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
7840 gcc_checking_assert (OMP_CLAUSE_DECL (OMP_CLAUSE_CHAIN (c))
7841 == get_base_address (ovar));
7842 nc = OMP_CLAUSE_CHAIN (c);
7843 ovar = OMP_CLAUSE_DECL (nc);
7845 else
7847 tree x = build_sender_ref (ovar, ctx);
7848 tree v
7849 = build_fold_addr_expr_with_type (ovar, ptr_type_node);
7850 gimplify_assign (x, v, &ilist);
7851 nc = NULL_TREE;
7854 else
7856 if (DECL_SIZE (ovar)
7857 && TREE_CODE (DECL_SIZE (ovar)) != INTEGER_CST)
7859 tree ovar2 = DECL_VALUE_EXPR (ovar);
7860 gcc_assert (TREE_CODE (ovar2) == INDIRECT_REF);
7861 ovar2 = TREE_OPERAND (ovar2, 0);
7862 gcc_assert (DECL_P (ovar2));
7863 ovar = ovar2;
7865 if (!maybe_lookup_field (ovar, ctx))
7866 continue;
7869 talign = TYPE_ALIGN_UNIT (TREE_TYPE (ovar));
7870 if (DECL_P (ovar) && DECL_ALIGN_UNIT (ovar) > talign)
7871 talign = DECL_ALIGN_UNIT (ovar);
7872 if (nc)
7874 var = lookup_decl_in_outer_ctx (ovar, ctx);
7875 x = build_sender_ref (ovar, ctx);
7877 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7878 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
7879 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
7880 && TREE_CODE (TREE_TYPE (ovar)) == ARRAY_TYPE)
7882 gcc_assert (offloaded);
7883 tree avar
7884 = create_tmp_var (TREE_TYPE (TREE_TYPE (x)));
7885 mark_addressable (avar);
7886 gimplify_assign (avar, build_fold_addr_expr (var), &ilist);
7887 talign = DECL_ALIGN_UNIT (avar);
7888 avar = build_fold_addr_expr (avar);
7889 gimplify_assign (x, avar, &ilist);
7891 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7893 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
7894 if (!omp_is_reference (var))
7896 if (is_gimple_reg (var)
7897 && OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
7898 TREE_NO_WARNING (var) = 1;
7899 var = build_fold_addr_expr (var);
7901 else
7902 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
7903 gimplify_assign (x, var, &ilist);
7905 else if (is_gimple_reg (var))
7907 gcc_assert (offloaded);
7908 tree avar = create_tmp_var (TREE_TYPE (var));
7909 mark_addressable (avar);
7910 enum gomp_map_kind map_kind = OMP_CLAUSE_MAP_KIND (c);
7911 if (GOMP_MAP_COPY_TO_P (map_kind)
7912 || map_kind == GOMP_MAP_POINTER
7913 || map_kind == GOMP_MAP_TO_PSET
7914 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
7916 /* If we need to initialize a temporary
7917 with VAR because it is not addressable, and
7918 the variable hasn't been initialized yet, then
7919 we'll get a warning for the store to avar.
7920 Don't warn in that case, the mapping might
7921 be implicit. */
7922 TREE_NO_WARNING (var) = 1;
7923 gimplify_assign (avar, var, &ilist);
7925 avar = build_fold_addr_expr (avar);
7926 gimplify_assign (x, avar, &ilist);
7927 if ((GOMP_MAP_COPY_FROM_P (map_kind)
7928 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
7929 && !TYPE_READONLY (TREE_TYPE (var)))
7931 x = unshare_expr (x);
7932 x = build_simple_mem_ref (x);
7933 gimplify_assign (var, x, &olist);
7936 else
7938 var = build_fold_addr_expr (var);
7939 gimplify_assign (x, var, &ilist);
7942 s = NULL_TREE;
7943 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7945 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
7946 s = TREE_TYPE (ovar);
7947 if (TREE_CODE (s) == REFERENCE_TYPE)
7948 s = TREE_TYPE (s);
7949 s = TYPE_SIZE_UNIT (s);
7951 else
7952 s = OMP_CLAUSE_SIZE (c);
7953 if (s == NULL_TREE)
7954 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
7955 s = fold_convert (size_type_node, s);
7956 purpose = size_int (map_idx++);
7957 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
7958 if (TREE_CODE (s) != INTEGER_CST)
7959 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
7961 unsigned HOST_WIDE_INT tkind, tkind_zero;
7962 switch (OMP_CLAUSE_CODE (c))
7964 case OMP_CLAUSE_MAP:
7965 tkind = OMP_CLAUSE_MAP_KIND (c);
7966 tkind_zero = tkind;
7967 if (OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c))
7968 switch (tkind)
7970 case GOMP_MAP_ALLOC:
7971 case GOMP_MAP_TO:
7972 case GOMP_MAP_FROM:
7973 case GOMP_MAP_TOFROM:
7974 case GOMP_MAP_ALWAYS_TO:
7975 case GOMP_MAP_ALWAYS_FROM:
7976 case GOMP_MAP_ALWAYS_TOFROM:
7977 case GOMP_MAP_RELEASE:
7978 case GOMP_MAP_FORCE_TO:
7979 case GOMP_MAP_FORCE_FROM:
7980 case GOMP_MAP_FORCE_TOFROM:
7981 case GOMP_MAP_FORCE_PRESENT:
7982 tkind_zero = GOMP_MAP_ZERO_LEN_ARRAY_SECTION;
7983 break;
7984 case GOMP_MAP_DELETE:
7985 tkind_zero = GOMP_MAP_DELETE_ZERO_LEN_ARRAY_SECTION;
7986 default:
7987 break;
7989 if (tkind_zero != tkind)
7991 if (integer_zerop (s))
7992 tkind = tkind_zero;
7993 else if (integer_nonzerop (s))
7994 tkind_zero = tkind;
7996 break;
7997 case OMP_CLAUSE_FIRSTPRIVATE:
7998 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
7999 tkind = GOMP_MAP_TO;
8000 tkind_zero = tkind;
8001 break;
8002 case OMP_CLAUSE_TO:
8003 tkind = GOMP_MAP_TO;
8004 tkind_zero = tkind;
8005 break;
8006 case OMP_CLAUSE_FROM:
8007 tkind = GOMP_MAP_FROM;
8008 tkind_zero = tkind;
8009 break;
8010 default:
8011 gcc_unreachable ();
8013 gcc_checking_assert (tkind
8014 < (HOST_WIDE_INT_C (1U) << talign_shift));
8015 gcc_checking_assert (tkind_zero
8016 < (HOST_WIDE_INT_C (1U) << talign_shift));
8017 talign = ceil_log2 (talign);
8018 tkind |= talign << talign_shift;
8019 tkind_zero |= talign << talign_shift;
8020 gcc_checking_assert (tkind
8021 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
8022 gcc_checking_assert (tkind_zero
8023 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
8024 if (tkind == tkind_zero)
8025 x = build_int_cstu (tkind_type, tkind);
8026 else
8028 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 0;
8029 x = build3 (COND_EXPR, tkind_type,
8030 fold_build2 (EQ_EXPR, boolean_type_node,
8031 unshare_expr (s), size_zero_node),
8032 build_int_cstu (tkind_type, tkind_zero),
8033 build_int_cstu (tkind_type, tkind));
8035 CONSTRUCTOR_APPEND_ELT (vkind, purpose, x);
8036 if (nc && nc != c)
8037 c = nc;
8038 break;
8040 case OMP_CLAUSE_FIRSTPRIVATE:
8041 if (is_oacc_parallel (ctx))
8042 goto oacc_firstprivate_map;
8043 ovar = OMP_CLAUSE_DECL (c);
8044 if (omp_is_reference (ovar))
8045 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
8046 else
8047 talign = DECL_ALIGN_UNIT (ovar);
8048 var = lookup_decl_in_outer_ctx (ovar, ctx);
8049 x = build_sender_ref (ovar, ctx);
8050 tkind = GOMP_MAP_FIRSTPRIVATE;
8051 type = TREE_TYPE (ovar);
8052 if (omp_is_reference (ovar))
8053 type = TREE_TYPE (type);
8054 if ((INTEGRAL_TYPE_P (type)
8055 && TYPE_PRECISION (type) <= POINTER_SIZE)
8056 || TREE_CODE (type) == POINTER_TYPE)
8058 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
8059 tree t = var;
8060 if (omp_is_reference (var))
8061 t = build_simple_mem_ref (var);
8062 else if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
8063 TREE_NO_WARNING (var) = 1;
8064 if (TREE_CODE (type) != POINTER_TYPE)
8065 t = fold_convert (pointer_sized_int_node, t);
8066 t = fold_convert (TREE_TYPE (x), t);
8067 gimplify_assign (x, t, &ilist);
8069 else if (omp_is_reference (var))
8070 gimplify_assign (x, var, &ilist);
8071 else if (is_gimple_reg (var))
8073 tree avar = create_tmp_var (TREE_TYPE (var));
8074 mark_addressable (avar);
8075 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
8076 TREE_NO_WARNING (var) = 1;
8077 gimplify_assign (avar, var, &ilist);
8078 avar = build_fold_addr_expr (avar);
8079 gimplify_assign (x, avar, &ilist);
8081 else
8083 var = build_fold_addr_expr (var);
8084 gimplify_assign (x, var, &ilist);
8086 if (tkind == GOMP_MAP_FIRSTPRIVATE_INT)
8087 s = size_int (0);
8088 else if (omp_is_reference (ovar))
8089 s = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
8090 else
8091 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
8092 s = fold_convert (size_type_node, s);
8093 purpose = size_int (map_idx++);
8094 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
8095 if (TREE_CODE (s) != INTEGER_CST)
8096 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
8098 gcc_checking_assert (tkind
8099 < (HOST_WIDE_INT_C (1U) << talign_shift));
8100 talign = ceil_log2 (talign);
8101 tkind |= talign << talign_shift;
8102 gcc_checking_assert (tkind
8103 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
8104 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
8105 build_int_cstu (tkind_type, tkind));
8106 break;
8108 case OMP_CLAUSE_USE_DEVICE_PTR:
8109 case OMP_CLAUSE_IS_DEVICE_PTR:
8110 ovar = OMP_CLAUSE_DECL (c);
8111 var = lookup_decl_in_outer_ctx (ovar, ctx);
8112 x = build_sender_ref (ovar, ctx);
8113 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR)
8114 tkind = GOMP_MAP_USE_DEVICE_PTR;
8115 else
8116 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
8117 type = TREE_TYPE (ovar);
8118 if (TREE_CODE (type) == ARRAY_TYPE)
8119 var = build_fold_addr_expr (var);
8120 else
8122 if (omp_is_reference (ovar))
8124 type = TREE_TYPE (type);
8125 if (TREE_CODE (type) != ARRAY_TYPE)
8126 var = build_simple_mem_ref (var);
8127 var = fold_convert (TREE_TYPE (x), var);
8130 gimplify_assign (x, var, &ilist);
8131 s = size_int (0);
8132 purpose = size_int (map_idx++);
8133 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
8134 gcc_checking_assert (tkind
8135 < (HOST_WIDE_INT_C (1U) << talign_shift));
8136 gcc_checking_assert (tkind
8137 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
8138 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
8139 build_int_cstu (tkind_type, tkind));
8140 break;
8143 gcc_assert (map_idx == map_cnt);
8145 DECL_INITIAL (TREE_VEC_ELT (t, 1))
8146 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 1)), vsize);
8147 DECL_INITIAL (TREE_VEC_ELT (t, 2))
8148 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 2)), vkind);
8149 for (int i = 1; i <= 2; i++)
8150 if (!TREE_STATIC (TREE_VEC_ELT (t, i)))
8152 gimple_seq initlist = NULL;
8153 force_gimple_operand (build1 (DECL_EXPR, void_type_node,
8154 TREE_VEC_ELT (t, i)),
8155 &initlist, true, NULL_TREE);
8156 gimple_seq_add_seq (&ilist, initlist);
8158 tree clobber = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, i)),
8159 NULL);
8160 TREE_THIS_VOLATILE (clobber) = 1;
8161 gimple_seq_add_stmt (&olist,
8162 gimple_build_assign (TREE_VEC_ELT (t, i),
8163 clobber));
8166 tree clobber = build_constructor (ctx->record_type, NULL);
8167 TREE_THIS_VOLATILE (clobber) = 1;
8168 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
8169 clobber));
8172 /* Once all the expansions are done, sequence all the different
8173 fragments inside gimple_omp_body. */
8175 new_body = NULL;
8177 if (offloaded
8178 && ctx->record_type)
8180 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
8181 /* fixup_child_record_type might have changed receiver_decl's type. */
8182 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
8183 gimple_seq_add_stmt (&new_body,
8184 gimple_build_assign (ctx->receiver_decl, t));
8186 gimple_seq_add_seq (&new_body, fplist);
8188 if (offloaded || data_region)
8190 tree prev = NULL_TREE;
8191 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
8192 switch (OMP_CLAUSE_CODE (c))
8194 tree var, x;
8195 default:
8196 break;
8197 case OMP_CLAUSE_FIRSTPRIVATE:
8198 if (is_gimple_omp_oacc (ctx->stmt))
8199 break;
8200 var = OMP_CLAUSE_DECL (c);
8201 if (omp_is_reference (var)
8202 || is_gimple_reg_type (TREE_TYPE (var)))
8204 tree new_var = lookup_decl (var, ctx);
8205 tree type;
8206 type = TREE_TYPE (var);
8207 if (omp_is_reference (var))
8208 type = TREE_TYPE (type);
8209 if ((INTEGRAL_TYPE_P (type)
8210 && TYPE_PRECISION (type) <= POINTER_SIZE)
8211 || TREE_CODE (type) == POINTER_TYPE)
8213 x = build_receiver_ref (var, false, ctx);
8214 if (TREE_CODE (type) != POINTER_TYPE)
8215 x = fold_convert (pointer_sized_int_node, x);
8216 x = fold_convert (type, x);
8217 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
8218 fb_rvalue);
8219 if (omp_is_reference (var))
8221 tree v = create_tmp_var_raw (type, get_name (var));
8222 gimple_add_tmp_var (v);
8223 TREE_ADDRESSABLE (v) = 1;
8224 gimple_seq_add_stmt (&new_body,
8225 gimple_build_assign (v, x));
8226 x = build_fold_addr_expr (v);
8228 gimple_seq_add_stmt (&new_body,
8229 gimple_build_assign (new_var, x));
8231 else
8233 x = build_receiver_ref (var, !omp_is_reference (var), ctx);
8234 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
8235 fb_rvalue);
8236 gimple_seq_add_stmt (&new_body,
8237 gimple_build_assign (new_var, x));
8240 else if (is_variable_sized (var))
8242 tree pvar = DECL_VALUE_EXPR (var);
8243 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
8244 pvar = TREE_OPERAND (pvar, 0);
8245 gcc_assert (DECL_P (pvar));
8246 tree new_var = lookup_decl (pvar, ctx);
8247 x = build_receiver_ref (var, false, ctx);
8248 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8249 gimple_seq_add_stmt (&new_body,
8250 gimple_build_assign (new_var, x));
8252 break;
8253 case OMP_CLAUSE_PRIVATE:
8254 if (is_gimple_omp_oacc (ctx->stmt))
8255 break;
8256 var = OMP_CLAUSE_DECL (c);
8257 if (omp_is_reference (var))
8259 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
8260 tree new_var = lookup_decl (var, ctx);
8261 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
8262 if (TREE_CONSTANT (x))
8264 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
8265 get_name (var));
8266 gimple_add_tmp_var (x);
8267 TREE_ADDRESSABLE (x) = 1;
8268 x = build_fold_addr_expr_loc (clause_loc, x);
8270 else
8271 break;
8273 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
8274 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8275 gimple_seq_add_stmt (&new_body,
8276 gimple_build_assign (new_var, x));
8278 break;
8279 case OMP_CLAUSE_USE_DEVICE_PTR:
8280 case OMP_CLAUSE_IS_DEVICE_PTR:
8281 var = OMP_CLAUSE_DECL (c);
8282 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR)
8283 x = build_sender_ref (var, ctx);
8284 else
8285 x = build_receiver_ref (var, false, ctx);
8286 if (is_variable_sized (var))
8288 tree pvar = DECL_VALUE_EXPR (var);
8289 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
8290 pvar = TREE_OPERAND (pvar, 0);
8291 gcc_assert (DECL_P (pvar));
8292 tree new_var = lookup_decl (pvar, ctx);
8293 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8294 gimple_seq_add_stmt (&new_body,
8295 gimple_build_assign (new_var, x));
8297 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
8299 tree new_var = lookup_decl (var, ctx);
8300 new_var = DECL_VALUE_EXPR (new_var);
8301 gcc_assert (TREE_CODE (new_var) == MEM_REF);
8302 new_var = TREE_OPERAND (new_var, 0);
8303 gcc_assert (DECL_P (new_var));
8304 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8305 gimple_seq_add_stmt (&new_body,
8306 gimple_build_assign (new_var, x));
8308 else
8310 tree type = TREE_TYPE (var);
8311 tree new_var = lookup_decl (var, ctx);
8312 if (omp_is_reference (var))
8314 type = TREE_TYPE (type);
8315 if (TREE_CODE (type) != ARRAY_TYPE)
8317 tree v = create_tmp_var_raw (type, get_name (var));
8318 gimple_add_tmp_var (v);
8319 TREE_ADDRESSABLE (v) = 1;
8320 x = fold_convert (type, x);
8321 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
8322 fb_rvalue);
8323 gimple_seq_add_stmt (&new_body,
8324 gimple_build_assign (v, x));
8325 x = build_fold_addr_expr (v);
8328 new_var = DECL_VALUE_EXPR (new_var);
8329 x = fold_convert (TREE_TYPE (new_var), x);
8330 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8331 gimple_seq_add_stmt (&new_body,
8332 gimple_build_assign (new_var, x));
8334 break;
8336 /* Handle GOMP_MAP_FIRSTPRIVATE_{POINTER,REFERENCE} in second pass,
8337 so that firstprivate vars holding OMP_CLAUSE_SIZE if needed
8338 are already handled. Similarly OMP_CLAUSE_PRIVATE for VLAs
8339 or references to VLAs. */
8340 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
8341 switch (OMP_CLAUSE_CODE (c))
8343 tree var;
8344 default:
8345 break;
8346 case OMP_CLAUSE_MAP:
8347 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
8348 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
8350 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
8351 HOST_WIDE_INT offset = 0;
8352 gcc_assert (prev);
8353 var = OMP_CLAUSE_DECL (c);
8354 if (DECL_P (var)
8355 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
8356 && is_global_var (maybe_lookup_decl_in_outer_ctx (var,
8357 ctx))
8358 && varpool_node::get_create (var)->offloadable)
8359 break;
8360 if (TREE_CODE (var) == INDIRECT_REF
8361 && TREE_CODE (TREE_OPERAND (var, 0)) == COMPONENT_REF)
8362 var = TREE_OPERAND (var, 0);
8363 if (TREE_CODE (var) == COMPONENT_REF)
8365 var = get_addr_base_and_unit_offset (var, &offset);
8366 gcc_assert (var != NULL_TREE && DECL_P (var));
8368 else if (DECL_SIZE (var)
8369 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
8371 tree var2 = DECL_VALUE_EXPR (var);
8372 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
8373 var2 = TREE_OPERAND (var2, 0);
8374 gcc_assert (DECL_P (var2));
8375 var = var2;
8377 tree new_var = lookup_decl (var, ctx), x;
8378 tree type = TREE_TYPE (new_var);
8379 bool is_ref;
8380 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == INDIRECT_REF
8381 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
8382 == COMPONENT_REF))
8384 type = TREE_TYPE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0));
8385 is_ref = true;
8386 new_var = build2 (MEM_REF, type,
8387 build_fold_addr_expr (new_var),
8388 build_int_cst (build_pointer_type (type),
8389 offset));
8391 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPONENT_REF)
8393 type = TREE_TYPE (OMP_CLAUSE_DECL (c));
8394 is_ref = TREE_CODE (type) == REFERENCE_TYPE;
8395 new_var = build2 (MEM_REF, type,
8396 build_fold_addr_expr (new_var),
8397 build_int_cst (build_pointer_type (type),
8398 offset));
8400 else
8401 is_ref = omp_is_reference (var);
8402 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
8403 is_ref = false;
8404 bool ref_to_array = false;
8405 if (is_ref)
8407 type = TREE_TYPE (type);
8408 if (TREE_CODE (type) == ARRAY_TYPE)
8410 type = build_pointer_type (type);
8411 ref_to_array = true;
8414 else if (TREE_CODE (type) == ARRAY_TYPE)
8416 tree decl2 = DECL_VALUE_EXPR (new_var);
8417 gcc_assert (TREE_CODE (decl2) == MEM_REF);
8418 decl2 = TREE_OPERAND (decl2, 0);
8419 gcc_assert (DECL_P (decl2));
8420 new_var = decl2;
8421 type = TREE_TYPE (new_var);
8423 x = build_receiver_ref (OMP_CLAUSE_DECL (prev), false, ctx);
8424 x = fold_convert_loc (clause_loc, type, x);
8425 if (!integer_zerop (OMP_CLAUSE_SIZE (c)))
8427 tree bias = OMP_CLAUSE_SIZE (c);
8428 if (DECL_P (bias))
8429 bias = lookup_decl (bias, ctx);
8430 bias = fold_convert_loc (clause_loc, sizetype, bias);
8431 bias = fold_build1_loc (clause_loc, NEGATE_EXPR, sizetype,
8432 bias);
8433 x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
8434 TREE_TYPE (x), x, bias);
8436 if (ref_to_array)
8437 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
8438 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8439 if (is_ref && !ref_to_array)
8441 tree t = create_tmp_var_raw (type, get_name (var));
8442 gimple_add_tmp_var (t);
8443 TREE_ADDRESSABLE (t) = 1;
8444 gimple_seq_add_stmt (&new_body,
8445 gimple_build_assign (t, x));
8446 x = build_fold_addr_expr_loc (clause_loc, t);
8448 gimple_seq_add_stmt (&new_body,
8449 gimple_build_assign (new_var, x));
8450 prev = NULL_TREE;
8452 else if (OMP_CLAUSE_CHAIN (c)
8453 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c))
8454 == OMP_CLAUSE_MAP
8455 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
8456 == GOMP_MAP_FIRSTPRIVATE_POINTER
8457 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
8458 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
8459 prev = c;
8460 break;
8461 case OMP_CLAUSE_PRIVATE:
8462 var = OMP_CLAUSE_DECL (c);
8463 if (is_variable_sized (var))
8465 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
8466 tree new_var = lookup_decl (var, ctx);
8467 tree pvar = DECL_VALUE_EXPR (var);
8468 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
8469 pvar = TREE_OPERAND (pvar, 0);
8470 gcc_assert (DECL_P (pvar));
8471 tree new_pvar = lookup_decl (pvar, ctx);
8472 tree atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
8473 tree al = size_int (DECL_ALIGN (var));
8474 tree x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
8475 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
8476 x = fold_convert_loc (clause_loc, TREE_TYPE (new_pvar), x);
8477 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8478 gimple_seq_add_stmt (&new_body,
8479 gimple_build_assign (new_pvar, x));
8481 else if (omp_is_reference (var) && !is_gimple_omp_oacc (ctx->stmt))
8483 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
8484 tree new_var = lookup_decl (var, ctx);
8485 tree x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
8486 if (TREE_CONSTANT (x))
8487 break;
8488 else
8490 tree atmp
8491 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
8492 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
8493 tree al = size_int (TYPE_ALIGN (rtype));
8494 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
8497 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
8498 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8499 gimple_seq_add_stmt (&new_body,
8500 gimple_build_assign (new_var, x));
8502 break;
8505 gimple_seq fork_seq = NULL;
8506 gimple_seq join_seq = NULL;
8508 if (is_oacc_parallel (ctx))
8510 /* If there are reductions on the offloaded region itself, treat
8511 them as a dummy GANG loop. */
8512 tree level = build_int_cst (integer_type_node, GOMP_DIM_GANG);
8514 lower_oacc_reductions (gimple_location (ctx->stmt), clauses, level,
8515 false, NULL, NULL, &fork_seq, &join_seq, ctx);
8518 gimple_seq_add_seq (&new_body, fork_seq);
8519 gimple_seq_add_seq (&new_body, tgt_body);
8520 gimple_seq_add_seq (&new_body, join_seq);
8522 if (offloaded)
8523 new_body = maybe_catch_exception (new_body);
8525 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
8526 gimple_omp_set_body (stmt, new_body);
8529 bind = gimple_build_bind (NULL, NULL,
8530 tgt_bind ? gimple_bind_block (tgt_bind)
8531 : NULL_TREE);
8532 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
8533 gimple_bind_add_seq (bind, ilist);
8534 gimple_bind_add_stmt (bind, stmt);
8535 gimple_bind_add_seq (bind, olist);
8537 pop_gimplify_context (NULL);
8539 if (dep_bind)
8541 gimple_bind_add_seq (dep_bind, dep_ilist);
8542 gimple_bind_add_stmt (dep_bind, bind);
8543 gimple_bind_add_seq (dep_bind, dep_olist);
8544 pop_gimplify_context (dep_bind);
/* Expand code for an OpenMP teams directive.  */

static void
lower_omp_teams (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  gomp_teams *teams_stmt = as_a <gomp_teams *> (gsi_stmt (*gsi_p));
  push_gimplify_context ();

  /* Wrap the whole lowered construct in a GIMPLE_BIND that replaces the
     teams statement at *GSI_P.  */
  tree block = make_node (BLOCK);
  gbind *bind = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, bind, true);
  gimple_seq bind_body = NULL;
  gimple_seq dlist = NULL;
  gimple_seq olist = NULL;

  /* Evaluate the num_teams clause expression (0 means "default" for the
     runtime); gimplify it into BIND_BODY so it is computed before the
     GOMP_teams call below.  */
  tree num_teams = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
				    OMP_CLAUSE_NUM_TEAMS);
  if (num_teams == NULL_TREE)
    num_teams = build_int_cst (unsigned_type_node, 0);
  else
    {
      num_teams = OMP_CLAUSE_NUM_TEAMS_EXPR (num_teams);
      num_teams = fold_convert (unsigned_type_node, num_teams);
      gimplify_expr (&num_teams, &bind_body, NULL, is_gimple_val, fb_rvalue);
    }
  /* Likewise for thread_limit (0 again means "default").  */
  tree thread_limit = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
				       OMP_CLAUSE_THREAD_LIMIT);
  if (thread_limit == NULL_TREE)
    thread_limit = build_int_cst (unsigned_type_node, 0);
  else
    {
      thread_limit = OMP_CLAUSE_THREAD_LIMIT_EXPR (thread_limit);
      thread_limit = fold_convert (unsigned_type_node, thread_limit);
      gimplify_expr (&thread_limit, &bind_body, NULL, is_gimple_val,
		     fb_rvalue);
    }

  /* Lower data-sharing clauses and the construct body recursively.  */
  lower_rec_input_clauses (gimple_omp_teams_clauses (teams_stmt),
			   &bind_body, &dlist, ctx, NULL);
  lower_omp (gimple_omp_body_ptr (teams_stmt), ctx);
  lower_reduction_clauses (gimple_omp_teams_clauses (teams_stmt), &olist, ctx);
  /* For a gridified (HSA phony) teams construct no runtime call is
     emitted; otherwise keep the teams stmt and call GOMP_teams.  */
  if (!gimple_omp_teams_grid_phony (teams_stmt))
    {
      gimple_seq_add_stmt (&bind_body, teams_stmt);
      location_t loc = gimple_location (teams_stmt);
      tree decl = builtin_decl_explicit (BUILT_IN_GOMP_TEAMS);
      gimple *call = gimple_build_call (decl, 2, num_teams, thread_limit);
      gimple_set_location (call, loc);
      gimple_seq_add_stmt (&bind_body, call);
    }

  /* Splice in the lowered body, then reduction (olist) and
     destructor/copy-back (dlist) sequences.  */
  gimple_seq_add_seq (&bind_body, gimple_omp_body (teams_stmt));
  gimple_omp_set_body (teams_stmt, NULL);
  gimple_seq_add_seq (&bind_body, olist);
  gimple_seq_add_seq (&bind_body, dlist);
  if (!gimple_omp_teams_grid_phony (teams_stmt))
    gimple_seq_add_stmt (&bind_body, gimple_build_omp_return (true));
  gimple_bind_set_body (bind, bind_body);

  pop_gimplify_context (bind);

  gimple_bind_append_vars (bind, ctx->block_vars);
  BLOCK_VARS (block) = ctx->block_vars;
  if (BLOCK_VARS (block))
    TREE_USED (block) = 1;
}
/* Expand code within an artificial GIMPLE_OMP_GRID_BODY OMP construct.  */

static void
lower_omp_grid_body (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  gimple *stmt = gsi_stmt (*gsi_p);
  /* Lower the body recursively, then terminate the region with a
     non-barrier OMP_RETURN.  */
  lower_omp (gimple_omp_body_ptr (stmt), ctx);
  gimple_seq_add_stmt (gimple_omp_body_ptr (stmt),
		       gimple_build_omp_return (false));
}
/* Callback for lower_omp_1.  Return non-NULL if *tp needs to be
   regimplified.  If DATA is non-NULL, lower_omp_1 is outside
   of OMP context, but with task_shared_vars set.  */

static tree
lower_omp_regimplify_p (tree *tp, int *walk_subtrees,
			void *data)
{
  tree t = *tp;

  /* Any variable with DECL_VALUE_EXPR needs to be regimplified.
     (Only checked inside an OMP context, i.e. when DATA is NULL.)  */
  if (VAR_P (t) && data == NULL && DECL_HAS_VALUE_EXPR_P (t))
    return t;

  /* Decls shared with an OMP task were remapped; they also need
     regimplification.  */
  if (task_shared_vars
      && DECL_P (t)
      && bitmap_bit_p (task_shared_vars, DECL_UID (t)))
    return t;

  /* If a global variable has been privatized, TREE_CONSTANT on
     ADDR_EXPR might be wrong.  */
  if (data == NULL && TREE_CODE (t) == ADDR_EXPR)
    recompute_tree_invariant_for_addr_expr (t);

  /* Do not descend into types or decls themselves.  */
  *walk_subtrees = !IS_TYPE_OR_DECL_P (t);
  return NULL_TREE;
}
/* Data to be communicated between lower_omp_regimplify_operands and
   lower_omp_regimplify_operands_p.  */

struct lower_omp_regimplify_operands_data
{
  /* Current OMP context, used to look up remapped decls.  */
  omp_context *ctx;
  /* Stack of (saved DECL_VALUE_EXPR, decl) pairs so that the caller can
     restore the original DECL_VALUE_EXPRs after regimplification.  */
  vec<tree> *decls;
};
/* Helper function for lower_omp_regimplify_operands.  Find
   omp_member_access_dummy_var vars and adjust temporarily their
   DECL_VALUE_EXPRs if needed.  */

static tree
lower_omp_regimplify_operands_p (tree *tp, int *walk_subtrees,
				 void *data)
{
  tree t = omp_member_access_dummy_var (*tp);
  if (t)
    {
      struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
      lower_omp_regimplify_operands_data *ldata
	= (lower_omp_regimplify_operands_data *) wi->info;
      tree o = maybe_lookup_decl (t, ldata->ctx);
      if (o != t)
	{
	  /* Record the original value expr and the decl (in that order;
	     the caller pops them in reverse), then install a remapped
	     copy that refers to the context-local decl O.  */
	  ldata->decls->safe_push (DECL_VALUE_EXPR (*tp));
	  ldata->decls->safe_push (*tp);
	  tree v = unshare_and_remap (DECL_VALUE_EXPR (*tp), t, o);
	  SET_DECL_VALUE_EXPR (*tp, v);
	}
    }
  *walk_subtrees = !IS_TYPE_OR_DECL_P (*tp);
  return NULL_TREE;
}
/* Wrapper around gimple_regimplify_operands that adjusts DECL_VALUE_EXPRs
   of omp_member_access_dummy_var vars during regimplification.  */

static void
lower_omp_regimplify_operands (omp_context *ctx, gimple *stmt,
			       gimple_stmt_iterator *gsi_p)
{
  auto_vec<tree, 10> decls;
  if (ctx)
    {
      /* Temporarily remap DECL_VALUE_EXPRs of member-access dummy vars
	 used by STMT; DECLS records what to restore afterwards.  */
      struct walk_stmt_info wi;
      memset (&wi, '\0', sizeof (wi));
      struct lower_omp_regimplify_operands_data data;
      data.ctx = ctx;
      data.decls = &decls;
      wi.info = &data;
      walk_gimple_op (stmt, lower_omp_regimplify_operands_p, &wi);
    }
  gimple_regimplify_operands (stmt, gsi_p);
  /* Restore the saved DECL_VALUE_EXPRs; pairs were pushed as
     (value expr, decl), so pop decl first.  */
  while (!decls.is_empty ())
    {
      tree t = decls.pop ();
      tree v = decls.pop ();
      SET_DECL_VALUE_EXPR (t, v);
    }
}
/* Lower the statement at *GSI_P within OMP context CTX (NULL when outside
   any OMP region but task_shared_vars is set).  Dispatches OMP constructs
   to their dedicated lowering routines, recurses into statements with
   substatements, and regimplifies operands that refer to remapped
   variables.  */

static void
lower_omp_1 (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  gimple *stmt = gsi_stmt (*gsi_p);
  struct walk_stmt_info wi;
  gcall *call_stmt;

  if (gimple_has_location (stmt))
    input_location = gimple_location (stmt);

  /* WI is only consulted when CTX is NULL, which in turn only happens
     when task_shared_vars is set; see lower_omp_regimplify_p.  */
  if (task_shared_vars)
    memset (&wi, '\0', sizeof (wi));

  /* If we have issued syntax errors, avoid doing any heavy lifting.
     Just replace the OMP directives with a NOP to avoid
     confusing RTL expansion.  */
  if (seen_error () && is_gimple_omp (stmt))
    {
      gsi_replace (gsi_p, gimple_build_nop (), true);
      return;
    }

  switch (gimple_code (stmt))
    {
    case GIMPLE_COND:
      {
	gcond *cond_stmt = as_a <gcond *> (stmt);
	/* Regimplify the condition operands if either side mentions a
	   remapped variable.  */
	if ((ctx || task_shared_vars)
	    && (walk_tree (gimple_cond_lhs_ptr (cond_stmt),
			   lower_omp_regimplify_p,
			   ctx ? NULL : &wi, NULL)
		|| walk_tree (gimple_cond_rhs_ptr (cond_stmt),
			      lower_omp_regimplify_p,
			      ctx ? NULL : &wi, NULL)))
	  lower_omp_regimplify_operands (ctx, cond_stmt, gsi_p);
      }
      break;
    case GIMPLE_CATCH:
      lower_omp (gimple_catch_handler_ptr (as_a <gcatch *> (stmt)), ctx);
      break;
    case GIMPLE_EH_FILTER:
      lower_omp (gimple_eh_filter_failure_ptr (stmt), ctx);
      break;
    case GIMPLE_TRY:
      lower_omp (gimple_try_eval_ptr (stmt), ctx);
      lower_omp (gimple_try_cleanup_ptr (stmt), ctx);
      break;
    case GIMPLE_TRANSACTION:
      lower_omp (gimple_transaction_body_ptr (as_a <gtransaction *> (stmt)),
		 ctx);
      break;
    case GIMPLE_BIND:
      lower_omp (gimple_bind_body_ptr (as_a <gbind *> (stmt)), ctx);
      break;
    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      if (ctx->cancellable)
	ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
      lower_omp_taskreg (gsi_p, ctx);
      break;
    case GIMPLE_OMP_FOR:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      if (ctx->cancellable)
	ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
      lower_omp_for (gsi_p, ctx);
      break;
    case GIMPLE_OMP_SECTIONS:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      if (ctx->cancellable)
	ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
      lower_omp_sections (gsi_p, ctx);
      break;
    case GIMPLE_OMP_SINGLE:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_single (gsi_p, ctx);
      break;
    case GIMPLE_OMP_MASTER:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_master (gsi_p, ctx);
      break;
    case GIMPLE_OMP_TASKGROUP:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_taskgroup (gsi_p, ctx);
      break;
    case GIMPLE_OMP_ORDERED:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_ordered (gsi_p, ctx);
      break;
    case GIMPLE_OMP_CRITICAL:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_critical (gsi_p, ctx);
      break;
    case GIMPLE_OMP_ATOMIC_LOAD:
      if ((ctx || task_shared_vars)
	  && walk_tree (gimple_omp_atomic_load_rhs_ptr (
			  as_a <gomp_atomic_load *> (stmt)),
			lower_omp_regimplify_p, ctx ? NULL : &wi, NULL))
	lower_omp_regimplify_operands (ctx, stmt, gsi_p);
      break;
    case GIMPLE_OMP_TARGET:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_target (gsi_p, ctx);
      break;
    case GIMPLE_OMP_TEAMS:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_teams (gsi_p, ctx);
      break;
    case GIMPLE_OMP_GRID_BODY:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_grid_body (gsi_p, ctx);
      break;
    case GIMPLE_CALL:
      tree fndecl;
      call_stmt = as_a <gcall *> (stmt);
      fndecl = gimple_call_fndecl (call_stmt);
      if (fndecl
	  && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
	switch (DECL_FUNCTION_CODE (fndecl))
	  {
	  case BUILT_IN_GOMP_BARRIER:
	    if (ctx == NULL)
	      break;
	    /* FALLTHRU */
	  case BUILT_IN_GOMP_CANCEL:
	  case BUILT_IN_GOMP_CANCELLATION_POINT:
	    omp_context *cctx;
	    cctx = ctx;
	    /* A section's cancellability lives on the enclosing
	       sections construct.  */
	    if (gimple_code (cctx->stmt) == GIMPLE_OMP_SECTION)
	      cctx = cctx->outer;
	    gcc_assert (gimple_call_lhs (call_stmt) == NULL_TREE);
	    if (!cctx->cancellable)
	      {
		/* In a non-cancellable region a cancellation point is a
		   no-op; a plain barrier or cancel is left alone.  */
		if (DECL_FUNCTION_CODE (fndecl)
		    == BUILT_IN_GOMP_CANCELLATION_POINT)
		  {
		    stmt = gimple_build_nop ();
		    gsi_replace (gsi_p, stmt, false);
		  }
		break;
	      }
	    if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_GOMP_BARRIER)
	      {
		/* In a cancellable region, use the _cancel variant of
		   the barrier, which reports pending cancellation.  */
		fndecl = builtin_decl_explicit (BUILT_IN_GOMP_BARRIER_CANCEL);
		gimple_call_set_fndecl (call_stmt, fndecl);
		gimple_call_set_fntype (call_stmt, TREE_TYPE (fndecl));
	      }
	    /* Give the call an lhs and branch to the region's
	       cancel_label when it returns true.  */
	    tree lhs;
	    lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (fndecl)));
	    gimple_call_set_lhs (call_stmt, lhs);
	    tree fallthru_label;
	    fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
	    gimple *g;
	    g = gimple_build_label (fallthru_label);
	    gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
	    g = gimple_build_cond (NE_EXPR, lhs,
				   fold_convert (TREE_TYPE (lhs),
						 boolean_false_node),
				   cctx->cancel_label, fallthru_label);
	    gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
	    break;
	  default:
	    break;
	  }
      /* FALLTHRU */
    default:
      if ((ctx || task_shared_vars)
	  && walk_gimple_op (stmt, lower_omp_regimplify_p,
			     ctx ? NULL : &wi))
	{
	  /* Just remove clobbers, this should happen only if we have
	     "privatized" local addressable variables in SIMD regions,
	     the clobber isn't needed in that case and gimplifying address
	     of the ARRAY_REF into a pointer and creating MEM_REF based
	     clobber would create worse code than we get with the clobber
	     dropped.  */
	  if (gimple_clobber_p (stmt))
	    {
	      gsi_replace (gsi_p, gimple_build_nop (), true);
	      break;
	    }
	  lower_omp_regimplify_operands (ctx, stmt, gsi_p);
	}
      break;
    }
}
8916 static void
8917 lower_omp (gimple_seq *body, omp_context *ctx)
8919 location_t saved_location = input_location;
8920 gimple_stmt_iterator gsi;
8921 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
8922 lower_omp_1 (&gsi, ctx);
8923 /* During gimplification, we haven't folded statments inside offloading
8924 or taskreg regions (gimplify.c:maybe_fold_stmt); do that now. */
8925 if (target_nesting_level || taskreg_nesting_level)
8926 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
8927 fold_stmt (&gsi);
8928 input_location = saved_location;
/* Main entry point.  */

static unsigned int
execute_lower_omp (void)
{
  gimple_seq body;
  int i;
  omp_context *ctx;

  /* This pass always runs, to provide PROP_gimple_lomp.
     But often, there is nothing to do.  */
  if (flag_cilkplus == 0 && flag_openacc == 0 && flag_openmp == 0
      && flag_openmp_simd == 0)
    return 0;

  all_contexts = splay_tree_new (splay_tree_compare_pointers, 0,
				 delete_omp_context);

  body = gimple_body (current_function_decl);

  /* When HSA code generation was requested, gridify suitable target
     regions before scanning.  */
  if (hsa_gen_requested_p ())
    omp_grid_gridify_all_targets (&body);

  /* Phase 1: build the omp_context tree for the whole function.  */
  scan_omp (&body, NULL);
  gcc_assert (taskreg_nesting_level == 0);
  FOR_EACH_VEC_ELT (taskreg_contexts, i, ctx)
    finish_taskreg_scan (ctx);
  taskreg_contexts.release ();

  /* Phase 2: lower, but only if any context was actually created.  */
  if (all_contexts->root)
    {
      if (task_shared_vars)
	push_gimplify_context ();
      lower_omp (&body, NULL);
      if (task_shared_vars)
	pop_gimplify_context (NULL);
    }

  if (all_contexts)
    {
      splay_tree_delete (all_contexts);
      all_contexts = NULL;
    }
  BITMAP_FREE (task_shared_vars);
  return 0;
}
namespace {

/* Pass metadata: omplower always provides PROP_gimple_lomp even when it
   does nothing (see execute_lower_omp).  */
const pass_data pass_data_lower_omp =
{
  GIMPLE_PASS, /* type */
  "omplower", /* name */
  OPTGROUP_OMP, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  PROP_gimple_lomp | PROP_gimple_lomp_dev, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_lower_omp : public gimple_opt_pass
{
public:
  pass_lower_omp (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_lower_omp, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return execute_lower_omp (); }

}; // class pass_lower_omp

} // anon namespace

gimple_opt_pass *
make_pass_lower_omp (gcc::context *ctxt)
{
  return new pass_lower_omp (ctxt);
}
/* The following is a utility to diagnose structured block violations.
   It is not part of the "omplower" pass, as that's invoked too late.  It
   should be invoked by the respective front ends after gimplification.  */

/* Maps each LABEL_DECL to the innermost OMP construct (gimple *) that
   contains it; built by diagnose_sb_1, consumed by diagnose_sb_2.  */
static splay_tree all_labels;
/* Check for mismatched contexts and generate an error if needed.  Return
   true if an error is detected.  */

static bool
diagnose_sb_0 (gimple_stmt_iterator *gsi_p,
	       gimple *branch_ctx, gimple *label_ctx)
{
  gcc_checking_assert (!branch_ctx || is_gimple_omp (branch_ctx));
  gcc_checking_assert (!label_ctx || is_gimple_omp (label_ctx));

  if (label_ctx == branch_ctx)
    return false;

  /* Decide which language extension's rules are being violated, so the
     diagnostic names the right one.  */
  const char* kind = NULL;

  if (flag_cilkplus)
    {
      if ((branch_ctx
	   && gimple_code (branch_ctx) == GIMPLE_OMP_FOR
	   && gimple_omp_for_kind (branch_ctx) == GF_OMP_FOR_KIND_CILKSIMD)
	  || (label_ctx
	      && gimple_code (label_ctx) == GIMPLE_OMP_FOR
	      && gimple_omp_for_kind (label_ctx) == GF_OMP_FOR_KIND_CILKSIMD))
	kind = "Cilk Plus";
    }
  if (flag_openacc)
    {
      if ((branch_ctx && is_gimple_omp_oacc (branch_ctx))
	  || (label_ctx && is_gimple_omp_oacc (label_ctx)))
	{
	  gcc_checking_assert (kind == NULL);
	  kind = "OpenACC";
	}
    }
  if (kind == NULL)
    {
      gcc_checking_assert (flag_openmp);
      kind = "OpenMP";
    }

  /* Previously we kept track of the label's entire context in diagnose_sb_[12]
     so we could traverse it and issue a correct "exit" or "enter" error
     message upon a structured block violation.

     We built the context by building a list with tree_cons'ing, but there is
     no easy counterpart in gimple tuples.  It seems like far too much work
     for issuing exit/enter error messages.  If someone really misses the
     distinct error message... patches welcome.  */

#if 0
  /* Try to avoid confusing the user by producing and error message
     with correct "exit" or "enter" verbiage.  We prefer "exit"
     unless we can show that LABEL_CTX is nested within BRANCH_CTX.  */
  if (branch_ctx == NULL)
    exit_p = false;
  else
    {
      while (label_ctx)
	{
	  if (TREE_VALUE (label_ctx) == branch_ctx)
	    {
	      exit_p = false;
	      break;
	    }
	  label_ctx = TREE_CHAIN (label_ctx);
	}
    }

  if (exit_p)
    error ("invalid exit from %s structured block", kind);
  else
    error ("invalid entry to %s structured block", kind);
#endif

  /* If it's obvious we have an invalid entry, be specific about the error.  */
  if (branch_ctx == NULL)
    error ("invalid entry to %s structured block", kind);
  else
    {
      /* Otherwise, be vague and lazy, but efficient.  */
      error ("invalid branch to/from %s structured block", kind);
    }

  /* Drop the offending branch so RTL expansion is not confused.  */
  gsi_replace (gsi_p, gimple_build_nop (), false);
  return true;
}
/* Pass 1: Create a minimal tree of structured blocks, and record
   where each label is found.  */

static tree
diagnose_sb_1 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
	       struct walk_stmt_info *wi)
{
  gimple *context = (gimple *) wi->info;
  gimple *inner_context;
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;

  switch (gimple_code (stmt))
    {
    WALK_SUBSTMTS;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_TARGET:
    case GIMPLE_OMP_TEAMS:
    case GIMPLE_OMP_TASKGROUP:
      /* The minimal context here is just the current OMP construct.  */
      inner_context = stmt;
      wi->info = inner_context;
      walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_OMP_FOR:
      inner_context = stmt;
      wi->info = inner_context;
      /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
	 walk them.  */
      walk_gimple_seq (gimple_omp_for_pre_body (stmt),
		       diagnose_sb_1, NULL, wi);
      walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_LABEL:
      /* Remember which construct (possibly NULL) this label lives in.  */
      splay_tree_insert (all_labels,
			 (splay_tree_key) gimple_label_label (
					    as_a <glabel *> (stmt)),
			 (splay_tree_value) context);
      break;

    default:
      break;
    }

  return NULL_TREE;
}
/* Pass 2: Check each branch and see if its context differs from that of
   the destination label's context.  */

static tree
diagnose_sb_2 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
	       struct walk_stmt_info *wi)
{
  gimple *context = (gimple *) wi->info;
  splay_tree_node n;
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;

  switch (gimple_code (stmt))
    {
    WALK_SUBSTMTS;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_TARGET:
    case GIMPLE_OMP_TEAMS:
    case GIMPLE_OMP_TASKGROUP:
      wi->info = stmt;
      walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_OMP_FOR:
      wi->info = stmt;
      /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
	 walk them.  */
      walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt),
			   diagnose_sb_2, NULL, wi);
      walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_COND:
      {
	/* Check both outgoing edges of the conditional.  */
	gcond *cond_stmt = as_a <gcond *> (stmt);
	tree lab = gimple_cond_true_label (cond_stmt);
	if (lab)
	  {
	    n = splay_tree_lookup (all_labels,
				   (splay_tree_key) lab);
	    diagnose_sb_0 (gsi_p, context,
			   n ? (gimple *) n->value : NULL);
	  }
	lab = gimple_cond_false_label (cond_stmt);
	if (lab)
	  {
	    n = splay_tree_lookup (all_labels,
				   (splay_tree_key) lab);
	    diagnose_sb_0 (gsi_p, context,
			   n ? (gimple *) n->value : NULL);
	  }
      }
      break;

    case GIMPLE_GOTO:
      {
	tree lab = gimple_goto_dest (stmt);
	/* Computed gotos have non-LABEL_DECL destinations; skip them.  */
	if (TREE_CODE (lab) != LABEL_DECL)
	  break;

	n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
	diagnose_sb_0 (gsi_p, context, n ? (gimple *) n->value : NULL);
      }
      break;

    case GIMPLE_SWITCH:
      {
	gswitch *switch_stmt = as_a <gswitch *> (stmt);
	unsigned int i;
	for (i = 0; i < gimple_switch_num_labels (switch_stmt); ++i)
	  {
	    tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
	    n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
	    /* One diagnostic per switch is enough; diagnose_sb_0 also
	       replaces the stmt, making further checks moot.  */
	    if (n && diagnose_sb_0 (gsi_p, context, (gimple *) n->value))
	      break;
	  }
      }
      break;

    case GIMPLE_RETURN:
      /* A return inside any OMP construct leaves it: always invalid.  */
      diagnose_sb_0 (gsi_p, context, NULL);
      break;

    default:
      break;
    }

  return NULL_TREE;
}
9267 static unsigned int
9268 diagnose_omp_structured_block_errors (void)
9270 struct walk_stmt_info wi;
9271 gimple_seq body = gimple_body (current_function_decl);
9273 all_labels = splay_tree_new (splay_tree_compare_pointers, 0, 0);
9275 memset (&wi, 0, sizeof (wi));
9276 walk_gimple_seq (body, diagnose_sb_1, NULL, &wi);
9278 memset (&wi, 0, sizeof (wi));
9279 wi.want_locations = true;
9280 walk_gimple_seq_mod (&body, diagnose_sb_2, NULL, &wi);
9282 gimple_set_body (current_function_decl, body);
9284 splay_tree_delete (all_labels);
9285 all_labels = NULL;
9287 return 0;
namespace {

/* Pass metadata for the structured-block diagnostic pass; it is an
   internal pass (leading '*' in the name hides it from -fdump lists).  */
const pass_data pass_data_diagnose_omp_blocks =
{
  GIMPLE_PASS, /* type */
  "*diagnose_omp_blocks", /* name */
  OPTGROUP_OMP, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_diagnose_omp_blocks : public gimple_opt_pass
{
public:
  pass_diagnose_omp_blocks (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_diagnose_omp_blocks, ctxt)
  {}

  /* opt_pass methods: */
  /* Only run when one of the relevant language extensions is enabled.  */
  virtual bool gate (function *)
  {
    return flag_cilkplus || flag_openacc || flag_openmp;
  }
  virtual unsigned int execute (function *)
  {
    return diagnose_omp_structured_block_errors ();
  }

}; // class pass_diagnose_omp_blocks

} // anon namespace

gimple_opt_pass *
make_pass_diagnose_omp_blocks (gcc::context *ctxt)
{
  return new pass_diagnose_omp_blocks (ctxt);
}
9333 #include "gt-omp-low.h"