PR c/79847
[official-gcc.git] / gcc / omp-low.c
blobc2c69cbcc6ea9f3e123ff5bbc1280f4f48d6eb3e
1 /* Lowering pass for OMP directives. Converts OMP directives into explicit
2 calls to the runtime library (libgomp), data marshalling to implement data
3 sharing and copying clauses, offloading to accelerators, and more.
5 Contributed by Diego Novillo <dnovillo@redhat.com>
7 Copyright (C) 2005-2017 Free Software Foundation, Inc.
9 This file is part of GCC.
11 GCC is free software; you can redistribute it and/or modify it under
12 the terms of the GNU General Public License as published by the Free
13 Software Foundation; either version 3, or (at your option) any later
14 version.
16 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
17 WARRANTY; without even the implied warranty of MERCHANTABILITY or
18 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19 for more details.
21 You should have received a copy of the GNU General Public License
22 along with GCC; see the file COPYING3. If not see
23 <http://www.gnu.org/licenses/>. */
25 #include "config.h"
26 #include "system.h"
27 #include "coretypes.h"
28 #include "backend.h"
29 #include "target.h"
30 #include "tree.h"
31 #include "gimple.h"
32 #include "tree-pass.h"
33 #include "ssa.h"
34 #include "cgraph.h"
35 #include "pretty-print.h"
36 #include "diagnostic-core.h"
37 #include "fold-const.h"
38 #include "stor-layout.h"
39 #include "internal-fn.h"
40 #include "gimple-fold.h"
41 #include "gimplify.h"
42 #include "gimple-iterator.h"
43 #include "gimplify-me.h"
44 #include "gimple-walk.h"
45 #include "tree-iterator.h"
46 #include "tree-inline.h"
47 #include "langhooks.h"
48 #include "tree-dfa.h"
49 #include "tree-ssa.h"
50 #include "splay-tree.h"
51 #include "omp-general.h"
52 #include "omp-low.h"
53 #include "omp-grid.h"
54 #include "gimple-low.h"
55 #include "symbol-summary.h"
56 #include "tree-nested.h"
57 #include "context.h"
58 #include "gomp-constants.h"
59 #include "gimple-pretty-print.h"
60 #include "hsa-common.h"
62 /* Lowering of OMP parallel and workshare constructs proceeds in two
63 phases. The first phase scans the function looking for OMP statements
64 and then for variables that must be replaced to satisfy data sharing
65 clauses. The second phase expands code for the constructs, as well as
66 re-gimplifying things when variables have been replaced with complex
67 expressions.
69 Final code generation is done by pass_expand_omp. The flowgraph is
70 scanned for regions which are then moved to a new
71 function, to be invoked by the thread library, or offloaded. */
/* Context structure.  Used to store information about each parallel
   directive in the code.  */

struct omp_context
{
  /* This field must be at the beginning, as we do "inheritance": Some
     callback functions for tree-inline.c (e.g., omp_copy_decl)
     receive a copy_body_data pointer that is up-casted to an
     omp_context pointer.  */
  copy_body_data cb;

  /* The tree of contexts corresponding to the encountered constructs.  */
  struct omp_context *outer;
  gimple *stmt;

  /* Map variables to fields in a structure that allows communication
     between sending and receiving threads.  */
  splay_tree field_map;
  tree record_type;
  tree sender_decl;
  tree receiver_decl;

  /* These are used just by task contexts, if task firstprivate fn is
     needed.  srecord_type is used to communicate from the thread
     that encountered the task construct to task firstprivate fn,
     record_type is allocated by GOMP_task, initialized by task firstprivate
     fn and passed to the task body fn.  */
  splay_tree sfield_map;
  tree srecord_type;

  /* A chain of variables to add to the top-level block surrounding the
     construct.  In the case of a parallel, this is in the child function.  */
  tree block_vars;

  /* Label to which GOMP_cancel{,llation_point} and explicit and implicit
     barriers should jump to during omplower pass.  */
  tree cancel_label;

  /* The sibling GIMPLE_OMP_FOR simd with _simt_ clause or NULL
     otherwise.  */
  gimple *simt_stmt;

  /* What to do with variables with implicitly determined sharing
     attributes.  */
  enum omp_clause_default_kind default_kind;

  /* Nesting depth of this context.  Used to beautify error messages re
     invalid gotos.  The outermost ctx is depth 1, with depth 0 being
     reserved for the main body of the function.  */
  int depth;

  /* True if this parallel directive is nested within another.  */
  bool is_nested;

  /* True if this construct can be cancelled.  */
  bool cancellable;
};
/* Splay tree of all omp_context structures in the current function,
   keyed by the GIMPLE statement of the construct.  */
static splay_tree all_contexts;
/* NOTE(review): presumably tracks how deeply nested the current
   parallel/task (taskreg) scan is — confirm in the scan_omp_* code.  */
static int taskreg_nesting_level;
/* NOTE(review): presumably tracks how deeply nested the current target
   region scan is — confirm in the scan_omp_* code.  */
static int target_nesting_level;
/* DECL_UIDs of variables that were made TREE_ADDRESSABLE only because a
   task needs to take their address (see use_pointer_for_field); consulted
   in omp_copy_decl_2 so privatized copies drop the addressable bit.  */
static bitmap task_shared_vars;
/* Contexts of parallel/task constructs collected during scanning,
   presumably for later fixup passes — confirm against the callers.  */
static vec<omp_context *> taskreg_contexts;

static void scan_omp (gimple_seq *, omp_context *);
static tree scan_omp_1_op (tree *, int *, void *);
/* Shared case labels for walk_gimple_stmt callbacks: these container
   statements carry no interesting operands themselves, but their
   sub-statements must be visited, so clear *HANDLED_OPS_P to make the
   walker descend.  Expands inside a switch on the gimple code.  */
#define WALK_SUBSTMTS \
  case GIMPLE_BIND: \
  case GIMPLE_TRY: \
  case GIMPLE_CATCH: \
  case GIMPLE_EH_FILTER: \
  case GIMPLE_TRANSACTION: \
    /* The sub-statements for these should be walked.  */ \
    *handled_ops_p = false; \
    break;
150 /* Return true if CTX corresponds to an oacc parallel region. */
152 static bool
153 is_oacc_parallel (omp_context *ctx)
155 enum gimple_code outer_type = gimple_code (ctx->stmt);
156 return ((outer_type == GIMPLE_OMP_TARGET)
157 && (gimple_omp_target_kind (ctx->stmt)
158 == GF_OMP_TARGET_KIND_OACC_PARALLEL));
161 /* Return true if CTX corresponds to an oacc kernels region. */
163 static bool
164 is_oacc_kernels (omp_context *ctx)
166 enum gimple_code outer_type = gimple_code (ctx->stmt);
167 return ((outer_type == GIMPLE_OMP_TARGET)
168 && (gimple_omp_target_kind (ctx->stmt)
169 == GF_OMP_TARGET_KIND_OACC_KERNELS));
/* If DECL is the artificial dummy VAR_DECL created for non-static
   data member privatization, return the underlying "this" parameter,
   otherwise return NULL.  */

tree
omp_member_access_dummy_var (tree decl)
{
  /* The dummy var is an artificial, ignored VAR_DECL carrying a
     DECL_VALUE_EXPR; anything else cannot be one.  */
  if (!VAR_P (decl)
      || !DECL_ARTIFICIAL (decl)
      || !DECL_IGNORED_P (decl)
      || !DECL_HAS_VALUE_EXPR_P (decl)
      || !lang_hooks.decls.omp_disregard_value_expr (decl, false))
    return NULL_TREE;

  tree v = DECL_VALUE_EXPR (decl);
  if (TREE_CODE (v) != COMPONENT_REF)
    return NULL_TREE;

  /* Strip the access path (component refs, dereferences, conversions,
     pointer arithmetic) down to its base; succeed only if that base is
     the artificial "this"-like pointer PARM_DECL of this function.  */
  while (1)
    switch (TREE_CODE (v))
      {
      case COMPONENT_REF:
      case MEM_REF:
      case INDIRECT_REF:
      CASE_CONVERT:
      case POINTER_PLUS_EXPR:
	v = TREE_OPERAND (v, 0);
	continue;
      case PARM_DECL:
	if (DECL_CONTEXT (v) == current_function_decl
	    && DECL_ARTIFICIAL (v)
	    && TREE_CODE (TREE_TYPE (v)) == POINTER_TYPE)
	  return v;
	return NULL_TREE;
      default:
	return NULL_TREE;
      }
}
211 /* Helper for unshare_and_remap, called through walk_tree. */
213 static tree
214 unshare_and_remap_1 (tree *tp, int *walk_subtrees, void *data)
216 tree *pair = (tree *) data;
217 if (*tp == pair[0])
219 *tp = unshare_expr (pair[1]);
220 *walk_subtrees = 0;
222 else if (IS_TYPE_OR_DECL_P (*tp))
223 *walk_subtrees = 0;
224 return NULL_TREE;
227 /* Return unshare_expr (X) with all occurrences of FROM
228 replaced with TO. */
230 static tree
231 unshare_and_remap (tree x, tree from, tree to)
233 tree pair[2] = { from, to };
234 x = unshare_expr (x);
235 walk_tree (&x, unshare_and_remap_1, pair, NULL);
236 return x;
/* Convenience function for calling scan_omp_1_op on tree operands.
   CTX is carried to the callback via the walk_stmt_info cookie;
   want_locations asks the walker to provide source locations.  */

static inline tree
scan_omp_op (tree *tp, omp_context *ctx)
{
  struct walk_stmt_info wi;

  memset (&wi, 0, sizeof (wi));
  wi.info = ctx;
  wi.want_locations = true;

  return walk_tree (tp, scan_omp_1_op, &wi, NULL);
}
/* Forward declarations of lowering helpers defined later in the file.  */
static void lower_omp (gimple_seq *, omp_context *);
static tree lookup_decl_in_outer_ctx (tree, omp_context *);
static tree maybe_lookup_decl_in_outer_ctx (tree, omp_context *);
257 /* Return true if CTX is for an omp parallel. */
259 static inline bool
260 is_parallel_ctx (omp_context *ctx)
262 return gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL;
266 /* Return true if CTX is for an omp task. */
268 static inline bool
269 is_task_ctx (omp_context *ctx)
271 return gimple_code (ctx->stmt) == GIMPLE_OMP_TASK;
275 /* Return true if CTX is for an omp taskloop. */
277 static inline bool
278 is_taskloop_ctx (omp_context *ctx)
280 return gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
281 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP;
285 /* Return true if CTX is for an omp parallel or omp task. */
287 static inline bool
288 is_taskreg_ctx (omp_context *ctx)
290 return is_parallel_ctx (ctx) || is_task_ctx (ctx);
293 /* Return true if EXPR is variable sized. */
295 static inline bool
296 is_variable_sized (const_tree expr)
298 return !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
301 /* Lookup variables. The "maybe" form
302 allows for the variable form to not have been entered, otherwise we
303 assert that the variable must have been entered. */
305 static inline tree
306 lookup_decl (tree var, omp_context *ctx)
308 tree *n = ctx->cb.decl_map->get (var);
309 return *n;
312 static inline tree
313 maybe_lookup_decl (const_tree var, omp_context *ctx)
315 tree *n = ctx->cb.decl_map->get (const_cast<tree> (var));
316 return n ? *n : NULL_TREE;
319 static inline tree
320 lookup_field (tree var, omp_context *ctx)
322 splay_tree_node n;
323 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) var);
324 return (tree) n->value;
327 static inline tree
328 lookup_sfield (splay_tree_key key, omp_context *ctx)
330 splay_tree_node n;
331 n = splay_tree_lookup (ctx->sfield_map
332 ? ctx->sfield_map : ctx->field_map, key);
333 return (tree) n->value;
336 static inline tree
337 lookup_sfield (tree var, omp_context *ctx)
339 return lookup_sfield ((splay_tree_key) var, ctx);
342 static inline tree
343 maybe_lookup_field (splay_tree_key key, omp_context *ctx)
345 splay_tree_node n;
346 n = splay_tree_lookup (ctx->field_map, key);
347 return n ? (tree) n->value : NULL_TREE;
350 static inline tree
351 maybe_lookup_field (tree var, omp_context *ctx)
353 return maybe_lookup_field ((splay_tree_key) var, ctx);
/* Return true if DECL should be copied by pointer.  SHARED_CTX is
   the parallel context if DECL is to be shared.  Returning false means
   copy-in/copy-out (by value) semantics are safe.  As a side effect,
   may mark the outer variable addressable and record it in
   task_shared_vars (see the goto target below).  */

static bool
use_pointer_for_field (tree decl, omp_context *shared_ctx)
{
  /* Aggregates and atomics are always passed by reference.  */
  if (AGGREGATE_TYPE_P (TREE_TYPE (decl))
      || TYPE_ATOMIC (TREE_TYPE (decl)))
    return true;

  /* We can only use copy-in/copy-out semantics for shared variables
     when we know the value is not accessible from an outer scope.  */
  if (shared_ctx)
    {
      gcc_assert (!is_gimple_omp_oacc (shared_ctx->stmt));

      /* ??? Trivially accessible from anywhere.  But why would we even
	 be passing an address in this case?  Should we simply assert
	 this to be false, or should we have a cleanup pass that removes
	 these from the list of mappings?  */
      if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
	return true;

      /* For variables with DECL_HAS_VALUE_EXPR_P set, we cannot tell
	 without analyzing the expression whether or not its location
	 is accessible to anyone else.  In the case of nested parallel
	 regions it certainly may be.  */
      if (TREE_CODE (decl) != RESULT_DECL && DECL_HAS_VALUE_EXPR_P (decl))
	return true;

      /* Do not use copy-in/copy-out for variables that have their
	 address taken.  */
      if (TREE_ADDRESSABLE (decl))
	return true;

      /* lower_send_shared_vars only uses copy-in, but not copy-out
	 for these.  */
      if (TREE_READONLY (decl)
	  || ((TREE_CODE (decl) == RESULT_DECL
	       || TREE_CODE (decl) == PARM_DECL)
	      && DECL_BY_REFERENCE (decl)))
	return false;

      /* Disallow copy-in/out in nested parallel if
	 decl is shared in outer parallel, otherwise
	 each thread could store the shared variable
	 in its own copy-in location, making the
	 variable no longer really shared.  */
      if (shared_ctx->is_nested)
	{
	  omp_context *up;

	  /* Find the nearest enclosing parallel/task that maps DECL.  */
	  for (up = shared_ctx->outer; up; up = up->outer)
	    if (is_taskreg_ctx (up) && maybe_lookup_decl (decl, up))
	      break;

	  if (up)
	    {
	      tree c;

	      /* Only force by-reference if DECL is explicitly shared
		 in that outer construct.  */
	      for (c = gimple_omp_taskreg_clauses (up->stmt);
		   c; c = OMP_CLAUSE_CHAIN (c))
		if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
		    && OMP_CLAUSE_DECL (c) == decl)
		  break;

	      if (c)
		goto maybe_mark_addressable_and_ret;
	    }
	}

      /* For tasks avoid using copy-in/out.  As tasks can be
	 deferred or executed in different thread, when GOMP_task
	 returns, the task hasn't necessarily terminated.  */
      if (is_task_ctx (shared_ctx))
	{
	  tree outer;
	maybe_mark_addressable_and_ret:
	  /* NB: also reached by goto from the nested-parallel case above,
	     even when SHARED_CTX is not a task.  */
	  outer = maybe_lookup_decl_in_outer_ctx (decl, shared_ctx);
	  if (is_gimple_reg (outer) && !omp_member_access_dummy_var (outer))
	    {
	      /* Taking address of OUTER in lower_send_shared_vars
		 might need regimplification of everything that uses the
		 variable.  */
	      if (!task_shared_vars)
		task_shared_vars = BITMAP_ALLOC (NULL);
	      bitmap_set_bit (task_shared_vars, DECL_UID (outer));
	      TREE_ADDRESSABLE (outer) = 1;
	    }
	  return true;
	}
    }

  return false;
}
/* Construct a new automatic decl similar to VAR, named NAME with type
   TYPE, and chain it onto CTX->block_vars.  Returns the new decl.  */

static tree
omp_copy_decl_2 (tree var, tree name, tree type, omp_context *ctx)
{
  tree copy = copy_var_decl (var, name, type);

  DECL_CONTEXT (copy) = current_function_decl;
  DECL_CHAIN (copy) = ctx->block_vars;
  /* If VAR is listed in task_shared_vars, it means it wasn't
     originally addressable and is just because task needs to take
     it's address.  But we don't need to take address of privatizations
     from that var.  */
  if (TREE_ADDRESSABLE (var)
      && task_shared_vars
      && bitmap_bit_p (task_shared_vars, DECL_UID (var)))
    TREE_ADDRESSABLE (copy) = 0;
  ctx->block_vars = copy;

  return copy;
}
474 static tree
475 omp_copy_decl_1 (tree var, omp_context *ctx)
477 return omp_copy_decl_2 (var, DECL_NAME (var), TREE_TYPE (var), ctx);
480 /* Build COMPONENT_REF and set TREE_THIS_VOLATILE and TREE_READONLY on it
481 as appropriate. */
482 static tree
483 omp_build_component_ref (tree obj, tree field)
485 tree ret = build3 (COMPONENT_REF, TREE_TYPE (field), obj, field, NULL);
486 if (TREE_THIS_VOLATILE (field))
487 TREE_THIS_VOLATILE (ret) |= 1;
488 if (TREE_READONLY (field))
489 TREE_READONLY (ret) |= 1;
490 return ret;
/* Build tree nodes to access the field for VAR on the receiver side,
   i.e. *receiver_decl.field, with an extra dereference when BY_REF.
   The dereferences are marked non-trapping since the runtime always
   passes a valid data block.  */

static tree
build_receiver_ref (tree var, bool by_ref, omp_context *ctx)
{
  tree x, field = lookup_field (var, ctx);

  /* If the receiver record type was remapped in the child function,
     remap the field into the new record type.  */
  x = maybe_lookup_field (field, ctx);
  if (x != NULL)
    field = x;

  x = build_simple_mem_ref (ctx->receiver_decl);
  TREE_THIS_NOTRAP (x) = 1;
  x = omp_build_component_ref (x, field);
  if (by_ref)
    {
      x = build_simple_mem_ref (x);
      TREE_THIS_NOTRAP (x) = 1;
    }

  return x;
}
/* Build tree nodes to access VAR in the scope outer to CTX.  In the case
   of a parallel, this is a component reference; for workshare constructs
   this is some variable.  CODE, when not OMP_CLAUSE_ERROR, identifies
   which clause kind the reference is being built for and selects the
   special lastprivate-on-taskloop and private-with-outer-ref paths.  */

static tree
build_outer_var_ref (tree var, omp_context *ctx,
		     enum omp_clause_code code = OMP_CLAUSE_ERROR)
{
  tree x;

  if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
    x = var;
  else if (is_variable_sized (var))
    {
      /* Variable-sized vars live behind a pointer stored in their
	 DECL_VALUE_EXPR; recurse on that pointer and dereference.  */
      x = TREE_OPERAND (DECL_VALUE_EXPR (var), 0);
      x = build_outer_var_ref (x, ctx, code);
      x = build_simple_mem_ref (x);
    }
  else if (is_taskreg_ctx (ctx))
    {
      bool by_ref = use_pointer_for_field (var, NULL);
      x = build_receiver_ref (var, by_ref, ctx);
    }
  else if ((gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
	    && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
	   || (code == OMP_CLAUSE_PRIVATE
	       && (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
		   || gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS
		   || gimple_code (ctx->stmt) == GIMPLE_OMP_SINGLE)))
    {
      /* #pragma omp simd isn't a worksharing construct, and can reference
	 even private vars in its linear etc. clauses.
	 Similarly for OMP_CLAUSE_PRIVATE with outer ref, that can refer
	 to private vars in all worksharing constructs.  */
      x = NULL_TREE;
      if (ctx->outer && is_taskreg_ctx (ctx))
	x = lookup_decl (var, ctx->outer);
      else if (ctx->outer)
	x = maybe_lookup_decl_in_outer_ctx (var, ctx);
      if (x == NULL_TREE)
	x = var;
    }
  else if (code == OMP_CLAUSE_LASTPRIVATE && is_taskloop_ctx (ctx))
    {
      gcc_assert (ctx->outer);
      /* Taskloop fields are keyed by &DECL_UID (var) rather than the
	 decl itself (see install_var_field's mask & 8 path).  */
      splay_tree_node n
	= splay_tree_lookup (ctx->outer->field_map,
			     (splay_tree_key) &DECL_UID (var));
      if (n == NULL)
	{
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (var,
							     ctx->outer)))
	    x = var;
	  else
	    x = lookup_decl (var, ctx->outer);
	}
      else
	{
	  tree field = (tree) n->value;
	  /* If the receiver record type was remapped in the child function,
	     remap the field into the new record type.  */
	  x = maybe_lookup_field (field, ctx->outer);
	  if (x != NULL)
	    field = x;

	  x = build_simple_mem_ref (ctx->outer->receiver_decl);
	  x = omp_build_component_ref (x, field);
	  if (use_pointer_for_field (var, ctx->outer))
	    x = build_simple_mem_ref (x);
	}
    }
  else if (ctx->outer)
    {
      omp_context *outer = ctx->outer;
      if (gimple_code (outer->stmt) == GIMPLE_OMP_GRID_BODY)
	{
	  /* Grid body contexts carry no decl mappings of their own;
	     skip to the enclosing real construct.  */
	  outer = outer->outer;
	  gcc_assert (outer
		      && gimple_code (outer->stmt) != GIMPLE_OMP_GRID_BODY);
	}
      x = lookup_decl (var, outer);
    }
  else if (omp_is_reference (var))
    /* This can happen with orphaned constructs.  If var is reference, it is
       possible it is shared and as such valid.  */
    x = var;
  else if (omp_member_access_dummy_var (var))
    x = var;
  else
    gcc_unreachable ();

  if (x == var)
    {
      /* For member-access dummy vars, substitute the value expression,
	 remapping the underlying "this" parameter if the outer scope
	 maps it to something else.  */
      tree t = omp_member_access_dummy_var (var);
      if (t)
	{
	  x = DECL_VALUE_EXPR (var);
	  tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
	  if (o != t)
	    x = unshare_and_remap (x, t, o);
	  else
	    x = unshare_expr (x);
	}
    }

  if (omp_is_reference (var))
    x = build_simple_mem_ref (x);

  return x;
}
628 /* Build tree nodes to access the field for VAR on the sender side. */
630 static tree
631 build_sender_ref (splay_tree_key key, omp_context *ctx)
633 tree field = lookup_sfield (key, ctx);
634 return omp_build_component_ref (ctx->sender_decl, field);
637 static tree
638 build_sender_ref (tree var, omp_context *ctx)
640 return build_sender_ref ((splay_tree_key) var, ctx);
/* Add a new field for VAR inside the structure CTX->SENDER_DECL.  If
   BASE_POINTERS_RESTRICT, declare the field with restrict.
   MASK is a bit set:
     bit 0 - field goes into record_type / field_map;
     bit 1 - field goes into srecord_type / sfield_map;
     bit 2 - VAR is an array passed as pointer-to-pointer;
     bit 3 - key the maps by &DECL_UID (VAR) instead of VAR itself
	     (used when the same decl needs two distinct entries).  */

static void
install_var_field (tree var, bool by_ref, int mask, omp_context *ctx,
		   bool base_pointers_restrict = false)
{
  tree field, type, sfield = NULL_TREE;
  splay_tree_key key = (splay_tree_key) var;

  if ((mask & 8) != 0)
    {
      key = (splay_tree_key) &DECL_UID (var);
      gcc_checking_assert (key != (splay_tree_key) var);
    }
  /* Each key may be installed in each map at most once.  */
  gcc_assert ((mask & 1) == 0
	      || !splay_tree_lookup (ctx->field_map, key));
  gcc_assert ((mask & 2) == 0 || !ctx->sfield_map
	      || !splay_tree_lookup (ctx->sfield_map, key));
  gcc_assert ((mask & 3) == 3
	      || !is_gimple_omp_oacc (ctx->stmt));

  type = TREE_TYPE (var);
  /* Prevent redeclaring the var in the split-off function with a restrict
     pointer type.  Note that we only clear type itself, restrict qualifiers in
     the pointed-to type will be ignored by points-to analysis.  */
  if (POINTER_TYPE_P (type)
      && TYPE_RESTRICT (type))
    type = build_qualified_type (type, TYPE_QUALS (type) & ~TYPE_QUAL_RESTRICT);

  if (mask & 4)
    {
      gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
      type = build_pointer_type (build_pointer_type (type));
    }
  else if (by_ref)
    {
      type = build_pointer_type (type);
      if (base_pointers_restrict)
	type = build_qualified_type (type, TYPE_QUAL_RESTRICT);
    }
  else if ((mask & 3) == 1 && omp_is_reference (var))
    type = TREE_TYPE (type);

  field = build_decl (DECL_SOURCE_LOCATION (var),
		      FIELD_DECL, DECL_NAME (var), type);

  /* Remember what variable this field was created for.  This does have a
     side effect of making dwarf2out ignore this member, so for helpful
     debugging we clear it later in delete_omp_context.  */
  DECL_ABSTRACT_ORIGIN (field) = var;
  if (type == TREE_TYPE (var))
    {
      SET_DECL_ALIGN (field, DECL_ALIGN (var));
      DECL_USER_ALIGN (field) = DECL_USER_ALIGN (var);
      TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (var);
    }
  else
    SET_DECL_ALIGN (field, TYPE_ALIGN (type));

  if ((mask & 3) == 3)
    {
      /* Field goes into both records; build a parallel sfield when the
	 srecord exists.  */
      insert_field_into_struct (ctx->record_type, field);
      if (ctx->srecord_type)
	{
	  sfield = build_decl (DECL_SOURCE_LOCATION (var),
			       FIELD_DECL, DECL_NAME (var), type);
	  DECL_ABSTRACT_ORIGIN (sfield) = var;
	  SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
	  DECL_USER_ALIGN (sfield) = DECL_USER_ALIGN (field);
	  TREE_THIS_VOLATILE (sfield) = TREE_THIS_VOLATILE (field);
	  insert_field_into_struct (ctx->srecord_type, sfield);
	}
    }
  else
    {
      if (ctx->srecord_type == NULL_TREE)
	{
	  tree t;

	  /* Lazily create the srecord_type, mirroring every field
	     already installed in record_type.  */
	  ctx->srecord_type = lang_hooks.types.make_type (RECORD_TYPE);
	  ctx->sfield_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
	  for (t = TYPE_FIELDS (ctx->record_type); t ; t = TREE_CHAIN (t))
	    {
	      sfield = build_decl (DECL_SOURCE_LOCATION (t),
				   FIELD_DECL, DECL_NAME (t), TREE_TYPE (t));
	      DECL_ABSTRACT_ORIGIN (sfield) = DECL_ABSTRACT_ORIGIN (t);
	      insert_field_into_struct (ctx->srecord_type, sfield);
	      splay_tree_insert (ctx->sfield_map,
				 (splay_tree_key) DECL_ABSTRACT_ORIGIN (t),
				 (splay_tree_value) sfield);
	    }
	}
      sfield = field;
      insert_field_into_struct ((mask & 1) ? ctx->record_type
				: ctx->srecord_type, field);
    }

  if (mask & 1)
    splay_tree_insert (ctx->field_map, key, (splay_tree_value) field);
  if ((mask & 2) && ctx->sfield_map)
    splay_tree_insert (ctx->sfield_map, key, (splay_tree_value) sfield);
}
747 static tree
748 install_var_local (tree var, omp_context *ctx)
750 tree new_var = omp_copy_decl_1 (var, ctx);
751 insert_decl_map (&ctx->cb, var, new_var);
752 return new_var;
/* Adjust the replacement for DECL in CTX for the new context.  This means
   copying the DECL_VALUE_EXPR, and fixing up the type.  */

static void
fixup_remapped_decl (tree decl, omp_context *ctx, bool private_debug)
{
  tree new_decl, size;

  new_decl = lookup_decl (decl, ctx);

  TREE_TYPE (new_decl) = remap_type (TREE_TYPE (decl), &ctx->cb);

  if ((!TREE_CONSTANT (DECL_SIZE (new_decl)) || private_debug)
      && DECL_HAS_VALUE_EXPR_P (decl))
    {
      tree ve = DECL_VALUE_EXPR (decl);
      /* Remap any references inside the value expression into the new
	 context before attaching it to the new decl.  */
      walk_tree (&ve, copy_tree_body_r, &ctx->cb, NULL);
      SET_DECL_VALUE_EXPR (new_decl, ve);
      DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
    }

  if (!TREE_CONSTANT (DECL_SIZE (new_decl)))
    {
      /* Variable-sized decl: remap its size expressions, falling back to
	 the (remapped) type's sizes if remapping failed.  */
      size = remap_decl (DECL_SIZE (decl), &ctx->cb);
      if (size == error_mark_node)
	size = TYPE_SIZE (TREE_TYPE (new_decl));
      DECL_SIZE (new_decl) = size;

      size = remap_decl (DECL_SIZE_UNIT (decl), &ctx->cb);
      if (size == error_mark_node)
	size = TYPE_SIZE_UNIT (TREE_TYPE (new_decl));
      DECL_SIZE_UNIT (new_decl) = size;
    }
}
/* The callback for remap_decl.  Search all containing contexts for a
   mapping of the variable; this avoids having to duplicate the splay
   tree ahead of time.  We know a mapping doesn't already exist in the
   given context.  Create new mappings to implement default semantics.  */

static tree
omp_copy_decl (tree var, copy_body_data *cb)
{
  /* CB is really the omp_context whose cb member is its first field;
     see the comment on omp_context::cb.  */
  omp_context *ctx = (omp_context *) cb;
  tree new_var;

  if (TREE_CODE (var) == LABEL_DECL)
    {
      /* Labels are always recreated in the child function.  */
      new_var = create_artificial_label (DECL_SOURCE_LOCATION (var));
      DECL_CONTEXT (new_var) = current_function_decl;
      insert_decl_map (&ctx->cb, var, new_var);
      return new_var;
    }

  /* Walk outward through enclosing contexts until we hit a parallel or
     task; return any existing mapping found on the way.  */
  while (!is_taskreg_ctx (ctx))
    {
      ctx = ctx->outer;
      if (ctx == NULL)
	return var;
      new_var = maybe_lookup_decl (var, ctx);
      if (new_var)
	return new_var;
    }

  if (is_global_var (var) || decl_function_context (var) != ctx->cb.src_fn)
    return var;

  /* A local of the source function with no mapping: the caller must
     handle this case; signal it with error_mark_node.  */
  return error_mark_node;
}
/* Create a new context, with OUTER_CTX being the surrounding context.
   The new context is registered in ALL_CONTEXTS keyed by STMT, inherits
   the copy_body_data of OUTER_CTX when present, and otherwise gets a
   fresh identity mapping for the current function.  */

static omp_context *
new_omp_context (gimple *stmt, omp_context *outer_ctx)
{
  omp_context *ctx = XCNEW (omp_context);

  splay_tree_insert (all_contexts, (splay_tree_key) stmt,
		     (splay_tree_value) ctx);
  ctx->stmt = stmt;

  if (outer_ctx)
    {
      ctx->outer = outer_ctx;
      ctx->cb = outer_ctx->cb;
      ctx->cb.block = NULL;
      ctx->depth = outer_ctx->depth + 1;
    }
  else
    {
      /* Outermost context: source and destination are both the current
	 function until the region is split off later.  */
      ctx->cb.src_fn = current_function_decl;
      ctx->cb.dst_fn = current_function_decl;
      ctx->cb.src_node = cgraph_node::get (current_function_decl);
      gcc_checking_assert (ctx->cb.src_node);
      ctx->cb.dst_node = ctx->cb.src_node;
      ctx->cb.src_cfun = cfun;
      ctx->cb.copy_decl = omp_copy_decl;
      ctx->cb.eh_lp_nr = 0;
      ctx->cb.transform_call_graph_edges = CB_CGE_MOVE;
      ctx->depth = 1;
    }

  /* Each context gets its own decl map, even when it inherits the rest
     of the copy_body_data from its parent.  */
  ctx->cb.decl_map = new hash_map<tree, tree>;

  return ctx;
}
862 static gimple_seq maybe_catch_exception (gimple_seq);
/* Finalize task copyfn: gimplify the task's firstprivate-copy function,
   wrap it for EH if needed, and register it with the callgraph.  No-op
   when the task has no copy function.  */

static void
finalize_task_copyfn (gomp_task *task_stmt)
{
  struct function *child_cfun;
  tree child_fn;
  gimple_seq seq = NULL, new_seq;
  gbind *bind;

  child_fn = gimple_omp_task_copy_fn (task_stmt);
  if (child_fn == NULL_TREE)
    return;

  child_cfun = DECL_STRUCT_FUNCTION (child_fn);
  DECL_STRUCT_FUNCTION (child_fn)->curr_properties = cfun->curr_properties;

  /* Gimplify the copy function's body in its own cfun.  */
  push_cfun (child_cfun);
  bind = gimplify_body (child_fn, false);
  gimple_seq_add_stmt (&seq, bind);
  new_seq = maybe_catch_exception (seq);
  if (new_seq != seq)
    {
      /* maybe_catch_exception wrapped the body; rebind it.  */
      bind = gimple_build_bind (NULL, new_seq, NULL);
      seq = NULL;
      gimple_seq_add_stmt (&seq, bind);
    }
  gimple_set_body (child_fn, seq);
  pop_cfun ();

  /* Inform the callgraph about the new function.  */
  cgraph_node *node = cgraph_node::get_create (child_fn);
  node->parallelized_function = 1;
  cgraph_node::add_new_function (child_fn, false);
}
/* Destroy a omp_context data structures.  Called through the splay tree
   value delete callback.  */

static void
delete_omp_context (splay_tree_value value)
{
  omp_context *ctx = (omp_context *) value;

  delete ctx->cb.decl_map;

  if (ctx->field_map)
    splay_tree_delete (ctx->field_map);
  if (ctx->sfield_map)
    splay_tree_delete (ctx->sfield_map);

  /* We hijacked DECL_ABSTRACT_ORIGIN earlier.  We need to clear it before
     it produces corrupt debug information.  */
  if (ctx->record_type)
    {
      tree t;
      for (t = TYPE_FIELDS (ctx->record_type); t ; t = DECL_CHAIN (t))
	DECL_ABSTRACT_ORIGIN (t) = NULL;
    }
  if (ctx->srecord_type)
    {
      tree t;
      for (t = TYPE_FIELDS (ctx->srecord_type); t ; t = DECL_CHAIN (t))
	DECL_ABSTRACT_ORIGIN (t) = NULL;
    }

  /* Task contexts finalize their copy function on teardown.  */
  if (is_task_ctx (ctx))
    finalize_task_copyfn (as_a <gomp_task *> (ctx->stmt));

  XDELETE (ctx);
}
/* Fix up RECEIVER_DECL with a type that has been remapped to the child
   context.  */

static void
fixup_child_record_type (omp_context *ctx)
{
  tree f, type = ctx->record_type;

  if (!ctx->receiver_decl)
    return;
  /* ??? It isn't sufficient to just call remap_type here, because
     variably_modified_type_p doesn't work the way we expect for
     record types.  Testing each field for whether it needs remapping
     and creating a new record by hand works, however.  */
  for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
    if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
      break;
  if (f)
    {
      /* At least one field needs remapping: rebuild the whole record.  */
      tree name, new_fields = NULL;

      type = lang_hooks.types.make_type (RECORD_TYPE);
      name = DECL_NAME (TYPE_NAME (ctx->record_type));
      name = build_decl (DECL_SOURCE_LOCATION (ctx->receiver_decl),
			 TYPE_DECL, name, type);
      TYPE_NAME (type) = name;

      for (f = TYPE_FIELDS (ctx->record_type); f ; f = DECL_CHAIN (f))
	{
	  tree new_f = copy_node (f);
	  DECL_CONTEXT (new_f) = type;
	  TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &ctx->cb);
	  DECL_CHAIN (new_f) = new_fields;
	  walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &ctx->cb, NULL);
	  walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r,
		     &ctx->cb, NULL);
	  walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
		     &ctx->cb, NULL);
	  new_fields = new_f;

	  /* Arrange to be able to look up the receiver field
	     given the sender field.  */
	  splay_tree_insert (ctx->field_map, (splay_tree_key) f,
			     (splay_tree_value) new_f);
	}
      TYPE_FIELDS (type) = nreverse (new_fields);
      layout_type (type);
    }

  /* In a target region we never modify any of the pointers in *.omp_data_i,
     so attempt to help the optimizers.  */
  if (is_gimple_omp_offloaded (ctx->stmt))
    type = build_qualified_type (type, TYPE_QUAL_CONST);

  TREE_TYPE (ctx->receiver_decl)
    = build_qualified_type (build_reference_type (type), TYPE_QUAL_RESTRICT);
}
994 /* Instantiate decls as necessary in CTX to satisfy the data sharing
995 specified by CLAUSES. If BASE_POINTERS_RESTRICT, install var field with
996 restrict. */
998 static void
999 scan_sharing_clauses (tree clauses, omp_context *ctx,
1000 bool base_pointers_restrict = false)
1002 tree c, decl;
1003 bool scan_array_reductions = false;
1005 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1007 bool by_ref;
1009 switch (OMP_CLAUSE_CODE (c))
1011 case OMP_CLAUSE_PRIVATE:
1012 decl = OMP_CLAUSE_DECL (c);
1013 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
1014 goto do_private;
1015 else if (!is_variable_sized (decl))
1016 install_var_local (decl, ctx);
1017 break;
1019 case OMP_CLAUSE_SHARED:
1020 decl = OMP_CLAUSE_DECL (c);
1021 /* Ignore shared directives in teams construct. */
1022 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
1024 /* Global variables don't need to be copied,
1025 the receiver side will use them directly. */
1026 tree odecl = maybe_lookup_decl_in_outer_ctx (decl, ctx);
1027 if (is_global_var (odecl))
1028 break;
1029 insert_decl_map (&ctx->cb, decl, odecl);
1030 break;
1032 gcc_assert (is_taskreg_ctx (ctx));
1033 gcc_assert (!COMPLETE_TYPE_P (TREE_TYPE (decl))
1034 || !is_variable_sized (decl));
1035 /* Global variables don't need to be copied,
1036 the receiver side will use them directly. */
1037 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1038 break;
1039 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
1041 use_pointer_for_field (decl, ctx);
1042 break;
1044 by_ref = use_pointer_for_field (decl, NULL);
1045 if ((! TREE_READONLY (decl) && !OMP_CLAUSE_SHARED_READONLY (c))
1046 || TREE_ADDRESSABLE (decl)
1047 || by_ref
1048 || omp_is_reference (decl))
1050 by_ref = use_pointer_for_field (decl, ctx);
1051 install_var_field (decl, by_ref, 3, ctx);
1052 install_var_local (decl, ctx);
1053 break;
1055 /* We don't need to copy const scalar vars back. */
1056 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_FIRSTPRIVATE);
1057 goto do_private;
1059 case OMP_CLAUSE_REDUCTION:
1060 decl = OMP_CLAUSE_DECL (c);
1061 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
1062 && TREE_CODE (decl) == MEM_REF)
1064 tree t = TREE_OPERAND (decl, 0);
1065 if (TREE_CODE (t) == POINTER_PLUS_EXPR)
1066 t = TREE_OPERAND (t, 0);
1067 if (TREE_CODE (t) == INDIRECT_REF
1068 || TREE_CODE (t) == ADDR_EXPR)
1069 t = TREE_OPERAND (t, 0);
1070 install_var_local (t, ctx);
1071 if (is_taskreg_ctx (ctx)
1072 && !is_global_var (maybe_lookup_decl_in_outer_ctx (t, ctx))
1073 && !is_variable_sized (t))
1075 by_ref = use_pointer_for_field (t, ctx);
1076 install_var_field (t, by_ref, 3, ctx);
1078 break;
1080 goto do_private;
1082 case OMP_CLAUSE_LASTPRIVATE:
1083 /* Let the corresponding firstprivate clause create
1084 the variable. */
1085 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
1086 break;
1087 /* FALLTHRU */
1089 case OMP_CLAUSE_FIRSTPRIVATE:
1090 case OMP_CLAUSE_LINEAR:
1091 decl = OMP_CLAUSE_DECL (c);
1092 do_private:
1093 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
1094 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
1095 && is_gimple_omp_offloaded (ctx->stmt))
1097 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
1098 install_var_field (decl, !omp_is_reference (decl), 3, ctx);
1099 else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1100 install_var_field (decl, true, 3, ctx);
1101 else
1102 install_var_field (decl, false, 3, ctx);
1104 if (is_variable_sized (decl))
1106 if (is_task_ctx (ctx))
1107 install_var_field (decl, false, 1, ctx);
1108 break;
1110 else if (is_taskreg_ctx (ctx))
1112 bool global
1113 = is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx));
1114 by_ref = use_pointer_for_field (decl, NULL);
1116 if (is_task_ctx (ctx)
1117 && (global || by_ref || omp_is_reference (decl)))
1119 install_var_field (decl, false, 1, ctx);
1120 if (!global)
1121 install_var_field (decl, by_ref, 2, ctx);
1123 else if (!global)
1124 install_var_field (decl, by_ref, 3, ctx);
1126 install_var_local (decl, ctx);
1127 break;
1129 case OMP_CLAUSE_USE_DEVICE_PTR:
1130 decl = OMP_CLAUSE_DECL (c);
1131 if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1132 install_var_field (decl, true, 3, ctx);
1133 else
1134 install_var_field (decl, false, 3, ctx);
1135 if (DECL_SIZE (decl)
1136 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1138 tree decl2 = DECL_VALUE_EXPR (decl);
1139 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1140 decl2 = TREE_OPERAND (decl2, 0);
1141 gcc_assert (DECL_P (decl2));
1142 install_var_local (decl2, ctx);
1144 install_var_local (decl, ctx);
1145 break;
1147 case OMP_CLAUSE_IS_DEVICE_PTR:
1148 decl = OMP_CLAUSE_DECL (c);
1149 goto do_private;
1151 case OMP_CLAUSE__LOOPTEMP_:
1152 gcc_assert (is_taskreg_ctx (ctx));
1153 decl = OMP_CLAUSE_DECL (c);
1154 install_var_field (decl, false, 3, ctx);
1155 install_var_local (decl, ctx);
1156 break;
1158 case OMP_CLAUSE_COPYPRIVATE:
1159 case OMP_CLAUSE_COPYIN:
1160 decl = OMP_CLAUSE_DECL (c);
1161 by_ref = use_pointer_for_field (decl, NULL);
1162 install_var_field (decl, by_ref, 3, ctx);
1163 break;
1165 case OMP_CLAUSE_DEFAULT:
1166 ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
1167 break;
1169 case OMP_CLAUSE_FINAL:
1170 case OMP_CLAUSE_IF:
1171 case OMP_CLAUSE_NUM_THREADS:
1172 case OMP_CLAUSE_NUM_TEAMS:
1173 case OMP_CLAUSE_THREAD_LIMIT:
1174 case OMP_CLAUSE_DEVICE:
1175 case OMP_CLAUSE_SCHEDULE:
1176 case OMP_CLAUSE_DIST_SCHEDULE:
1177 case OMP_CLAUSE_DEPEND:
1178 case OMP_CLAUSE_PRIORITY:
1179 case OMP_CLAUSE_GRAINSIZE:
1180 case OMP_CLAUSE_NUM_TASKS:
1181 case OMP_CLAUSE__CILK_FOR_COUNT_:
1182 case OMP_CLAUSE_NUM_GANGS:
1183 case OMP_CLAUSE_NUM_WORKERS:
1184 case OMP_CLAUSE_VECTOR_LENGTH:
1185 if (ctx->outer)
1186 scan_omp_op (&OMP_CLAUSE_OPERAND (c, 0), ctx->outer);
1187 break;
1189 case OMP_CLAUSE_TO:
1190 case OMP_CLAUSE_FROM:
1191 case OMP_CLAUSE_MAP:
1192 if (ctx->outer)
1193 scan_omp_op (&OMP_CLAUSE_SIZE (c), ctx->outer);
1194 decl = OMP_CLAUSE_DECL (c);
1195 /* Global variables with "omp declare target" attribute
1196 don't need to be copied, the receiver side will use them
1197 directly. However, global variables with "omp declare target link"
1198 attribute need to be copied. */
1199 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1200 && DECL_P (decl)
1201 && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
1202 && (OMP_CLAUSE_MAP_KIND (c)
1203 != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
1204 || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1205 && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
1206 && varpool_node::get_create (decl)->offloadable
1207 && !lookup_attribute ("omp declare target link",
1208 DECL_ATTRIBUTES (decl)))
1209 break;
1210 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1211 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER)
1213 /* Ignore GOMP_MAP_POINTER kind for arrays in regions that are
1214 not offloaded; there is nothing to map for those. */
1215 if (!is_gimple_omp_offloaded (ctx->stmt)
1216 && !POINTER_TYPE_P (TREE_TYPE (decl))
1217 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
1218 break;
1220 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1221 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
1222 || (OMP_CLAUSE_MAP_KIND (c)
1223 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
1225 if (TREE_CODE (decl) == COMPONENT_REF
1226 || (TREE_CODE (decl) == INDIRECT_REF
1227 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
1228 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
1229 == REFERENCE_TYPE)))
1230 break;
1231 if (DECL_SIZE (decl)
1232 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1234 tree decl2 = DECL_VALUE_EXPR (decl);
1235 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1236 decl2 = TREE_OPERAND (decl2, 0);
1237 gcc_assert (DECL_P (decl2));
1238 install_var_local (decl2, ctx);
1240 install_var_local (decl, ctx);
1241 break;
1243 if (DECL_P (decl))
1245 if (DECL_SIZE (decl)
1246 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1248 tree decl2 = DECL_VALUE_EXPR (decl);
1249 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1250 decl2 = TREE_OPERAND (decl2, 0);
1251 gcc_assert (DECL_P (decl2));
1252 install_var_field (decl2, true, 3, ctx);
1253 install_var_local (decl2, ctx);
1254 install_var_local (decl, ctx);
1256 else
1258 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1259 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
1260 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
1261 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1262 install_var_field (decl, true, 7, ctx);
1263 else
1264 install_var_field (decl, true, 3, ctx,
1265 base_pointers_restrict);
1266 if (is_gimple_omp_offloaded (ctx->stmt)
1267 && !OMP_CLAUSE_MAP_IN_REDUCTION (c))
1268 install_var_local (decl, ctx);
1271 else
1273 tree base = get_base_address (decl);
1274 tree nc = OMP_CLAUSE_CHAIN (c);
1275 if (DECL_P (base)
1276 && nc != NULL_TREE
1277 && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
1278 && OMP_CLAUSE_DECL (nc) == base
1279 && OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_POINTER
1280 && integer_zerop (OMP_CLAUSE_SIZE (nc)))
1282 OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c) = 1;
1283 OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (nc) = 1;
1285 else
1287 if (ctx->outer)
1289 scan_omp_op (&OMP_CLAUSE_DECL (c), ctx->outer);
1290 decl = OMP_CLAUSE_DECL (c);
1292 gcc_assert (!splay_tree_lookup (ctx->field_map,
1293 (splay_tree_key) decl));
1294 tree field
1295 = build_decl (OMP_CLAUSE_LOCATION (c),
1296 FIELD_DECL, NULL_TREE, ptr_type_node);
1297 SET_DECL_ALIGN (field, TYPE_ALIGN (ptr_type_node));
1298 insert_field_into_struct (ctx->record_type, field);
1299 splay_tree_insert (ctx->field_map, (splay_tree_key) decl,
1300 (splay_tree_value) field);
1303 break;
1305 case OMP_CLAUSE__GRIDDIM_:
1306 if (ctx->outer)
1308 scan_omp_op (&OMP_CLAUSE__GRIDDIM__SIZE (c), ctx->outer);
1309 scan_omp_op (&OMP_CLAUSE__GRIDDIM__GROUP (c), ctx->outer);
1311 break;
1313 case OMP_CLAUSE_NOWAIT:
1314 case OMP_CLAUSE_ORDERED:
1315 case OMP_CLAUSE_COLLAPSE:
1316 case OMP_CLAUSE_UNTIED:
1317 case OMP_CLAUSE_MERGEABLE:
1318 case OMP_CLAUSE_PROC_BIND:
1319 case OMP_CLAUSE_SAFELEN:
1320 case OMP_CLAUSE_SIMDLEN:
1321 case OMP_CLAUSE_THREADS:
1322 case OMP_CLAUSE_SIMD:
1323 case OMP_CLAUSE_NOGROUP:
1324 case OMP_CLAUSE_DEFAULTMAP:
1325 case OMP_CLAUSE_ASYNC:
1326 case OMP_CLAUSE_WAIT:
1327 case OMP_CLAUSE_GANG:
1328 case OMP_CLAUSE_WORKER:
1329 case OMP_CLAUSE_VECTOR:
1330 case OMP_CLAUSE_INDEPENDENT:
1331 case OMP_CLAUSE_AUTO:
1332 case OMP_CLAUSE_SEQ:
1333 case OMP_CLAUSE_TILE:
1334 case OMP_CLAUSE__SIMT_:
1335 break;
1337 case OMP_CLAUSE_ALIGNED:
1338 decl = OMP_CLAUSE_DECL (c);
1339 if (is_global_var (decl)
1340 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1341 install_var_local (decl, ctx);
1342 break;
1344 case OMP_CLAUSE__CACHE_:
1345 default:
1346 gcc_unreachable ();
1350 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1352 switch (OMP_CLAUSE_CODE (c))
1354 case OMP_CLAUSE_LASTPRIVATE:
1355 /* Let the corresponding firstprivate clause create
1356 the variable. */
1357 if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
1358 scan_array_reductions = true;
1359 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
1360 break;
1361 /* FALLTHRU */
1363 case OMP_CLAUSE_FIRSTPRIVATE:
1364 case OMP_CLAUSE_PRIVATE:
1365 case OMP_CLAUSE_LINEAR:
1366 case OMP_CLAUSE_IS_DEVICE_PTR:
1367 decl = OMP_CLAUSE_DECL (c);
1368 if (is_variable_sized (decl))
1370 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
1371 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
1372 && is_gimple_omp_offloaded (ctx->stmt))
1374 tree decl2 = DECL_VALUE_EXPR (decl);
1375 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1376 decl2 = TREE_OPERAND (decl2, 0);
1377 gcc_assert (DECL_P (decl2));
1378 install_var_local (decl2, ctx);
1379 fixup_remapped_decl (decl2, ctx, false);
1381 install_var_local (decl, ctx);
1383 fixup_remapped_decl (decl, ctx,
1384 OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
1385 && OMP_CLAUSE_PRIVATE_DEBUG (c));
1386 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
1387 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
1388 scan_array_reductions = true;
1389 break;
1391 case OMP_CLAUSE_REDUCTION:
1392 decl = OMP_CLAUSE_DECL (c);
1393 if (TREE_CODE (decl) != MEM_REF)
1395 if (is_variable_sized (decl))
1396 install_var_local (decl, ctx);
1397 fixup_remapped_decl (decl, ctx, false);
1399 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
1400 scan_array_reductions = true;
1401 break;
1403 case OMP_CLAUSE_SHARED:
1404 /* Ignore shared directives in teams construct. */
1405 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
1406 break;
1407 decl = OMP_CLAUSE_DECL (c);
1408 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1409 break;
1410 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
1412 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl,
1413 ctx->outer)))
1414 break;
1415 bool by_ref = use_pointer_for_field (decl, ctx);
1416 install_var_field (decl, by_ref, 11, ctx);
1417 break;
1419 fixup_remapped_decl (decl, ctx, false);
1420 break;
1422 case OMP_CLAUSE_MAP:
1423 if (!is_gimple_omp_offloaded (ctx->stmt))
1424 break;
1425 decl = OMP_CLAUSE_DECL (c);
1426 if (DECL_P (decl)
1427 && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
1428 && (OMP_CLAUSE_MAP_KIND (c)
1429 != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
1430 || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1431 && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
1432 && varpool_node::get_create (decl)->offloadable)
1433 break;
1434 if (DECL_P (decl))
1436 if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
1437 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
1438 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
1439 && !COMPLETE_TYPE_P (TREE_TYPE (decl)))
1441 tree new_decl = lookup_decl (decl, ctx);
1442 TREE_TYPE (new_decl)
1443 = remap_type (TREE_TYPE (decl), &ctx->cb);
1445 else if (DECL_SIZE (decl)
1446 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1448 tree decl2 = DECL_VALUE_EXPR (decl);
1449 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1450 decl2 = TREE_OPERAND (decl2, 0);
1451 gcc_assert (DECL_P (decl2));
1452 fixup_remapped_decl (decl2, ctx, false);
1453 fixup_remapped_decl (decl, ctx, true);
1455 else
1456 fixup_remapped_decl (decl, ctx, false);
1458 break;
1460 case OMP_CLAUSE_COPYPRIVATE:
1461 case OMP_CLAUSE_COPYIN:
1462 case OMP_CLAUSE_DEFAULT:
1463 case OMP_CLAUSE_IF:
1464 case OMP_CLAUSE_NUM_THREADS:
1465 case OMP_CLAUSE_NUM_TEAMS:
1466 case OMP_CLAUSE_THREAD_LIMIT:
1467 case OMP_CLAUSE_DEVICE:
1468 case OMP_CLAUSE_SCHEDULE:
1469 case OMP_CLAUSE_DIST_SCHEDULE:
1470 case OMP_CLAUSE_NOWAIT:
1471 case OMP_CLAUSE_ORDERED:
1472 case OMP_CLAUSE_COLLAPSE:
1473 case OMP_CLAUSE_UNTIED:
1474 case OMP_CLAUSE_FINAL:
1475 case OMP_CLAUSE_MERGEABLE:
1476 case OMP_CLAUSE_PROC_BIND:
1477 case OMP_CLAUSE_SAFELEN:
1478 case OMP_CLAUSE_SIMDLEN:
1479 case OMP_CLAUSE_ALIGNED:
1480 case OMP_CLAUSE_DEPEND:
1481 case OMP_CLAUSE__LOOPTEMP_:
1482 case OMP_CLAUSE_TO:
1483 case OMP_CLAUSE_FROM:
1484 case OMP_CLAUSE_PRIORITY:
1485 case OMP_CLAUSE_GRAINSIZE:
1486 case OMP_CLAUSE_NUM_TASKS:
1487 case OMP_CLAUSE_THREADS:
1488 case OMP_CLAUSE_SIMD:
1489 case OMP_CLAUSE_NOGROUP:
1490 case OMP_CLAUSE_DEFAULTMAP:
1491 case OMP_CLAUSE_USE_DEVICE_PTR:
1492 case OMP_CLAUSE__CILK_FOR_COUNT_:
1493 case OMP_CLAUSE_ASYNC:
1494 case OMP_CLAUSE_WAIT:
1495 case OMP_CLAUSE_NUM_GANGS:
1496 case OMP_CLAUSE_NUM_WORKERS:
1497 case OMP_CLAUSE_VECTOR_LENGTH:
1498 case OMP_CLAUSE_GANG:
1499 case OMP_CLAUSE_WORKER:
1500 case OMP_CLAUSE_VECTOR:
1501 case OMP_CLAUSE_INDEPENDENT:
1502 case OMP_CLAUSE_AUTO:
1503 case OMP_CLAUSE_SEQ:
1504 case OMP_CLAUSE_TILE:
1505 case OMP_CLAUSE__GRIDDIM_:
1506 case OMP_CLAUSE__SIMT_:
1507 break;
1509 case OMP_CLAUSE__CACHE_:
1510 default:
1511 gcc_unreachable ();
1515 gcc_checking_assert (!scan_array_reductions
1516 || !is_gimple_omp_oacc (ctx->stmt));
1517 if (scan_array_reductions)
1519 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1520 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
1521 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
1523 scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c), ctx);
1524 scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
1526 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
1527 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
1528 scan_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
1529 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
1530 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
1531 scan_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
1535 /* Create a new name for omp child function. Returns an identifier. If
1536 IS_CILK_FOR is true then the suffix for the child function is
1537 "_cilk_for_fn." */
1539 static tree
1540 create_omp_child_function_name (bool task_copy, bool is_cilk_for)
1542 if (is_cilk_for)
1543 return clone_function_name (current_function_decl, "_cilk_for_fn");
1544 return clone_function_name (current_function_decl,
1545 task_copy ? "_omp_cpyfn" : "_omp_fn");
1548 /* Returns the type of the induction variable for the child function for
1549 _Cilk_for and the types for _high and _low variables based on TYPE. */
1551 static tree
1552 cilk_for_check_loop_diff_type (tree type)
1554 if (TYPE_PRECISION (type) <= TYPE_PRECISION (uint32_type_node))
1556 if (TYPE_UNSIGNED (type))
1557 return uint32_type_node;
1558 else
1559 return integer_type_node;
1561 else
1563 if (TYPE_UNSIGNED (type))
1564 return uint64_type_node;
1565 else
1566 return long_long_integer_type_node;
1570 /* Return true if CTX may belong to offloaded code: either if current function
1571 is offloaded, or any enclosing context corresponds to a target region. */
1573 static bool
1574 omp_maybe_offloaded_ctx (omp_context *ctx)
1576 if (cgraph_node::get (current_function_decl)->offloadable)
1577 return true;
1578 for (; ctx; ctx = ctx->outer)
1579 if (is_gimple_omp_offloaded (ctx->stmt))
1580 return true;
1581 return false;
1584 /* Build a decl for the omp child function. It'll not contain a body
1585 yet, just the bare decl. */
1587 static void
1588 create_omp_child_function (omp_context *ctx, bool task_copy)
1590 tree decl, type, name, t;
1592 tree cilk_for_count
1593 = (flag_cilkplus && gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL)
1594 ? omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
1595 OMP_CLAUSE__CILK_FOR_COUNT_) : NULL_TREE;
1596 tree cilk_var_type = NULL_TREE;
1598 name = create_omp_child_function_name (task_copy,
1599 cilk_for_count != NULL_TREE);
1600 if (task_copy)
1601 type = build_function_type_list (void_type_node, ptr_type_node,
1602 ptr_type_node, NULL_TREE);
1603 else if (cilk_for_count)
1605 type = TREE_TYPE (OMP_CLAUSE_OPERAND (cilk_for_count, 0));
1606 cilk_var_type = cilk_for_check_loop_diff_type (type);
1607 type = build_function_type_list (void_type_node, ptr_type_node,
1608 cilk_var_type, cilk_var_type, NULL_TREE);
1610 else
1611 type = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
1613 decl = build_decl (gimple_location (ctx->stmt), FUNCTION_DECL, name, type);
1615 gcc_checking_assert (!is_gimple_omp_oacc (ctx->stmt)
1616 || !task_copy);
1617 if (!task_copy)
1618 ctx->cb.dst_fn = decl;
1619 else
1620 gimple_omp_task_set_copy_fn (ctx->stmt, decl);
1622 TREE_STATIC (decl) = 1;
1623 TREE_USED (decl) = 1;
1624 DECL_ARTIFICIAL (decl) = 1;
1625 DECL_IGNORED_P (decl) = 0;
1626 TREE_PUBLIC (decl) = 0;
1627 DECL_UNINLINABLE (decl) = 1;
1628 DECL_EXTERNAL (decl) = 0;
1629 DECL_CONTEXT (decl) = NULL_TREE;
1630 DECL_INITIAL (decl) = make_node (BLOCK);
1631 BLOCK_SUPERCONTEXT (DECL_INITIAL (decl)) = decl;
1632 if (omp_maybe_offloaded_ctx (ctx))
1634 cgraph_node::get_create (decl)->offloadable = 1;
1635 if (ENABLE_OFFLOADING)
1636 g->have_offload = true;
1639 if (cgraph_node::get_create (decl)->offloadable
1640 && !lookup_attribute ("omp declare target",
1641 DECL_ATTRIBUTES (current_function_decl)))
1643 const char *target_attr = (is_gimple_omp_offloaded (ctx->stmt)
1644 ? "omp target entrypoint"
1645 : "omp declare target");
1646 DECL_ATTRIBUTES (decl)
1647 = tree_cons (get_identifier (target_attr),
1648 NULL_TREE, DECL_ATTRIBUTES (decl));
1651 t = build_decl (DECL_SOURCE_LOCATION (decl),
1652 RESULT_DECL, NULL_TREE, void_type_node);
1653 DECL_ARTIFICIAL (t) = 1;
1654 DECL_IGNORED_P (t) = 1;
1655 DECL_CONTEXT (t) = decl;
1656 DECL_RESULT (decl) = t;
1658 /* _Cilk_for's child function requires two extra parameters called
1659 __low and __high that are set the by Cilk runtime when it calls this
1660 function. */
1661 if (cilk_for_count)
1663 t = build_decl (DECL_SOURCE_LOCATION (decl),
1664 PARM_DECL, get_identifier ("__high"), cilk_var_type);
1665 DECL_ARTIFICIAL (t) = 1;
1666 DECL_NAMELESS (t) = 1;
1667 DECL_ARG_TYPE (t) = ptr_type_node;
1668 DECL_CONTEXT (t) = current_function_decl;
1669 TREE_USED (t) = 1;
1670 DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
1671 DECL_ARGUMENTS (decl) = t;
1673 t = build_decl (DECL_SOURCE_LOCATION (decl),
1674 PARM_DECL, get_identifier ("__low"), cilk_var_type);
1675 DECL_ARTIFICIAL (t) = 1;
1676 DECL_NAMELESS (t) = 1;
1677 DECL_ARG_TYPE (t) = ptr_type_node;
1678 DECL_CONTEXT (t) = current_function_decl;
1679 TREE_USED (t) = 1;
1680 DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
1681 DECL_ARGUMENTS (decl) = t;
1684 tree data_name = get_identifier (".omp_data_i");
1685 t = build_decl (DECL_SOURCE_LOCATION (decl), PARM_DECL, data_name,
1686 ptr_type_node);
1687 DECL_ARTIFICIAL (t) = 1;
1688 DECL_NAMELESS (t) = 1;
1689 DECL_ARG_TYPE (t) = ptr_type_node;
1690 DECL_CONTEXT (t) = current_function_decl;
1691 TREE_USED (t) = 1;
1692 TREE_READONLY (t) = 1;
1693 if (cilk_for_count)
1694 DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
1695 DECL_ARGUMENTS (decl) = t;
1696 if (!task_copy)
1697 ctx->receiver_decl = t;
1698 else
1700 t = build_decl (DECL_SOURCE_LOCATION (decl),
1701 PARM_DECL, get_identifier (".omp_data_o"),
1702 ptr_type_node);
1703 DECL_ARTIFICIAL (t) = 1;
1704 DECL_NAMELESS (t) = 1;
1705 DECL_ARG_TYPE (t) = ptr_type_node;
1706 DECL_CONTEXT (t) = current_function_decl;
1707 TREE_USED (t) = 1;
1708 TREE_ADDRESSABLE (t) = 1;
1709 DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
1710 DECL_ARGUMENTS (decl) = t;
1713 /* Allocate memory for the function structure. The call to
1714 allocate_struct_function clobbers CFUN, so we need to restore
1715 it afterward. */
1716 push_struct_function (decl);
1717 cfun->function_end_locus = gimple_location (ctx->stmt);
1718 init_tree_ssa (cfun);
1719 pop_cfun ();
1722 /* Callback for walk_gimple_seq. Check if combined parallel
1723 contains gimple_omp_for_combined_into_p OMP_FOR. */
1725 tree
1726 omp_find_combined_for (gimple_stmt_iterator *gsi_p,
1727 bool *handled_ops_p,
1728 struct walk_stmt_info *wi)
1730 gimple *stmt = gsi_stmt (*gsi_p);
1732 *handled_ops_p = true;
1733 switch (gimple_code (stmt))
1735 WALK_SUBSTMTS;
1737 case GIMPLE_OMP_FOR:
1738 if (gimple_omp_for_combined_into_p (stmt)
1739 && gimple_omp_for_kind (stmt)
1740 == *(const enum gf_mask *) (wi->info))
1742 wi->info = stmt;
1743 return integer_zero_node;
1745 break;
1746 default:
1747 break;
1749 return NULL;
1752 /* Add _LOOPTEMP_ clauses on OpenMP parallel or task. */
1754 static void
1755 add_taskreg_looptemp_clauses (enum gf_mask msk, gimple *stmt,
1756 omp_context *outer_ctx)
1758 struct walk_stmt_info wi;
1760 memset (&wi, 0, sizeof (wi));
1761 wi.val_only = true;
1762 wi.info = (void *) &msk;
1763 walk_gimple_seq (gimple_omp_body (stmt), omp_find_combined_for, NULL, &wi);
1764 if (wi.info != (void *) &msk)
1766 gomp_for *for_stmt = as_a <gomp_for *> ((gimple *) wi.info);
1767 struct omp_for_data fd;
1768 omp_extract_for_data (for_stmt, &fd, NULL);
1769 /* We need two temporaries with fd.loop.v type (istart/iend)
1770 and then (fd.collapse - 1) temporaries with the same
1771 type for count2 ... countN-1 vars if not constant. */
1772 size_t count = 2, i;
1773 tree type = fd.iter_type;
1774 if (fd.collapse > 1
1775 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
1777 count += fd.collapse - 1;
1778 /* If there are lastprivate clauses on the inner
1779 GIMPLE_OMP_FOR, add one more temporaries for the total number
1780 of iterations (product of count1 ... countN-1). */
1781 if (omp_find_clause (gimple_omp_for_clauses (for_stmt),
1782 OMP_CLAUSE_LASTPRIVATE))
1783 count++;
1784 else if (msk == GF_OMP_FOR_KIND_FOR
1785 && omp_find_clause (gimple_omp_parallel_clauses (stmt),
1786 OMP_CLAUSE_LASTPRIVATE))
1787 count++;
1789 for (i = 0; i < count; i++)
1791 tree temp = create_tmp_var (type);
1792 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
1793 insert_decl_map (&outer_ctx->cb, temp, temp);
1794 OMP_CLAUSE_DECL (c) = temp;
1795 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
1796 gimple_omp_taskreg_set_clauses (stmt, c);
1801 /* Scan an OpenMP parallel directive. */
1803 static void
1804 scan_omp_parallel (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
1806 omp_context *ctx;
1807 tree name;
1808 gomp_parallel *stmt = as_a <gomp_parallel *> (gsi_stmt (*gsi));
1810 /* Ignore parallel directives with empty bodies, unless there
1811 are copyin clauses. */
1812 if (optimize > 0
1813 && empty_body_p (gimple_omp_body (stmt))
1814 && omp_find_clause (gimple_omp_parallel_clauses (stmt),
1815 OMP_CLAUSE_COPYIN) == NULL)
1817 gsi_replace (gsi, gimple_build_nop (), false);
1818 return;
1821 if (gimple_omp_parallel_combined_p (stmt))
1822 add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_FOR, stmt, outer_ctx);
1824 ctx = new_omp_context (stmt, outer_ctx);
1825 taskreg_contexts.safe_push (ctx);
1826 if (taskreg_nesting_level > 1)
1827 ctx->is_nested = true;
1828 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
1829 ctx->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
1830 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
1831 name = create_tmp_var_name (".omp_data_s");
1832 name = build_decl (gimple_location (stmt),
1833 TYPE_DECL, name, ctx->record_type);
1834 DECL_ARTIFICIAL (name) = 1;
1835 DECL_NAMELESS (name) = 1;
1836 TYPE_NAME (ctx->record_type) = name;
1837 TYPE_ARTIFICIAL (ctx->record_type) = 1;
1838 if (!gimple_omp_parallel_grid_phony (stmt))
1840 create_omp_child_function (ctx, false);
1841 gimple_omp_parallel_set_child_fn (stmt, ctx->cb.dst_fn);
1844 scan_sharing_clauses (gimple_omp_parallel_clauses (stmt), ctx);
1845 scan_omp (gimple_omp_body_ptr (stmt), ctx);
1847 if (TYPE_FIELDS (ctx->record_type) == NULL)
1848 ctx->record_type = ctx->receiver_decl = NULL;
1851 /* Scan an OpenMP task directive. */
1853 static void
1854 scan_omp_task (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
1856 omp_context *ctx;
1857 tree name, t;
1858 gomp_task *stmt = as_a <gomp_task *> (gsi_stmt (*gsi));
1860 /* Ignore task directives with empty bodies. */
1861 if (optimize > 0
1862 && empty_body_p (gimple_omp_body (stmt)))
1864 gsi_replace (gsi, gimple_build_nop (), false);
1865 return;
1868 if (gimple_omp_task_taskloop_p (stmt))
1869 add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_TASKLOOP, stmt, outer_ctx);
1871 ctx = new_omp_context (stmt, outer_ctx);
1872 taskreg_contexts.safe_push (ctx);
1873 if (taskreg_nesting_level > 1)
1874 ctx->is_nested = true;
1875 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
1876 ctx->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
1877 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
1878 name = create_tmp_var_name (".omp_data_s");
1879 name = build_decl (gimple_location (stmt),
1880 TYPE_DECL, name, ctx->record_type);
1881 DECL_ARTIFICIAL (name) = 1;
1882 DECL_NAMELESS (name) = 1;
1883 TYPE_NAME (ctx->record_type) = name;
1884 TYPE_ARTIFICIAL (ctx->record_type) = 1;
1885 create_omp_child_function (ctx, false);
1886 gimple_omp_task_set_child_fn (stmt, ctx->cb.dst_fn);
1888 scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);
1890 if (ctx->srecord_type)
1892 name = create_tmp_var_name (".omp_data_a");
1893 name = build_decl (gimple_location (stmt),
1894 TYPE_DECL, name, ctx->srecord_type);
1895 DECL_ARTIFICIAL (name) = 1;
1896 DECL_NAMELESS (name) = 1;
1897 TYPE_NAME (ctx->srecord_type) = name;
1898 TYPE_ARTIFICIAL (ctx->srecord_type) = 1;
1899 create_omp_child_function (ctx, true);
1902 scan_omp (gimple_omp_body_ptr (stmt), ctx);
1904 if (TYPE_FIELDS (ctx->record_type) == NULL)
1906 ctx->record_type = ctx->receiver_decl = NULL;
1907 t = build_int_cst (long_integer_type_node, 0);
1908 gimple_omp_task_set_arg_size (stmt, t);
1909 t = build_int_cst (long_integer_type_node, 1);
1910 gimple_omp_task_set_arg_align (stmt, t);
1915 /* If any decls have been made addressable during scan_omp,
1916 adjust their fields if needed, and layout record types
1917 of parallel/task constructs. */
1919 static void
1920 finish_taskreg_scan (omp_context *ctx)
1922 if (ctx->record_type == NULL_TREE)
1923 return;
1925 /* If any task_shared_vars were needed, verify all
1926 OMP_CLAUSE_SHARED clauses on GIMPLE_OMP_{PARALLEL,TASK}
1927 statements if use_pointer_for_field hasn't changed
1928 because of that. If it did, update field types now. */
1929 if (task_shared_vars)
1931 tree c;
1933 for (c = gimple_omp_taskreg_clauses (ctx->stmt);
1934 c; c = OMP_CLAUSE_CHAIN (c))
1935 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
1936 && !OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
1938 tree decl = OMP_CLAUSE_DECL (c);
1940 /* Global variables don't need to be copied,
1941 the receiver side will use them directly. */
1942 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1943 continue;
1944 if (!bitmap_bit_p (task_shared_vars, DECL_UID (decl))
1945 || !use_pointer_for_field (decl, ctx))
1946 continue;
1947 tree field = lookup_field (decl, ctx);
1948 if (TREE_CODE (TREE_TYPE (field)) == POINTER_TYPE
1949 && TREE_TYPE (TREE_TYPE (field)) == TREE_TYPE (decl))
1950 continue;
1951 TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
1952 TREE_THIS_VOLATILE (field) = 0;
1953 DECL_USER_ALIGN (field) = 0;
1954 SET_DECL_ALIGN (field, TYPE_ALIGN (TREE_TYPE (field)));
1955 if (TYPE_ALIGN (ctx->record_type) < DECL_ALIGN (field))
1956 SET_TYPE_ALIGN (ctx->record_type, DECL_ALIGN (field));
1957 if (ctx->srecord_type)
1959 tree sfield = lookup_sfield (decl, ctx);
1960 TREE_TYPE (sfield) = TREE_TYPE (field);
1961 TREE_THIS_VOLATILE (sfield) = 0;
1962 DECL_USER_ALIGN (sfield) = 0;
1963 SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
1964 if (TYPE_ALIGN (ctx->srecord_type) < DECL_ALIGN (sfield))
1965 SET_TYPE_ALIGN (ctx->srecord_type, DECL_ALIGN (sfield));
1970 if (gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL)
1972 layout_type (ctx->record_type);
1973 fixup_child_record_type (ctx);
1975 else
1977 location_t loc = gimple_location (ctx->stmt);
1978 tree *p, vla_fields = NULL_TREE, *q = &vla_fields;
1979 /* Move VLA fields to the end. */
1980 p = &TYPE_FIELDS (ctx->record_type);
1981 while (*p)
1982 if (!TYPE_SIZE_UNIT (TREE_TYPE (*p))
1983 || ! TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (*p))))
1985 *q = *p;
1986 *p = TREE_CHAIN (*p);
1987 TREE_CHAIN (*q) = NULL_TREE;
1988 q = &TREE_CHAIN (*q);
1990 else
1991 p = &DECL_CHAIN (*p);
1992 *p = vla_fields;
1993 if (gimple_omp_task_taskloop_p (ctx->stmt))
1995 /* Move fields corresponding to first and second _looptemp_
1996 clause first. There are filled by GOMP_taskloop
1997 and thus need to be in specific positions. */
1998 tree c1 = gimple_omp_task_clauses (ctx->stmt);
1999 c1 = omp_find_clause (c1, OMP_CLAUSE__LOOPTEMP_);
2000 tree c2 = omp_find_clause (OMP_CLAUSE_CHAIN (c1),
2001 OMP_CLAUSE__LOOPTEMP_);
2002 tree f1 = lookup_field (OMP_CLAUSE_DECL (c1), ctx);
2003 tree f2 = lookup_field (OMP_CLAUSE_DECL (c2), ctx);
2004 p = &TYPE_FIELDS (ctx->record_type);
2005 while (*p)
2006 if (*p == f1 || *p == f2)
2007 *p = DECL_CHAIN (*p);
2008 else
2009 p = &DECL_CHAIN (*p);
2010 DECL_CHAIN (f1) = f2;
2011 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->record_type);
2012 TYPE_FIELDS (ctx->record_type) = f1;
2013 if (ctx->srecord_type)
2015 f1 = lookup_sfield (OMP_CLAUSE_DECL (c1), ctx);
2016 f2 = lookup_sfield (OMP_CLAUSE_DECL (c2), ctx);
2017 p = &TYPE_FIELDS (ctx->srecord_type);
2018 while (*p)
2019 if (*p == f1 || *p == f2)
2020 *p = DECL_CHAIN (*p);
2021 else
2022 p = &DECL_CHAIN (*p);
2023 DECL_CHAIN (f1) = f2;
2024 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
2025 TYPE_FIELDS (ctx->srecord_type) = f1;
2028 layout_type (ctx->record_type);
2029 fixup_child_record_type (ctx);
2030 if (ctx->srecord_type)
2031 layout_type (ctx->srecord_type);
2032 tree t = fold_convert_loc (loc, long_integer_type_node,
2033 TYPE_SIZE_UNIT (ctx->record_type));
2034 gimple_omp_task_set_arg_size (ctx->stmt, t);
2035 t = build_int_cst (long_integer_type_node,
2036 TYPE_ALIGN_UNIT (ctx->record_type));
2037 gimple_omp_task_set_arg_align (ctx->stmt, t);
2041 /* Find the enclosing offload context. */
2043 static omp_context *
2044 enclosing_target_ctx (omp_context *ctx)
2046 for (; ctx; ctx = ctx->outer)
2047 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET)
2048 break;
2050 return ctx;
2053 /* Return true if ctx is part of an oacc kernels region. */
2055 static bool
2056 ctx_in_oacc_kernels_region (omp_context *ctx)
2058 for (;ctx != NULL; ctx = ctx->outer)
2060 gimple *stmt = ctx->stmt;
2061 if (gimple_code (stmt) == GIMPLE_OMP_TARGET
2062 && gimple_omp_target_kind (stmt) == GF_OMP_TARGET_KIND_OACC_KERNELS)
2063 return true;
2066 return false;
2069 /* Check the parallelism clauses inside a kernels regions.
2070 Until kernels handling moves to use the same loop indirection
2071 scheme as parallel, we need to do this checking early. */
2073 static unsigned
2074 check_oacc_kernel_gwv (gomp_for *stmt, omp_context *ctx)
2076 bool checking = true;
2077 unsigned outer_mask = 0;
2078 unsigned this_mask = 0;
2079 bool has_seq = false, has_auto = false;
2081 if (ctx->outer)
2082 outer_mask = check_oacc_kernel_gwv (NULL, ctx->outer);
2083 if (!stmt)
2085 checking = false;
2086 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR)
2087 return outer_mask;
2088 stmt = as_a <gomp_for *> (ctx->stmt);
2091 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2093 switch (OMP_CLAUSE_CODE (c))
2095 case OMP_CLAUSE_GANG:
2096 this_mask |= GOMP_DIM_MASK (GOMP_DIM_GANG);
2097 break;
2098 case OMP_CLAUSE_WORKER:
2099 this_mask |= GOMP_DIM_MASK (GOMP_DIM_WORKER);
2100 break;
2101 case OMP_CLAUSE_VECTOR:
2102 this_mask |= GOMP_DIM_MASK (GOMP_DIM_VECTOR);
2103 break;
2104 case OMP_CLAUSE_SEQ:
2105 has_seq = true;
2106 break;
2107 case OMP_CLAUSE_AUTO:
2108 has_auto = true;
2109 break;
2110 default:
2111 break;
2115 if (checking)
2117 if (has_seq && (this_mask || has_auto))
2118 error_at (gimple_location (stmt), "%<seq%> overrides other"
2119 " OpenACC loop specifiers");
2120 else if (has_auto && this_mask)
2121 error_at (gimple_location (stmt), "%<auto%> conflicts with other"
2122 " OpenACC loop specifiers");
2124 if (this_mask & outer_mask)
2125 error_at (gimple_location (stmt), "inner loop uses same"
2126 " OpenACC parallelism as containing loop");
2129 return outer_mask | this_mask;
2132 /* Scan a GIMPLE_OMP_FOR. */
2134 static omp_context *
2135 scan_omp_for (gomp_for *stmt, omp_context *outer_ctx)
2137 omp_context *ctx;
2138 size_t i;
2139 tree clauses = gimple_omp_for_clauses (stmt);
2141 ctx = new_omp_context (stmt, outer_ctx);
2143 if (is_gimple_omp_oacc (stmt))
2145 omp_context *tgt = enclosing_target_ctx (outer_ctx);
2147 if (!tgt || is_oacc_parallel (tgt))
2148 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
2150 char const *check = NULL;
2152 switch (OMP_CLAUSE_CODE (c))
2154 case OMP_CLAUSE_GANG:
2155 check = "gang";
2156 break;
2158 case OMP_CLAUSE_WORKER:
2159 check = "worker";
2160 break;
2162 case OMP_CLAUSE_VECTOR:
2163 check = "vector";
2164 break;
2166 default:
2167 break;
2170 if (check && OMP_CLAUSE_OPERAND (c, 0))
2171 error_at (gimple_location (stmt),
2172 "argument not permitted on %qs clause in"
2173 " OpenACC %<parallel%>", check);
2176 if (tgt && is_oacc_kernels (tgt))
2178 /* Strip out reductions, as they are not handled yet. */
2179 tree *prev_ptr = &clauses;
2181 while (tree probe = *prev_ptr)
2183 tree *next_ptr = &OMP_CLAUSE_CHAIN (probe);
2185 if (OMP_CLAUSE_CODE (probe) == OMP_CLAUSE_REDUCTION)
2186 *prev_ptr = *next_ptr;
2187 else
2188 prev_ptr = next_ptr;
2191 gimple_omp_for_set_clauses (stmt, clauses);
2192 check_oacc_kernel_gwv (stmt, ctx);
2196 scan_sharing_clauses (clauses, ctx);
2198 scan_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
2199 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
2201 scan_omp_op (gimple_omp_for_index_ptr (stmt, i), ctx);
2202 scan_omp_op (gimple_omp_for_initial_ptr (stmt, i), ctx);
2203 scan_omp_op (gimple_omp_for_final_ptr (stmt, i), ctx);
2204 scan_omp_op (gimple_omp_for_incr_ptr (stmt, i), ctx);
2206 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2207 return ctx;
2210 /* Duplicate #pragma omp simd, one for SIMT, another one for SIMD. */
2212 static void
2213 scan_omp_simd (gimple_stmt_iterator *gsi, gomp_for *stmt,
2214 omp_context *outer_ctx)
2216 gbind *bind = gimple_build_bind (NULL, NULL, NULL);
2217 gsi_replace (gsi, bind, false);
2218 gimple_seq seq = NULL;
2219 gimple *g = gimple_build_call_internal (IFN_GOMP_USE_SIMT, 0);
2220 tree cond = create_tmp_var_raw (integer_type_node);
2221 DECL_CONTEXT (cond) = current_function_decl;
2222 DECL_SEEN_IN_BIND_EXPR_P (cond) = 1;
2223 gimple_bind_set_vars (bind, cond);
2224 gimple_call_set_lhs (g, cond);
2225 gimple_seq_add_stmt (&seq, g);
2226 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
2227 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
2228 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
2229 g = gimple_build_cond (NE_EXPR, cond, integer_zero_node, lab1, lab2);
2230 gimple_seq_add_stmt (&seq, g);
2231 g = gimple_build_label (lab1);
2232 gimple_seq_add_stmt (&seq, g);
2233 gimple_seq new_seq = copy_gimple_seq_and_replace_locals (stmt);
2234 gomp_for *new_stmt = as_a <gomp_for *> (new_seq);
2235 tree clause = build_omp_clause (gimple_location (stmt), OMP_CLAUSE__SIMT_);
2236 OMP_CLAUSE_CHAIN (clause) = gimple_omp_for_clauses (new_stmt);
2237 gimple_omp_for_set_clauses (new_stmt, clause);
2238 gimple_seq_add_stmt (&seq, new_stmt);
2239 g = gimple_build_goto (lab3);
2240 gimple_seq_add_stmt (&seq, g);
2241 g = gimple_build_label (lab2);
2242 gimple_seq_add_stmt (&seq, g);
2243 gimple_seq_add_stmt (&seq, stmt);
2244 g = gimple_build_label (lab3);
2245 gimple_seq_add_stmt (&seq, g);
2246 gimple_bind_set_body (bind, seq);
2247 update_stmt (bind);
2248 scan_omp_for (new_stmt, outer_ctx);
2249 scan_omp_for (stmt, outer_ctx)->simt_stmt = new_stmt;
2252 /* Scan an OpenMP sections directive. */
2254 static void
2255 scan_omp_sections (gomp_sections *stmt, omp_context *outer_ctx)
2257 omp_context *ctx;
2259 ctx = new_omp_context (stmt, outer_ctx);
2260 scan_sharing_clauses (gimple_omp_sections_clauses (stmt), ctx);
2261 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2264 /* Scan an OpenMP single directive. */
2266 static void
2267 scan_omp_single (gomp_single *stmt, omp_context *outer_ctx)
2269 omp_context *ctx;
2270 tree name;
2272 ctx = new_omp_context (stmt, outer_ctx);
2273 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2274 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2275 name = create_tmp_var_name (".omp_copy_s");
2276 name = build_decl (gimple_location (stmt),
2277 TYPE_DECL, name, ctx->record_type);
2278 TYPE_NAME (ctx->record_type) = name;
2280 scan_sharing_clauses (gimple_omp_single_clauses (stmt), ctx);
2281 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2283 if (TYPE_FIELDS (ctx->record_type) == NULL)
2284 ctx->record_type = NULL;
2285 else
2286 layout_type (ctx->record_type);
2289 /* Return true if the CLAUSES of an omp target guarantee that the base pointers
2290 used in the corresponding offloaded function are restrict. */
2292 static bool
2293 omp_target_base_pointers_restrict_p (tree clauses)
2295 /* The analysis relies on the GOMP_MAP_FORCE_* mapping kinds, which are only
2296 used by OpenACC. */
2297 if (flag_openacc == 0)
2298 return false;
2300 /* I. Basic example:
2302 void foo (void)
2304 unsigned int a[2], b[2];
2306 #pragma acc kernels \
2307 copyout (a) \
2308 copyout (b)
2310 a[0] = 0;
2311 b[0] = 1;
2315 After gimplification, we have:
2317 #pragma omp target oacc_kernels \
2318 map(force_from:a [len: 8]) \
2319 map(force_from:b [len: 8])
2321 a[0] = 0;
2322 b[0] = 1;
2325 Because both mappings have the force prefix, we know that they will be
2326 allocated when calling the corresponding offloaded function, which means we
2327 can mark the base pointers for a and b in the offloaded function as
2328 restrict. */
2330 tree c;
2331 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
2333 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP)
2334 return false;
2336 switch (OMP_CLAUSE_MAP_KIND (c))
2338 case GOMP_MAP_FORCE_ALLOC:
2339 case GOMP_MAP_FORCE_TO:
2340 case GOMP_MAP_FORCE_FROM:
2341 case GOMP_MAP_FORCE_TOFROM:
2342 break;
2343 default:
2344 return false;
2348 return true;
2351 /* Scan a GIMPLE_OMP_TARGET. */
2353 static void
2354 scan_omp_target (gomp_target *stmt, omp_context *outer_ctx)
2356 omp_context *ctx;
2357 tree name;
2358 bool offloaded = is_gimple_omp_offloaded (stmt);
2359 tree clauses = gimple_omp_target_clauses (stmt);
2361 ctx = new_omp_context (stmt, outer_ctx);
2362 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2363 ctx->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
2364 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2365 name = create_tmp_var_name (".omp_data_t");
2366 name = build_decl (gimple_location (stmt),
2367 TYPE_DECL, name, ctx->record_type);
2368 DECL_ARTIFICIAL (name) = 1;
2369 DECL_NAMELESS (name) = 1;
2370 TYPE_NAME (ctx->record_type) = name;
2371 TYPE_ARTIFICIAL (ctx->record_type) = 1;
2373 bool base_pointers_restrict = false;
2374 if (offloaded)
2376 create_omp_child_function (ctx, false);
2377 gimple_omp_target_set_child_fn (stmt, ctx->cb.dst_fn);
2379 base_pointers_restrict = omp_target_base_pointers_restrict_p (clauses);
2380 if (base_pointers_restrict
2381 && dump_file && (dump_flags & TDF_DETAILS))
2382 fprintf (dump_file,
2383 "Base pointers in offloaded function are restrict\n");
2386 scan_sharing_clauses (clauses, ctx, base_pointers_restrict);
2387 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2389 if (TYPE_FIELDS (ctx->record_type) == NULL)
2390 ctx->record_type = ctx->receiver_decl = NULL;
2391 else
2393 TYPE_FIELDS (ctx->record_type)
2394 = nreverse (TYPE_FIELDS (ctx->record_type));
2395 if (flag_checking)
2397 unsigned int align = DECL_ALIGN (TYPE_FIELDS (ctx->record_type));
2398 for (tree field = TYPE_FIELDS (ctx->record_type);
2399 field;
2400 field = DECL_CHAIN (field))
2401 gcc_assert (DECL_ALIGN (field) == align);
2403 layout_type (ctx->record_type);
2404 if (offloaded)
2405 fixup_child_record_type (ctx);
2409 /* Scan an OpenMP teams directive. */
2411 static void
2412 scan_omp_teams (gomp_teams *stmt, omp_context *outer_ctx)
2414 omp_context *ctx = new_omp_context (stmt, outer_ctx);
2415 scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
2416 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2419 /* Check nesting restrictions. */
2420 static bool
2421 check_omp_nesting_restrictions (gimple *stmt, omp_context *ctx)
2423 tree c;
2425 if (ctx && gimple_code (ctx->stmt) == GIMPLE_OMP_GRID_BODY)
2426 /* GRID_BODY is an artificial construct, nesting rules will be checked in
2427 the original copy of its contents. */
2428 return true;
2430 /* No nesting of non-OpenACC STMT (that is, an OpenMP one, or a GOMP builtin)
2431 inside an OpenACC CTX. */
2432 if (!(is_gimple_omp (stmt)
2433 && is_gimple_omp_oacc (stmt))
2434 /* Except for atomic codes that we share with OpenMP. */
2435 && !(gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
2436 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE))
2438 if (oacc_get_fn_attrib (cfun->decl) != NULL)
2440 error_at (gimple_location (stmt),
2441 "non-OpenACC construct inside of OpenACC routine");
2442 return false;
2444 else
2445 for (omp_context *octx = ctx; octx != NULL; octx = octx->outer)
2446 if (is_gimple_omp (octx->stmt)
2447 && is_gimple_omp_oacc (octx->stmt))
2449 error_at (gimple_location (stmt),
2450 "non-OpenACC construct inside of OpenACC region");
2451 return false;
2455 if (ctx != NULL)
2457 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
2458 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
2460 c = NULL_TREE;
2461 if (gimple_code (stmt) == GIMPLE_OMP_ORDERED)
2463 c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
2464 if (omp_find_clause (c, OMP_CLAUSE_SIMD))
2466 if (omp_find_clause (c, OMP_CLAUSE_THREADS)
2467 && (ctx->outer == NULL
2468 || !gimple_omp_for_combined_into_p (ctx->stmt)
2469 || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR
2470 || (gimple_omp_for_kind (ctx->outer->stmt)
2471 != GF_OMP_FOR_KIND_FOR)
2472 || !gimple_omp_for_combined_p (ctx->outer->stmt)))
2474 error_at (gimple_location (stmt),
2475 "%<ordered simd threads%> must be closely "
2476 "nested inside of %<for simd%> region");
2477 return false;
2479 return true;
2482 error_at (gimple_location (stmt),
2483 "OpenMP constructs other than %<#pragma omp ordered simd%>"
2484 " may not be nested inside %<simd%> region");
2485 return false;
2487 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
2489 if ((gimple_code (stmt) != GIMPLE_OMP_FOR
2490 || ((gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_DISTRIBUTE)
2491 && (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_GRID_LOOP)))
2492 && gimple_code (stmt) != GIMPLE_OMP_PARALLEL)
2494 error_at (gimple_location (stmt),
2495 "only %<distribute%> or %<parallel%> regions are "
2496 "allowed to be strictly nested inside %<teams%> "
2497 "region");
2498 return false;
2502 switch (gimple_code (stmt))
2504 case GIMPLE_OMP_FOR:
2505 if (gimple_omp_for_kind (stmt) & GF_OMP_FOR_SIMD)
2506 return true;
2507 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_DISTRIBUTE)
2509 if (ctx != NULL && gimple_code (ctx->stmt) != GIMPLE_OMP_TEAMS)
2511 error_at (gimple_location (stmt),
2512 "%<distribute%> region must be strictly nested "
2513 "inside %<teams%> construct");
2514 return false;
2516 return true;
2518 /* We split taskloop into task and nested taskloop in it. */
2519 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP)
2520 return true;
2521 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_OACC_LOOP)
2523 bool ok = false;
2525 if (ctx)
2526 switch (gimple_code (ctx->stmt))
2528 case GIMPLE_OMP_FOR:
2529 ok = (gimple_omp_for_kind (ctx->stmt)
2530 == GF_OMP_FOR_KIND_OACC_LOOP);
2531 break;
2533 case GIMPLE_OMP_TARGET:
2534 switch (gimple_omp_target_kind (ctx->stmt))
2536 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
2537 case GF_OMP_TARGET_KIND_OACC_KERNELS:
2538 ok = true;
2539 break;
2541 default:
2542 break;
2545 default:
2546 break;
2548 else if (oacc_get_fn_attrib (current_function_decl))
2549 ok = true;
2550 if (!ok)
2552 error_at (gimple_location (stmt),
2553 "OpenACC loop directive must be associated with"
2554 " an OpenACC compute region");
2555 return false;
2558 /* FALLTHRU */
2559 case GIMPLE_CALL:
2560 if (is_gimple_call (stmt)
2561 && (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2562 == BUILT_IN_GOMP_CANCEL
2563 || DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2564 == BUILT_IN_GOMP_CANCELLATION_POINT))
2566 const char *bad = NULL;
2567 const char *kind = NULL;
2568 const char *construct
2569 = (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2570 == BUILT_IN_GOMP_CANCEL)
2571 ? "#pragma omp cancel"
2572 : "#pragma omp cancellation point";
2573 if (ctx == NULL)
2575 error_at (gimple_location (stmt), "orphaned %qs construct",
2576 construct);
2577 return false;
2579 switch (tree_fits_shwi_p (gimple_call_arg (stmt, 0))
2580 ? tree_to_shwi (gimple_call_arg (stmt, 0))
2581 : 0)
2583 case 1:
2584 if (gimple_code (ctx->stmt) != GIMPLE_OMP_PARALLEL)
2585 bad = "#pragma omp parallel";
2586 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2587 == BUILT_IN_GOMP_CANCEL
2588 && !integer_zerop (gimple_call_arg (stmt, 1)))
2589 ctx->cancellable = true;
2590 kind = "parallel";
2591 break;
2592 case 2:
2593 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
2594 || gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR)
2595 bad = "#pragma omp for";
2596 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2597 == BUILT_IN_GOMP_CANCEL
2598 && !integer_zerop (gimple_call_arg (stmt, 1)))
2600 ctx->cancellable = true;
2601 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2602 OMP_CLAUSE_NOWAIT))
2603 warning_at (gimple_location (stmt), 0,
2604 "%<#pragma omp cancel for%> inside "
2605 "%<nowait%> for construct");
2606 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2607 OMP_CLAUSE_ORDERED))
2608 warning_at (gimple_location (stmt), 0,
2609 "%<#pragma omp cancel for%> inside "
2610 "%<ordered%> for construct");
2612 kind = "for";
2613 break;
2614 case 4:
2615 if (gimple_code (ctx->stmt) != GIMPLE_OMP_SECTIONS
2616 && gimple_code (ctx->stmt) != GIMPLE_OMP_SECTION)
2617 bad = "#pragma omp sections";
2618 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2619 == BUILT_IN_GOMP_CANCEL
2620 && !integer_zerop (gimple_call_arg (stmt, 1)))
2622 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
2624 ctx->cancellable = true;
2625 if (omp_find_clause (gimple_omp_sections_clauses
2626 (ctx->stmt),
2627 OMP_CLAUSE_NOWAIT))
2628 warning_at (gimple_location (stmt), 0,
2629 "%<#pragma omp cancel sections%> inside "
2630 "%<nowait%> sections construct");
2632 else
2634 gcc_assert (ctx->outer
2635 && gimple_code (ctx->outer->stmt)
2636 == GIMPLE_OMP_SECTIONS);
2637 ctx->outer->cancellable = true;
2638 if (omp_find_clause (gimple_omp_sections_clauses
2639 (ctx->outer->stmt),
2640 OMP_CLAUSE_NOWAIT))
2641 warning_at (gimple_location (stmt), 0,
2642 "%<#pragma omp cancel sections%> inside "
2643 "%<nowait%> sections construct");
2646 kind = "sections";
2647 break;
2648 case 8:
2649 if (gimple_code (ctx->stmt) != GIMPLE_OMP_TASK)
2650 bad = "#pragma omp task";
2651 else
2653 for (omp_context *octx = ctx->outer;
2654 octx; octx = octx->outer)
2656 switch (gimple_code (octx->stmt))
2658 case GIMPLE_OMP_TASKGROUP:
2659 break;
2660 case GIMPLE_OMP_TARGET:
2661 if (gimple_omp_target_kind (octx->stmt)
2662 != GF_OMP_TARGET_KIND_REGION)
2663 continue;
2664 /* FALLTHRU */
2665 case GIMPLE_OMP_PARALLEL:
2666 case GIMPLE_OMP_TEAMS:
2667 error_at (gimple_location (stmt),
2668 "%<%s taskgroup%> construct not closely "
2669 "nested inside of %<taskgroup%> region",
2670 construct);
2671 return false;
2672 default:
2673 continue;
2675 break;
2677 ctx->cancellable = true;
2679 kind = "taskgroup";
2680 break;
2681 default:
2682 error_at (gimple_location (stmt), "invalid arguments");
2683 return false;
2685 if (bad)
2687 error_at (gimple_location (stmt),
2688 "%<%s %s%> construct not closely nested inside of %qs",
2689 construct, kind, bad);
2690 return false;
2693 /* FALLTHRU */
2694 case GIMPLE_OMP_SECTIONS:
2695 case GIMPLE_OMP_SINGLE:
2696 for (; ctx != NULL; ctx = ctx->outer)
2697 switch (gimple_code (ctx->stmt))
2699 case GIMPLE_OMP_FOR:
2700 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
2701 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
2702 break;
2703 /* FALLTHRU */
2704 case GIMPLE_OMP_SECTIONS:
2705 case GIMPLE_OMP_SINGLE:
2706 case GIMPLE_OMP_ORDERED:
2707 case GIMPLE_OMP_MASTER:
2708 case GIMPLE_OMP_TASK:
2709 case GIMPLE_OMP_CRITICAL:
2710 if (is_gimple_call (stmt))
2712 if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2713 != BUILT_IN_GOMP_BARRIER)
2714 return true;
2715 error_at (gimple_location (stmt),
2716 "barrier region may not be closely nested inside "
2717 "of work-sharing, %<critical%>, %<ordered%>, "
2718 "%<master%>, explicit %<task%> or %<taskloop%> "
2719 "region");
2720 return false;
2722 error_at (gimple_location (stmt),
2723 "work-sharing region may not be closely nested inside "
2724 "of work-sharing, %<critical%>, %<ordered%>, "
2725 "%<master%>, explicit %<task%> or %<taskloop%> region");
2726 return false;
2727 case GIMPLE_OMP_PARALLEL:
2728 case GIMPLE_OMP_TEAMS:
2729 return true;
2730 case GIMPLE_OMP_TARGET:
2731 if (gimple_omp_target_kind (ctx->stmt)
2732 == GF_OMP_TARGET_KIND_REGION)
2733 return true;
2734 break;
2735 default:
2736 break;
2738 break;
2739 case GIMPLE_OMP_MASTER:
2740 for (; ctx != NULL; ctx = ctx->outer)
2741 switch (gimple_code (ctx->stmt))
2743 case GIMPLE_OMP_FOR:
2744 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
2745 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
2746 break;
2747 /* FALLTHRU */
2748 case GIMPLE_OMP_SECTIONS:
2749 case GIMPLE_OMP_SINGLE:
2750 case GIMPLE_OMP_TASK:
2751 error_at (gimple_location (stmt),
2752 "%<master%> region may not be closely nested inside "
2753 "of work-sharing, explicit %<task%> or %<taskloop%> "
2754 "region");
2755 return false;
2756 case GIMPLE_OMP_PARALLEL:
2757 case GIMPLE_OMP_TEAMS:
2758 return true;
2759 case GIMPLE_OMP_TARGET:
2760 if (gimple_omp_target_kind (ctx->stmt)
2761 == GF_OMP_TARGET_KIND_REGION)
2762 return true;
2763 break;
2764 default:
2765 break;
2767 break;
2768 case GIMPLE_OMP_TASK:
2769 for (c = gimple_omp_task_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2770 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
2771 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
2772 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
2774 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
2775 error_at (OMP_CLAUSE_LOCATION (c),
2776 "%<depend(%s)%> is only allowed in %<omp ordered%>",
2777 kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
2778 return false;
2780 break;
2781 case GIMPLE_OMP_ORDERED:
2782 for (c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
2783 c; c = OMP_CLAUSE_CHAIN (c))
2785 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
2787 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREADS
2788 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SIMD);
2789 continue;
2791 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
2792 if (kind == OMP_CLAUSE_DEPEND_SOURCE
2793 || kind == OMP_CLAUSE_DEPEND_SINK)
2795 tree oclause;
2796 /* Look for containing ordered(N) loop. */
2797 if (ctx == NULL
2798 || gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
2799 || (oclause
2800 = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2801 OMP_CLAUSE_ORDERED)) == NULL_TREE)
2803 error_at (OMP_CLAUSE_LOCATION (c),
2804 "%<ordered%> construct with %<depend%> clause "
2805 "must be closely nested inside an %<ordered%> "
2806 "loop");
2807 return false;
2809 else if (OMP_CLAUSE_ORDERED_EXPR (oclause) == NULL_TREE)
2811 error_at (OMP_CLAUSE_LOCATION (c),
2812 "%<ordered%> construct with %<depend%> clause "
2813 "must be closely nested inside a loop with "
2814 "%<ordered%> clause with a parameter");
2815 return false;
2818 else
2820 error_at (OMP_CLAUSE_LOCATION (c),
2821 "invalid depend kind in omp %<ordered%> %<depend%>");
2822 return false;
2825 c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
2826 if (omp_find_clause (c, OMP_CLAUSE_SIMD))
2828 /* ordered simd must be closely nested inside of simd region,
2829 and simd region must not encounter constructs other than
2830 ordered simd, therefore ordered simd may be either orphaned,
2831 or ctx->stmt must be simd. The latter case is handled already
2832 earlier. */
2833 if (ctx != NULL)
2835 error_at (gimple_location (stmt),
2836 "%<ordered%> %<simd%> must be closely nested inside "
2837 "%<simd%> region");
2838 return false;
2841 for (; ctx != NULL; ctx = ctx->outer)
2842 switch (gimple_code (ctx->stmt))
2844 case GIMPLE_OMP_CRITICAL:
2845 case GIMPLE_OMP_TASK:
2846 case GIMPLE_OMP_ORDERED:
2847 ordered_in_taskloop:
2848 error_at (gimple_location (stmt),
2849 "%<ordered%> region may not be closely nested inside "
2850 "of %<critical%>, %<ordered%>, explicit %<task%> or "
2851 "%<taskloop%> region");
2852 return false;
2853 case GIMPLE_OMP_FOR:
2854 if (gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP)
2855 goto ordered_in_taskloop;
2856 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2857 OMP_CLAUSE_ORDERED) == NULL)
2859 error_at (gimple_location (stmt),
2860 "%<ordered%> region must be closely nested inside "
2861 "a loop region with an %<ordered%> clause");
2862 return false;
2864 return true;
2865 case GIMPLE_OMP_TARGET:
2866 if (gimple_omp_target_kind (ctx->stmt)
2867 != GF_OMP_TARGET_KIND_REGION)
2868 break;
2869 /* FALLTHRU */
2870 case GIMPLE_OMP_PARALLEL:
2871 case GIMPLE_OMP_TEAMS:
2872 error_at (gimple_location (stmt),
2873 "%<ordered%> region must be closely nested inside "
2874 "a loop region with an %<ordered%> clause");
2875 return false;
2876 default:
2877 break;
2879 break;
2880 case GIMPLE_OMP_CRITICAL:
2882 tree this_stmt_name
2883 = gimple_omp_critical_name (as_a <gomp_critical *> (stmt));
2884 for (; ctx != NULL; ctx = ctx->outer)
2885 if (gomp_critical *other_crit
2886 = dyn_cast <gomp_critical *> (ctx->stmt))
2887 if (this_stmt_name == gimple_omp_critical_name (other_crit))
2889 error_at (gimple_location (stmt),
2890 "%<critical%> region may not be nested inside "
2891 "a %<critical%> region with the same name");
2892 return false;
2895 break;
2896 case GIMPLE_OMP_TEAMS:
2897 if (ctx == NULL
2898 || gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET
2899 || gimple_omp_target_kind (ctx->stmt) != GF_OMP_TARGET_KIND_REGION)
2901 error_at (gimple_location (stmt),
2902 "%<teams%> construct not closely nested inside of "
2903 "%<target%> construct");
2904 return false;
2906 break;
2907 case GIMPLE_OMP_TARGET:
2908 for (c = gimple_omp_target_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2909 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
2910 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
2911 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
2913 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
2914 error_at (OMP_CLAUSE_LOCATION (c),
2915 "%<depend(%s)%> is only allowed in %<omp ordered%>",
2916 kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
2917 return false;
2919 if (is_gimple_omp_offloaded (stmt)
2920 && oacc_get_fn_attrib (cfun->decl) != NULL)
2922 error_at (gimple_location (stmt),
2923 "OpenACC region inside of OpenACC routine, nested "
2924 "parallelism not supported yet");
2925 return false;
2927 for (; ctx != NULL; ctx = ctx->outer)
2929 if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET)
2931 if (is_gimple_omp (stmt)
2932 && is_gimple_omp_oacc (stmt)
2933 && is_gimple_omp (ctx->stmt))
2935 error_at (gimple_location (stmt),
2936 "OpenACC construct inside of non-OpenACC region");
2937 return false;
2939 continue;
2942 const char *stmt_name, *ctx_stmt_name;
2943 switch (gimple_omp_target_kind (stmt))
2945 case GF_OMP_TARGET_KIND_REGION: stmt_name = "target"; break;
2946 case GF_OMP_TARGET_KIND_DATA: stmt_name = "target data"; break;
2947 case GF_OMP_TARGET_KIND_UPDATE: stmt_name = "target update"; break;
2948 case GF_OMP_TARGET_KIND_ENTER_DATA:
2949 stmt_name = "target enter data"; break;
2950 case GF_OMP_TARGET_KIND_EXIT_DATA:
2951 stmt_name = "target exit data"; break;
2952 case GF_OMP_TARGET_KIND_OACC_PARALLEL: stmt_name = "parallel"; break;
2953 case GF_OMP_TARGET_KIND_OACC_KERNELS: stmt_name = "kernels"; break;
2954 case GF_OMP_TARGET_KIND_OACC_DATA: stmt_name = "data"; break;
2955 case GF_OMP_TARGET_KIND_OACC_UPDATE: stmt_name = "update"; break;
2956 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
2957 stmt_name = "enter/exit data"; break;
2958 case GF_OMP_TARGET_KIND_OACC_HOST_DATA: stmt_name = "host_data";
2959 break;
2960 default: gcc_unreachable ();
2962 switch (gimple_omp_target_kind (ctx->stmt))
2964 case GF_OMP_TARGET_KIND_REGION: ctx_stmt_name = "target"; break;
2965 case GF_OMP_TARGET_KIND_DATA: ctx_stmt_name = "target data"; break;
2966 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
2967 ctx_stmt_name = "parallel"; break;
2968 case GF_OMP_TARGET_KIND_OACC_KERNELS:
2969 ctx_stmt_name = "kernels"; break;
2970 case GF_OMP_TARGET_KIND_OACC_DATA: ctx_stmt_name = "data"; break;
2971 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
2972 ctx_stmt_name = "host_data"; break;
2973 default: gcc_unreachable ();
2976 /* OpenACC/OpenMP mismatch? */
2977 if (is_gimple_omp_oacc (stmt)
2978 != is_gimple_omp_oacc (ctx->stmt))
2980 error_at (gimple_location (stmt),
2981 "%s %qs construct inside of %s %qs region",
2982 (is_gimple_omp_oacc (stmt)
2983 ? "OpenACC" : "OpenMP"), stmt_name,
2984 (is_gimple_omp_oacc (ctx->stmt)
2985 ? "OpenACC" : "OpenMP"), ctx_stmt_name);
2986 return false;
2988 if (is_gimple_omp_offloaded (ctx->stmt))
2990 /* No GIMPLE_OMP_TARGET inside offloaded OpenACC CTX. */
2991 if (is_gimple_omp_oacc (ctx->stmt))
2993 error_at (gimple_location (stmt),
2994 "%qs construct inside of %qs region",
2995 stmt_name, ctx_stmt_name);
2996 return false;
2998 else
3000 warning_at (gimple_location (stmt), 0,
3001 "%qs construct inside of %qs region",
3002 stmt_name, ctx_stmt_name);
3006 break;
3007 default:
3008 break;
3010 return true;
3014 /* Helper function scan_omp.
3016 Callback for walk_tree or operators in walk_gimple_stmt used to
3017 scan for OMP directives in TP. */
3019 static tree
3020 scan_omp_1_op (tree *tp, int *walk_subtrees, void *data)
3022 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
3023 omp_context *ctx = (omp_context *) wi->info;
3024 tree t = *tp;
3026 switch (TREE_CODE (t))
3028 case VAR_DECL:
3029 case PARM_DECL:
3030 case LABEL_DECL:
3031 case RESULT_DECL:
3032 if (ctx)
3034 tree repl = remap_decl (t, &ctx->cb);
3035 gcc_checking_assert (TREE_CODE (repl) != ERROR_MARK);
3036 *tp = repl;
3038 break;
3040 default:
3041 if (ctx && TYPE_P (t))
3042 *tp = remap_type (t, &ctx->cb);
3043 else if (!DECL_P (t))
3045 *walk_subtrees = 1;
3046 if (ctx)
3048 tree tem = remap_type (TREE_TYPE (t), &ctx->cb);
3049 if (tem != TREE_TYPE (t))
3051 if (TREE_CODE (t) == INTEGER_CST)
3052 *tp = wide_int_to_tree (tem, t);
3053 else
3054 TREE_TYPE (t) = tem;
3058 break;
3061 return NULL_TREE;
3064 /* Return true if FNDECL is a setjmp or a longjmp. */
3066 static bool
3067 setjmp_or_longjmp_p (const_tree fndecl)
3069 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
3070 && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_SETJMP
3071 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_LONGJMP))
3072 return true;
3074 tree declname = DECL_NAME (fndecl);
3075 if (!declname)
3076 return false;
3077 const char *name = IDENTIFIER_POINTER (declname);
3078 return !strcmp (name, "setjmp") || !strcmp (name, "longjmp");
3082 /* Helper function for scan_omp.
3084 Callback for walk_gimple_stmt used to scan for OMP directives in
3085 the current statement in GSI. */
3087 static tree
3088 scan_omp_1_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
3089 struct walk_stmt_info *wi)
3091 gimple *stmt = gsi_stmt (*gsi);
3092 omp_context *ctx = (omp_context *) wi->info;
3094 if (gimple_has_location (stmt))
3095 input_location = gimple_location (stmt);
3097 /* Check the nesting restrictions. */
3098 bool remove = false;
3099 if (is_gimple_omp (stmt))
3100 remove = !check_omp_nesting_restrictions (stmt, ctx);
3101 else if (is_gimple_call (stmt))
3103 tree fndecl = gimple_call_fndecl (stmt);
3104 if (fndecl)
3106 if (setjmp_or_longjmp_p (fndecl)
3107 && ctx
3108 && gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
3109 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
3111 remove = true;
3112 error_at (gimple_location (stmt),
3113 "setjmp/longjmp inside simd construct");
3115 else if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3116 switch (DECL_FUNCTION_CODE (fndecl))
3118 case BUILT_IN_GOMP_BARRIER:
3119 case BUILT_IN_GOMP_CANCEL:
3120 case BUILT_IN_GOMP_CANCELLATION_POINT:
3121 case BUILT_IN_GOMP_TASKYIELD:
3122 case BUILT_IN_GOMP_TASKWAIT:
3123 case BUILT_IN_GOMP_TASKGROUP_START:
3124 case BUILT_IN_GOMP_TASKGROUP_END:
3125 remove = !check_omp_nesting_restrictions (stmt, ctx);
3126 break;
3127 default:
3128 break;
3132 if (remove)
3134 stmt = gimple_build_nop ();
3135 gsi_replace (gsi, stmt, false);
3138 *handled_ops_p = true;
3140 switch (gimple_code (stmt))
3142 case GIMPLE_OMP_PARALLEL:
3143 taskreg_nesting_level++;
3144 scan_omp_parallel (gsi, ctx);
3145 taskreg_nesting_level--;
3146 break;
3148 case GIMPLE_OMP_TASK:
3149 taskreg_nesting_level++;
3150 scan_omp_task (gsi, ctx);
3151 taskreg_nesting_level--;
3152 break;
3154 case GIMPLE_OMP_FOR:
3155 if (((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
3156 & GF_OMP_FOR_KIND_MASK) == GF_OMP_FOR_KIND_SIMD)
3157 && omp_maybe_offloaded_ctx (ctx)
3158 && omp_max_simt_vf ())
3159 scan_omp_simd (gsi, as_a <gomp_for *> (stmt), ctx);
3160 else
3161 scan_omp_for (as_a <gomp_for *> (stmt), ctx);
3162 break;
3164 case GIMPLE_OMP_SECTIONS:
3165 scan_omp_sections (as_a <gomp_sections *> (stmt), ctx);
3166 break;
3168 case GIMPLE_OMP_SINGLE:
3169 scan_omp_single (as_a <gomp_single *> (stmt), ctx);
3170 break;
3172 case GIMPLE_OMP_SECTION:
3173 case GIMPLE_OMP_MASTER:
3174 case GIMPLE_OMP_TASKGROUP:
3175 case GIMPLE_OMP_ORDERED:
3176 case GIMPLE_OMP_CRITICAL:
3177 case GIMPLE_OMP_GRID_BODY:
3178 ctx = new_omp_context (stmt, ctx);
3179 scan_omp (gimple_omp_body_ptr (stmt), ctx);
3180 break;
3182 case GIMPLE_OMP_TARGET:
3183 scan_omp_target (as_a <gomp_target *> (stmt), ctx);
3184 break;
3186 case GIMPLE_OMP_TEAMS:
3187 scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
3188 break;
3190 case GIMPLE_BIND:
3192 tree var;
3194 *handled_ops_p = false;
3195 if (ctx)
3196 for (var = gimple_bind_vars (as_a <gbind *> (stmt));
3197 var ;
3198 var = DECL_CHAIN (var))
3199 insert_decl_map (&ctx->cb, var, var);
3201 break;
3202 default:
3203 *handled_ops_p = false;
3204 break;
3207 return NULL_TREE;
3211 /* Scan all the statements starting at the current statement. CTX
3212 contains context information about the OMP directives and
3213 clauses found during the scan. */
3215 static void
3216 scan_omp (gimple_seq *body_p, omp_context *ctx)
3218 location_t saved_location;
3219 struct walk_stmt_info wi;
3221 memset (&wi, 0, sizeof (wi));
3222 wi.info = ctx;
3223 wi.want_locations = true;
3225 saved_location = input_location;
3226 walk_gimple_seq_mod (body_p, scan_omp_1_stmt, scan_omp_1_op, &wi);
3227 input_location = saved_location;
3230 /* Re-gimplification and code generation routines. */
3232 /* If a context was created for STMT when it was scanned, return it. */
3234 static omp_context *
3235 maybe_lookup_ctx (gimple *stmt)
3237 splay_tree_node n;
3238 n = splay_tree_lookup (all_contexts, (splay_tree_key) stmt);
3239 return n ? (omp_context *) n->value : NULL;
3243 /* Find the mapping for DECL in CTX or the immediately enclosing
3244 context that has a mapping for DECL.
3246 If CTX is a nested parallel directive, we may have to use the decl
3247 mappings created in CTX's parent context. Suppose that we have the
3248 following parallel nesting (variable UIDs showed for clarity):
3250 iD.1562 = 0;
3251 #omp parallel shared(iD.1562) -> outer parallel
3252 iD.1562 = iD.1562 + 1;
3254 #omp parallel shared (iD.1562) -> inner parallel
3255 iD.1562 = iD.1562 - 1;
3257 Each parallel structure will create a distinct .omp_data_s structure
3258 for copying iD.1562 in/out of the directive:
3260 outer parallel .omp_data_s.1.i -> iD.1562
3261 inner parallel .omp_data_s.2.i -> iD.1562
3263 A shared variable mapping will produce a copy-out operation before
3264 the parallel directive and a copy-in operation after it. So, in
3265 this case we would have:
3267 iD.1562 = 0;
3268 .omp_data_o.1.i = iD.1562;
3269 #omp parallel shared(iD.1562) -> outer parallel
3270 .omp_data_i.1 = &.omp_data_o.1
3271 .omp_data_i.1->i = .omp_data_i.1->i + 1;
3273 .omp_data_o.2.i = iD.1562; -> **
3274 #omp parallel shared(iD.1562) -> inner parallel
3275 .omp_data_i.2 = &.omp_data_o.2
3276 .omp_data_i.2->i = .omp_data_i.2->i - 1;
3279 ** This is a problem. The symbol iD.1562 cannot be referenced
3280 inside the body of the outer parallel region. But since we are
3281 emitting this copy operation while expanding the inner parallel
3282 directive, we need to access the CTX structure of the outer
3283 parallel directive to get the correct mapping:
3285 .omp_data_o.2.i = .omp_data_i.1->i
3287 Since there may be other workshare or parallel directives enclosing
3288 the parallel directive, it may be necessary to walk up the context
3289 parent chain. This is not a problem in general because nested
3290 parallelism happens only rarely. */
3292 static tree
3293 lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
3295 tree t;
3296 omp_context *up;
3298 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
3299 t = maybe_lookup_decl (decl, up);
3301 gcc_assert (!ctx->is_nested || t || is_global_var (decl));
3303 return t ? t : decl;
3307 /* Similar to lookup_decl_in_outer_ctx, but return DECL if not found
3308 in outer contexts. */
3310 static tree
3311 maybe_lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
3313 tree t = NULL;
3314 omp_context *up;
3316 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
3317 t = maybe_lookup_decl (decl, up);
3319 return t ? t : decl;
3323 /* Construct the initialization value for reduction operation OP. */
tree
omp_reduction_init_op (location_t loc, enum tree_code op, tree type)
{
  /* Return the identity element for reduction operation OP, converted
     to TYPE, for use as the initial value of a private reduction copy.  */
  switch (op)
    {
    /* Operators whose identity element is zero.  (MINUS_EXPR reductions
       sum up partial results, so they behave like PLUS_EXPR here.)  */
    case PLUS_EXPR:
    case MINUS_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_ORIF_EXPR:
    case TRUTH_XOR_EXPR:
    case NE_EXPR:
      return build_zero_cst (type);

    /* Operators whose identity element is one.  */
    case MULT_EXPR:
    case TRUTH_AND_EXPR:
    case TRUTH_ANDIF_EXPR:
    case EQ_EXPR:
      return fold_convert_loc (loc, type, integer_one_node);

    /* For bitwise AND the identity is all-ones.  */
    case BIT_AND_EXPR:
      return fold_convert_loc (loc, type, integer_minus_one_node);

    case MAX_EXPR:
      /* The identity of MAX is the smallest value of the type.  */
      if (SCALAR_FLOAT_TYPE_P (type))
	{
	  REAL_VALUE_TYPE max, min;
	  if (HONOR_INFINITIES (type))
	    {
	      /* Use -Inf when infinities are honored for this type...  */
	      real_inf (&max);
	      real_arithmetic (&min, NEGATE_EXPR, &max, NULL);
	    }
	  else
	    /* ...otherwise the most negative finite value (sign bit set).  */
	    real_maxval (&min, 1, TYPE_MODE (type));
	  return build_real (type, min);
	}
      else if (POINTER_TYPE_P (type))
	{
	  wide_int min
	    = wi::min_value (TYPE_PRECISION (type), TYPE_SIGN (type));
	  return wide_int_to_tree (type, min);
	}
      else
	{
	  gcc_assert (INTEGRAL_TYPE_P (type));
	  return TYPE_MIN_VALUE (type);
	}

    case MIN_EXPR:
      /* Symmetrically, the identity of MIN is the largest value.  */
      if (SCALAR_FLOAT_TYPE_P (type))
	{
	  REAL_VALUE_TYPE max;
	  if (HONOR_INFINITIES (type))
	    real_inf (&max);
	  else
	    real_maxval (&max, 0, TYPE_MODE (type));
	  return build_real (type, max);
	}
      else if (POINTER_TYPE_P (type))
	{
	  wide_int max
	    = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
	  return wide_int_to_tree (type, max);
	}
      else
	{
	  gcc_assert (INTEGRAL_TYPE_P (type));
	  return TYPE_MAX_VALUE (type);
	}

    default:
      gcc_unreachable ();
    }
}
3401 /* Construct the initialization value for reduction CLAUSE. */
3403 tree
3404 omp_reduction_init (tree clause, tree type)
3406 return omp_reduction_init_op (OMP_CLAUSE_LOCATION (clause),
3407 OMP_CLAUSE_REDUCTION_CODE (clause), type);
3410 /* Return alignment to be assumed for var in CLAUSE, which should be
3411 OMP_CLAUSE_ALIGNED. */
static tree
omp_clause_aligned_alignment (tree clause)
{
  /* Honor an explicit alignment given on the clause.  */
  if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
    return OMP_CLAUSE_ALIGNED_ALIGNMENT (clause);

  /* Otherwise return implementation defined alignment.  */
  unsigned int al = 1;
  machine_mode mode, vmode;
  /* Round the target's autovectorization size set down to a single
     power-of-two size.  */
  int vs = targetm.vectorize.autovectorize_vector_sizes ();
  if (vs)
    vs = 1 << floor_log2 (vs);
  /* Scalar mode classes paired with their vector counterparts:
     classes[i] is scalar, classes[i + 1] its vector class.  */
  static enum mode_class classes[]
    = { MODE_INT, MODE_VECTOR_INT, MODE_FLOAT, MODE_VECTOR_FLOAT };
  for (int i = 0; i < 4; i += 2)
    /* Walk every scalar mode of this class, narrowest first...  */
    for (mode = GET_CLASS_NARROWEST_MODE (classes[i]);
	 mode != VOIDmode;
	 mode = GET_MODE_WIDER_MODE (mode))
      {
	/* ...and ask the target which SIMD mode it prefers for it.  */
	vmode = targetm.vectorize.preferred_simd_mode (mode);
	if (GET_MODE_CLASS (vmode) != classes[i + 1])
	  continue;
	/* Widen the vector mode up toward the autovectorization size.  */
	while (vs
	       && GET_MODE_SIZE (vmode) < vs
	       && GET_MODE_2XWIDER_MODE (vmode) != VOIDmode)
	  vmode = GET_MODE_2XWIDER_MODE (vmode);

	/* Build the corresponding vector type; skip modes the frontend
	   cannot represent faithfully.  */
	tree type = lang_hooks.types.type_for_mode (mode, 1);
	if (type == NULL_TREE || TYPE_MODE (type) != mode)
	  continue;
	type = build_vector_type (type, GET_MODE_SIZE (vmode)
					/ GET_MODE_SIZE (mode));
	if (TYPE_MODE (type) != vmode)
	  continue;
	/* Track the largest alignment (in bytes) seen so far.  */
	if (TYPE_ALIGN_UNIT (type) > al)
	  al = TYPE_ALIGN_UNIT (type);
      }
  return build_int_cst (integer_type_node, al);
}
3454 /* This structure is part of the interface between lower_rec_simd_input_clauses
3455 and lower_rec_input_clauses. */
struct omplow_simd_context {
  /* Index variable used to subscript the per-lane "omp simd array"s.  */
  tree idx;
  /* Lane variable; NEW_VAR's DECL_VALUE_EXPR indexes the array by it.  */
  tree lane;
  /* Maximum vectorization factor: 0 means not yet computed, 1 means
     SIMD array privatization is disabled.  */
  int max_vf;
  /* True when an OMP_CLAUSE__SIMT_ clause is present on the construct.  */
  bool is_simt;
};
3464 /* Helper function of lower_rec_input_clauses, used for #pragma omp simd
3465 privatization. */
static bool
lower_rec_simd_input_clauses (tree new_var, omp_context *ctx,
			      omplow_simd_context *sctx, tree &ivar, tree &lvar)
{
  /* Compute the maximum vectorization factor lazily, the first time this
     is called for the construct.  */
  if (sctx->max_vf == 0)
    {
      sctx->max_vf = sctx->is_simt ? omp_max_simt_vf () : omp_max_vf ();
      if (sctx->max_vf > 1)
	{
	  /* A constant positive safelen clamps the VF; a non-constant or
	     non-positive safelen disables SIMD arrays altogether.  */
	  tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
				    OMP_CLAUSE_SAFELEN);
	  if (c
	      && (TREE_CODE (OMP_CLAUSE_SAFELEN_EXPR (c)) != INTEGER_CST
		  || tree_int_cst_sgn (OMP_CLAUSE_SAFELEN_EXPR (c)) != 1))
	    sctx->max_vf = 1;
	  else if (c && compare_tree_int (OMP_CLAUSE_SAFELEN_EXPR (c),
					  sctx->max_vf) == -1)
	    sctx->max_vf = tree_to_shwi (OMP_CLAUSE_SAFELEN_EXPR (c));
	}
      if (sctx->max_vf > 1)
	{
	  /* Shared index and lane variables for all SIMD arrays of this
	     construct.  */
	  sctx->idx = create_tmp_var (unsigned_type_node);
	  sctx->lane = create_tmp_var (unsigned_type_node);
	}
    }
  /* VF 1 means per-lane privatization is not performed.  */
  if (sctx->max_vf == 1)
    return false;

  /* Create the "omp simd array" holding one copy of NEW_VAR per lane.  */
  tree atype = build_array_type_nelts (TREE_TYPE (new_var), sctx->max_vf);
  tree avar = create_tmp_var_raw (atype);
  if (TREE_ADDRESSABLE (new_var))
    TREE_ADDRESSABLE (avar) = 1;
  DECL_ATTRIBUTES (avar)
    = tree_cons (get_identifier ("omp simd array"), NULL,
		 DECL_ATTRIBUTES (avar));
  gimple_add_tmp_var (avar);
  /* IVAR subscripts the array by sctx->idx, LVAR by sctx->lane.  */
  ivar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, sctx->idx,
		 NULL_TREE, NULL_TREE);
  lvar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, sctx->lane,
		 NULL_TREE, NULL_TREE);
  if (DECL_P (new_var))
    {
      /* Redirect uses of NEW_VAR to its lane's array element.  */
      SET_DECL_VALUE_EXPR (new_var, lvar);
      DECL_HAS_VALUE_EXPR_P (new_var) = 1;
    }
  return true;
}
3515 /* Helper function of lower_rec_input_clauses. For a reference
3516 in simd reduction, add an underlying variable it will reference. */
3518 static void
3519 handle_simd_reference (location_t loc, tree new_vard, gimple_seq *ilist)
3521 tree z = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_vard)));
3522 if (TREE_CONSTANT (z))
3524 z = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_vard)),
3525 get_name (new_vard));
3526 gimple_add_tmp_var (z);
3527 TREE_ADDRESSABLE (z) = 1;
3528 z = build_fold_addr_expr_loc (loc, z);
3529 gimplify_assign (new_vard, z, ilist);
3533 /* Generate code to implement the input clauses, FIRSTPRIVATE and COPYIN,
3534 from the receiver (aka child) side and initializers for REFERENCE_TYPE
3535 private variables. Initialization statements go in ILIST, while calls
3536 to destructors go in DLIST. */
3538 static void
3539 lower_rec_input_clauses (tree clauses, gimple_seq *ilist, gimple_seq *dlist,
3540 omp_context *ctx, struct omp_for_data *fd)
3542 tree c, dtor, copyin_seq, x, ptr;
3543 bool copyin_by_ref = false;
3544 bool lastprivate_firstprivate = false;
3545 bool reduction_omp_orig_ref = false;
3546 int pass;
3547 bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
3548 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD);
3549 omplow_simd_context sctx = omplow_simd_context ();
3550 tree simt_lane = NULL_TREE;
3551 tree ivar = NULL_TREE, lvar = NULL_TREE;
3552 gimple_seq llist[3] = { };
3554 copyin_seq = NULL;
3555 sctx.is_simt = is_simd && omp_find_clause (clauses, OMP_CLAUSE__SIMT_);
3557 /* Set max_vf=1 (which will later enforce safelen=1) in simd loops
3558 with data sharing clauses referencing variable sized vars. That
3559 is unnecessarily hard to support and very unlikely to result in
3560 vectorized code anyway. */
3561 if (is_simd)
3562 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
3563 switch (OMP_CLAUSE_CODE (c))
3565 case OMP_CLAUSE_LINEAR:
3566 if (OMP_CLAUSE_LINEAR_ARRAY (c))
3567 sctx.max_vf = 1;
3568 /* FALLTHRU */
3569 case OMP_CLAUSE_PRIVATE:
3570 case OMP_CLAUSE_FIRSTPRIVATE:
3571 case OMP_CLAUSE_LASTPRIVATE:
3572 if (is_variable_sized (OMP_CLAUSE_DECL (c)))
3573 sctx.max_vf = 1;
3574 break;
3575 case OMP_CLAUSE_REDUCTION:
3576 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
3577 || is_variable_sized (OMP_CLAUSE_DECL (c)))
3578 sctx.max_vf = 1;
3579 break;
3580 default:
3581 continue;
3584 /* Do all the fixed sized types in the first pass, and the variable sized
3585 types in the second pass. This makes sure that the scalar arguments to
3586 the variable sized types are processed before we use them in the
3587 variable sized operations. */
3588 for (pass = 0; pass < 2; ++pass)
3590 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
3592 enum omp_clause_code c_kind = OMP_CLAUSE_CODE (c);
3593 tree var, new_var;
3594 bool by_ref;
3595 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
3597 switch (c_kind)
3599 case OMP_CLAUSE_PRIVATE:
3600 if (OMP_CLAUSE_PRIVATE_DEBUG (c))
3601 continue;
3602 break;
3603 case OMP_CLAUSE_SHARED:
3604 /* Ignore shared directives in teams construct. */
3605 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
3606 continue;
3607 if (maybe_lookup_decl (OMP_CLAUSE_DECL (c), ctx) == NULL)
3609 gcc_assert (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c)
3610 || is_global_var (OMP_CLAUSE_DECL (c)));
3611 continue;
3613 case OMP_CLAUSE_FIRSTPRIVATE:
3614 case OMP_CLAUSE_COPYIN:
3615 break;
3616 case OMP_CLAUSE_LINEAR:
3617 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
3618 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
3619 lastprivate_firstprivate = true;
3620 break;
3621 case OMP_CLAUSE_REDUCTION:
3622 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
3623 reduction_omp_orig_ref = true;
3624 break;
3625 case OMP_CLAUSE__LOOPTEMP_:
3626 /* Handle _looptemp_ clauses only on parallel/task. */
3627 if (fd)
3628 continue;
3629 break;
3630 case OMP_CLAUSE_LASTPRIVATE:
3631 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
3633 lastprivate_firstprivate = true;
3634 if (pass != 0 || is_taskloop_ctx (ctx))
3635 continue;
3637 /* Even without corresponding firstprivate, if
3638 decl is Fortran allocatable, it needs outer var
3639 reference. */
3640 else if (pass == 0
3641 && lang_hooks.decls.omp_private_outer_ref
3642 (OMP_CLAUSE_DECL (c)))
3643 lastprivate_firstprivate = true;
3644 break;
3645 case OMP_CLAUSE_ALIGNED:
3646 if (pass == 0)
3647 continue;
3648 var = OMP_CLAUSE_DECL (c);
3649 if (TREE_CODE (TREE_TYPE (var)) == POINTER_TYPE
3650 && !is_global_var (var))
3652 new_var = maybe_lookup_decl (var, ctx);
3653 if (new_var == NULL_TREE)
3654 new_var = maybe_lookup_decl_in_outer_ctx (var, ctx);
3655 x = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
3656 tree alarg = omp_clause_aligned_alignment (c);
3657 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
3658 x = build_call_expr_loc (clause_loc, x, 2, new_var, alarg);
3659 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
3660 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
3661 gimplify_and_add (x, ilist);
3663 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
3664 && is_global_var (var))
3666 tree ptype = build_pointer_type (TREE_TYPE (var)), t, t2;
3667 new_var = lookup_decl (var, ctx);
3668 t = maybe_lookup_decl_in_outer_ctx (var, ctx);
3669 t = build_fold_addr_expr_loc (clause_loc, t);
3670 t2 = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
3671 tree alarg = omp_clause_aligned_alignment (c);
3672 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
3673 t = build_call_expr_loc (clause_loc, t2, 2, t, alarg);
3674 t = fold_convert_loc (clause_loc, ptype, t);
3675 x = create_tmp_var (ptype);
3676 t = build2 (MODIFY_EXPR, ptype, x, t);
3677 gimplify_and_add (t, ilist);
3678 t = build_simple_mem_ref_loc (clause_loc, x);
3679 SET_DECL_VALUE_EXPR (new_var, t);
3680 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
3682 continue;
3683 default:
3684 continue;
3687 new_var = var = OMP_CLAUSE_DECL (c);
3688 if (c_kind == OMP_CLAUSE_REDUCTION && TREE_CODE (var) == MEM_REF)
3690 var = TREE_OPERAND (var, 0);
3691 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
3692 var = TREE_OPERAND (var, 0);
3693 if (TREE_CODE (var) == INDIRECT_REF
3694 || TREE_CODE (var) == ADDR_EXPR)
3695 var = TREE_OPERAND (var, 0);
3696 if (is_variable_sized (var))
3698 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
3699 var = DECL_VALUE_EXPR (var);
3700 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
3701 var = TREE_OPERAND (var, 0);
3702 gcc_assert (DECL_P (var));
3704 new_var = var;
3706 if (c_kind != OMP_CLAUSE_COPYIN)
3707 new_var = lookup_decl (var, ctx);
3709 if (c_kind == OMP_CLAUSE_SHARED || c_kind == OMP_CLAUSE_COPYIN)
3711 if (pass != 0)
3712 continue;
3714 /* C/C++ array section reductions. */
3715 else if (c_kind == OMP_CLAUSE_REDUCTION
3716 && var != OMP_CLAUSE_DECL (c))
3718 if (pass == 0)
3719 continue;
3721 tree bias = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
3722 tree orig_var = TREE_OPERAND (OMP_CLAUSE_DECL (c), 0);
3723 if (TREE_CODE (orig_var) == POINTER_PLUS_EXPR)
3725 tree b = TREE_OPERAND (orig_var, 1);
3726 b = maybe_lookup_decl (b, ctx);
3727 if (b == NULL)
3729 b = TREE_OPERAND (orig_var, 1);
3730 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
3732 if (integer_zerop (bias))
3733 bias = b;
3734 else
3736 bias = fold_convert_loc (clause_loc,
3737 TREE_TYPE (b), bias);
3738 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
3739 TREE_TYPE (b), b, bias);
3741 orig_var = TREE_OPERAND (orig_var, 0);
3743 if (TREE_CODE (orig_var) == INDIRECT_REF
3744 || TREE_CODE (orig_var) == ADDR_EXPR)
3745 orig_var = TREE_OPERAND (orig_var, 0);
3746 tree d = OMP_CLAUSE_DECL (c);
3747 tree type = TREE_TYPE (d);
3748 gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
3749 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
3750 const char *name = get_name (orig_var);
3751 if (TREE_CONSTANT (v))
3753 x = create_tmp_var_raw (type, name);
3754 gimple_add_tmp_var (x);
3755 TREE_ADDRESSABLE (x) = 1;
3756 x = build_fold_addr_expr_loc (clause_loc, x);
3758 else
3760 tree atmp
3761 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
3762 tree t = maybe_lookup_decl (v, ctx);
3763 if (t)
3764 v = t;
3765 else
3766 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
3767 gimplify_expr (&v, ilist, NULL, is_gimple_val, fb_rvalue);
3768 t = fold_build2_loc (clause_loc, PLUS_EXPR,
3769 TREE_TYPE (v), v,
3770 build_int_cst (TREE_TYPE (v), 1));
3771 t = fold_build2_loc (clause_loc, MULT_EXPR,
3772 TREE_TYPE (v), t,
3773 TYPE_SIZE_UNIT (TREE_TYPE (type)));
3774 tree al = size_int (TYPE_ALIGN (TREE_TYPE (type)));
3775 x = build_call_expr_loc (clause_loc, atmp, 2, t, al);
3778 tree ptype = build_pointer_type (TREE_TYPE (type));
3779 x = fold_convert_loc (clause_loc, ptype, x);
3780 tree y = create_tmp_var (ptype, name);
3781 gimplify_assign (y, x, ilist);
3782 x = y;
3783 tree yb = y;
3785 if (!integer_zerop (bias))
3787 bias = fold_convert_loc (clause_loc, pointer_sized_int_node,
3788 bias);
3789 yb = fold_convert_loc (clause_loc, pointer_sized_int_node,
3791 yb = fold_build2_loc (clause_loc, MINUS_EXPR,
3792 pointer_sized_int_node, yb, bias);
3793 x = fold_convert_loc (clause_loc, TREE_TYPE (x), yb);
3794 yb = create_tmp_var (ptype, name);
3795 gimplify_assign (yb, x, ilist);
3796 x = yb;
3799 d = TREE_OPERAND (d, 0);
3800 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
3801 d = TREE_OPERAND (d, 0);
3802 if (TREE_CODE (d) == ADDR_EXPR)
3804 if (orig_var != var)
3806 gcc_assert (is_variable_sized (orig_var));
3807 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var),
3809 gimplify_assign (new_var, x, ilist);
3810 tree new_orig_var = lookup_decl (orig_var, ctx);
3811 tree t = build_fold_indirect_ref (new_var);
3812 DECL_IGNORED_P (new_var) = 0;
3813 TREE_THIS_NOTRAP (t);
3814 SET_DECL_VALUE_EXPR (new_orig_var, t);
3815 DECL_HAS_VALUE_EXPR_P (new_orig_var) = 1;
3817 else
3819 x = build2 (MEM_REF, TREE_TYPE (new_var), x,
3820 build_int_cst (ptype, 0));
3821 SET_DECL_VALUE_EXPR (new_var, x);
3822 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
3825 else
3827 gcc_assert (orig_var == var);
3828 if (TREE_CODE (d) == INDIRECT_REF)
3830 x = create_tmp_var (ptype, name);
3831 TREE_ADDRESSABLE (x) = 1;
3832 gimplify_assign (x, yb, ilist);
3833 x = build_fold_addr_expr_loc (clause_loc, x);
3835 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
3836 gimplify_assign (new_var, x, ilist);
3838 tree y1 = create_tmp_var (ptype, NULL);
3839 gimplify_assign (y1, y, ilist);
3840 tree i2 = NULL_TREE, y2 = NULL_TREE;
3841 tree body2 = NULL_TREE, end2 = NULL_TREE;
3842 tree y3 = NULL_TREE, y4 = NULL_TREE;
3843 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) || is_simd)
3845 y2 = create_tmp_var (ptype, NULL);
3846 gimplify_assign (y2, y, ilist);
3847 tree ref = build_outer_var_ref (var, ctx);
3848 /* For ref build_outer_var_ref already performs this. */
3849 if (TREE_CODE (d) == INDIRECT_REF)
3850 gcc_assert (omp_is_reference (var));
3851 else if (TREE_CODE (d) == ADDR_EXPR)
3852 ref = build_fold_addr_expr (ref);
3853 else if (omp_is_reference (var))
3854 ref = build_fold_addr_expr (ref);
3855 ref = fold_convert_loc (clause_loc, ptype, ref);
3856 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
3857 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
3859 y3 = create_tmp_var (ptype, NULL);
3860 gimplify_assign (y3, unshare_expr (ref), ilist);
3862 if (is_simd)
3864 y4 = create_tmp_var (ptype, NULL);
3865 gimplify_assign (y4, ref, dlist);
3868 tree i = create_tmp_var (TREE_TYPE (v), NULL);
3869 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), ilist);
3870 tree body = create_artificial_label (UNKNOWN_LOCATION);
3871 tree end = create_artificial_label (UNKNOWN_LOCATION);
3872 gimple_seq_add_stmt (ilist, gimple_build_label (body));
3873 if (y2)
3875 i2 = create_tmp_var (TREE_TYPE (v), NULL);
3876 gimplify_assign (i2, build_int_cst (TREE_TYPE (v), 0), dlist);
3877 body2 = create_artificial_label (UNKNOWN_LOCATION);
3878 end2 = create_artificial_label (UNKNOWN_LOCATION);
3879 gimple_seq_add_stmt (dlist, gimple_build_label (body2));
3881 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
3883 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
3884 tree decl_placeholder
3885 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
3886 SET_DECL_VALUE_EXPR (decl_placeholder,
3887 build_simple_mem_ref (y1));
3888 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
3889 SET_DECL_VALUE_EXPR (placeholder,
3890 y3 ? build_simple_mem_ref (y3)
3891 : error_mark_node);
3892 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
3893 x = lang_hooks.decls.omp_clause_default_ctor
3894 (c, build_simple_mem_ref (y1),
3895 y3 ? build_simple_mem_ref (y3) : NULL_TREE);
3896 if (x)
3897 gimplify_and_add (x, ilist);
3898 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
3900 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
3901 lower_omp (&tseq, ctx);
3902 gimple_seq_add_seq (ilist, tseq);
3904 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
3905 if (is_simd)
3907 SET_DECL_VALUE_EXPR (decl_placeholder,
3908 build_simple_mem_ref (y2));
3909 SET_DECL_VALUE_EXPR (placeholder,
3910 build_simple_mem_ref (y4));
3911 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
3912 lower_omp (&tseq, ctx);
3913 gimple_seq_add_seq (dlist, tseq);
3914 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
3916 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
3917 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 0;
3918 x = lang_hooks.decls.omp_clause_dtor
3919 (c, build_simple_mem_ref (y2));
3920 if (x)
3922 gimple_seq tseq = NULL;
3923 dtor = x;
3924 gimplify_stmt (&dtor, &tseq);
3925 gimple_seq_add_seq (dlist, tseq);
3928 else
3930 x = omp_reduction_init (c, TREE_TYPE (type));
3931 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
3933 /* reduction(-:var) sums up the partial results, so it
3934 acts identically to reduction(+:var). */
3935 if (code == MINUS_EXPR)
3936 code = PLUS_EXPR;
3938 gimplify_assign (build_simple_mem_ref (y1), x, ilist);
3939 if (is_simd)
3941 x = build2 (code, TREE_TYPE (type),
3942 build_simple_mem_ref (y4),
3943 build_simple_mem_ref (y2));
3944 gimplify_assign (build_simple_mem_ref (y4), x, dlist);
3947 gimple *g
3948 = gimple_build_assign (y1, POINTER_PLUS_EXPR, y1,
3949 TYPE_SIZE_UNIT (TREE_TYPE (type)));
3950 gimple_seq_add_stmt (ilist, g);
3951 if (y3)
3953 g = gimple_build_assign (y3, POINTER_PLUS_EXPR, y3,
3954 TYPE_SIZE_UNIT (TREE_TYPE (type)));
3955 gimple_seq_add_stmt (ilist, g);
3957 g = gimple_build_assign (i, PLUS_EXPR, i,
3958 build_int_cst (TREE_TYPE (i), 1));
3959 gimple_seq_add_stmt (ilist, g);
3960 g = gimple_build_cond (LE_EXPR, i, v, body, end);
3961 gimple_seq_add_stmt (ilist, g);
3962 gimple_seq_add_stmt (ilist, gimple_build_label (end));
3963 if (y2)
3965 g = gimple_build_assign (y2, POINTER_PLUS_EXPR, y2,
3966 TYPE_SIZE_UNIT (TREE_TYPE (type)));
3967 gimple_seq_add_stmt (dlist, g);
3968 if (y4)
3970 g = gimple_build_assign
3971 (y4, POINTER_PLUS_EXPR, y4,
3972 TYPE_SIZE_UNIT (TREE_TYPE (type)));
3973 gimple_seq_add_stmt (dlist, g);
3975 g = gimple_build_assign (i2, PLUS_EXPR, i2,
3976 build_int_cst (TREE_TYPE (i2), 1));
3977 gimple_seq_add_stmt (dlist, g);
3978 g = gimple_build_cond (LE_EXPR, i2, v, body2, end2);
3979 gimple_seq_add_stmt (dlist, g);
3980 gimple_seq_add_stmt (dlist, gimple_build_label (end2));
3982 continue;
3984 else if (is_variable_sized (var))
3986 /* For variable sized types, we need to allocate the
3987 actual storage here. Call alloca and store the
3988 result in the pointer decl that we created elsewhere. */
3989 if (pass == 0)
3990 continue;
3992 if (c_kind != OMP_CLAUSE_FIRSTPRIVATE || !is_task_ctx (ctx))
3994 gcall *stmt;
3995 tree tmp, atmp;
3997 ptr = DECL_VALUE_EXPR (new_var);
3998 gcc_assert (TREE_CODE (ptr) == INDIRECT_REF);
3999 ptr = TREE_OPERAND (ptr, 0);
4000 gcc_assert (DECL_P (ptr));
4001 x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
4003 /* void *tmp = __builtin_alloca */
4004 atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
4005 stmt = gimple_build_call (atmp, 2, x,
4006 size_int (DECL_ALIGN (var)));
4007 tmp = create_tmp_var_raw (ptr_type_node);
4008 gimple_add_tmp_var (tmp);
4009 gimple_call_set_lhs (stmt, tmp);
4011 gimple_seq_add_stmt (ilist, stmt);
4013 x = fold_convert_loc (clause_loc, TREE_TYPE (ptr), tmp);
4014 gimplify_assign (ptr, x, ilist);
4017 else if (omp_is_reference (var))
4019 /* For references that are being privatized for Fortran,
4020 allocate new backing storage for the new pointer
4021 variable. This allows us to avoid changing all the
4022 code that expects a pointer to something that expects
4023 a direct variable. */
4024 if (pass == 0)
4025 continue;
4027 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
4028 if (c_kind == OMP_CLAUSE_FIRSTPRIVATE && is_task_ctx (ctx))
4030 x = build_receiver_ref (var, false, ctx);
4031 x = build_fold_addr_expr_loc (clause_loc, x);
4033 else if (TREE_CONSTANT (x))
4035 /* For reduction in SIMD loop, defer adding the
4036 initialization of the reference, because if we decide
4037 to use SIMD array for it, the initialization could cause
4038 expansion ICE. */
4039 if (c_kind == OMP_CLAUSE_REDUCTION && is_simd)
4040 x = NULL_TREE;
4041 else
4043 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
4044 get_name (var));
4045 gimple_add_tmp_var (x);
4046 TREE_ADDRESSABLE (x) = 1;
4047 x = build_fold_addr_expr_loc (clause_loc, x);
4050 else
4052 tree atmp
4053 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
4054 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
4055 tree al = size_int (TYPE_ALIGN (rtype));
4056 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
4059 if (x)
4061 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
4062 gimplify_assign (new_var, x, ilist);
4065 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
4067 else if (c_kind == OMP_CLAUSE_REDUCTION
4068 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4070 if (pass == 0)
4071 continue;
4073 else if (pass != 0)
4074 continue;
4076 switch (OMP_CLAUSE_CODE (c))
4078 case OMP_CLAUSE_SHARED:
4079 /* Ignore shared directives in teams construct. */
4080 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
4081 continue;
4082 /* Shared global vars are just accessed directly. */
4083 if (is_global_var (new_var))
4084 break;
4085 /* For taskloop firstprivate/lastprivate, represented
4086 as firstprivate and shared clause on the task, new_var
4087 is the firstprivate var. */
4088 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
4089 break;
4090 /* Set up the DECL_VALUE_EXPR for shared variables now. This
4091 needs to be delayed until after fixup_child_record_type so
4092 that we get the correct type during the dereference. */
4093 by_ref = use_pointer_for_field (var, ctx);
4094 x = build_receiver_ref (var, by_ref, ctx);
4095 SET_DECL_VALUE_EXPR (new_var, x);
4096 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4098 /* ??? If VAR is not passed by reference, and the variable
4099 hasn't been initialized yet, then we'll get a warning for
4100 the store into the omp_data_s structure. Ideally, we'd be
4101 able to notice this and not store anything at all, but
4102 we're generating code too early. Suppress the warning. */
4103 if (!by_ref)
4104 TREE_NO_WARNING (var) = 1;
4105 break;
4107 case OMP_CLAUSE_LASTPRIVATE:
4108 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
4109 break;
4110 /* FALLTHRU */
4112 case OMP_CLAUSE_PRIVATE:
4113 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE)
4114 x = build_outer_var_ref (var, ctx);
4115 else if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
4117 if (is_task_ctx (ctx))
4118 x = build_receiver_ref (var, false, ctx);
4119 else
4120 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_PRIVATE);
4122 else
4123 x = NULL;
4124 do_private:
4125 tree nx;
4126 nx = lang_hooks.decls.omp_clause_default_ctor
4127 (c, unshare_expr (new_var), x);
4128 if (is_simd)
4130 tree y = lang_hooks.decls.omp_clause_dtor (c, new_var);
4131 if ((TREE_ADDRESSABLE (new_var) || nx || y
4132 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
4133 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4134 ivar, lvar))
4136 if (nx)
4137 x = lang_hooks.decls.omp_clause_default_ctor
4138 (c, unshare_expr (ivar), x);
4139 if (nx && x)
4140 gimplify_and_add (x, &llist[0]);
4141 if (y)
4143 y = lang_hooks.decls.omp_clause_dtor (c, ivar);
4144 if (y)
4146 gimple_seq tseq = NULL;
4148 dtor = y;
4149 gimplify_stmt (&dtor, &tseq);
4150 gimple_seq_add_seq (&llist[1], tseq);
4153 break;
4156 if (nx)
4157 gimplify_and_add (nx, ilist);
4158 /* FALLTHRU */
4160 do_dtor:
4161 x = lang_hooks.decls.omp_clause_dtor (c, new_var);
4162 if (x)
4164 gimple_seq tseq = NULL;
4166 dtor = x;
4167 gimplify_stmt (&dtor, &tseq);
4168 gimple_seq_add_seq (dlist, tseq);
4170 break;
4172 case OMP_CLAUSE_LINEAR:
4173 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
4174 goto do_firstprivate;
4175 if (OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
4176 x = NULL;
4177 else
4178 x = build_outer_var_ref (var, ctx);
4179 goto do_private;
4181 case OMP_CLAUSE_FIRSTPRIVATE:
4182 if (is_task_ctx (ctx))
4184 if (omp_is_reference (var) || is_variable_sized (var))
4185 goto do_dtor;
4186 else if (is_global_var (maybe_lookup_decl_in_outer_ctx (var,
4187 ctx))
4188 || use_pointer_for_field (var, NULL))
4190 x = build_receiver_ref (var, false, ctx);
4191 SET_DECL_VALUE_EXPR (new_var, x);
4192 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4193 goto do_dtor;
4196 do_firstprivate:
4197 x = build_outer_var_ref (var, ctx);
4198 if (is_simd)
4200 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
4201 && gimple_omp_for_combined_into_p (ctx->stmt))
4203 tree t = OMP_CLAUSE_LINEAR_STEP (c);
4204 tree stept = TREE_TYPE (t);
4205 tree ct = omp_find_clause (clauses,
4206 OMP_CLAUSE__LOOPTEMP_);
4207 gcc_assert (ct);
4208 tree l = OMP_CLAUSE_DECL (ct);
4209 tree n1 = fd->loop.n1;
4210 tree step = fd->loop.step;
4211 tree itype = TREE_TYPE (l);
4212 if (POINTER_TYPE_P (itype))
4213 itype = signed_type_for (itype);
4214 l = fold_build2 (MINUS_EXPR, itype, l, n1);
4215 if (TYPE_UNSIGNED (itype)
4216 && fd->loop.cond_code == GT_EXPR)
4217 l = fold_build2 (TRUNC_DIV_EXPR, itype,
4218 fold_build1 (NEGATE_EXPR, itype, l),
4219 fold_build1 (NEGATE_EXPR,
4220 itype, step));
4221 else
4222 l = fold_build2 (TRUNC_DIV_EXPR, itype, l, step);
4223 t = fold_build2 (MULT_EXPR, stept,
4224 fold_convert (stept, l), t);
4226 if (OMP_CLAUSE_LINEAR_ARRAY (c))
4228 x = lang_hooks.decls.omp_clause_linear_ctor
4229 (c, new_var, x, t);
4230 gimplify_and_add (x, ilist);
4231 goto do_dtor;
4234 if (POINTER_TYPE_P (TREE_TYPE (x)))
4235 x = fold_build2 (POINTER_PLUS_EXPR,
4236 TREE_TYPE (x), x, t);
4237 else
4238 x = fold_build2 (PLUS_EXPR, TREE_TYPE (x), x, t);
4241 if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LINEAR
4242 || TREE_ADDRESSABLE (new_var))
4243 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4244 ivar, lvar))
4246 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR)
4248 tree iv = create_tmp_var (TREE_TYPE (new_var));
4249 x = lang_hooks.decls.omp_clause_copy_ctor (c, iv, x);
4250 gimplify_and_add (x, ilist);
4251 gimple_stmt_iterator gsi
4252 = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
4253 gassign *g
4254 = gimple_build_assign (unshare_expr (lvar), iv);
4255 gsi_insert_before_without_update (&gsi, g,
4256 GSI_SAME_STMT);
4257 tree t = OMP_CLAUSE_LINEAR_STEP (c);
4258 enum tree_code code = PLUS_EXPR;
4259 if (POINTER_TYPE_P (TREE_TYPE (new_var)))
4260 code = POINTER_PLUS_EXPR;
4261 g = gimple_build_assign (iv, code, iv, t);
4262 gsi_insert_before_without_update (&gsi, g,
4263 GSI_SAME_STMT);
4264 break;
4266 x = lang_hooks.decls.omp_clause_copy_ctor
4267 (c, unshare_expr (ivar), x);
4268 gimplify_and_add (x, &llist[0]);
4269 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
4270 if (x)
4272 gimple_seq tseq = NULL;
4274 dtor = x;
4275 gimplify_stmt (&dtor, &tseq);
4276 gimple_seq_add_seq (&llist[1], tseq);
4278 break;
4281 x = lang_hooks.decls.omp_clause_copy_ctor
4282 (c, unshare_expr (new_var), x);
4283 gimplify_and_add (x, ilist);
4284 goto do_dtor;
4286 case OMP_CLAUSE__LOOPTEMP_:
4287 gcc_assert (is_taskreg_ctx (ctx));
4288 x = build_outer_var_ref (var, ctx);
4289 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
4290 gimplify_and_add (x, ilist);
4291 break;
4293 case OMP_CLAUSE_COPYIN:
4294 by_ref = use_pointer_for_field (var, NULL);
4295 x = build_receiver_ref (var, by_ref, ctx);
4296 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, x);
4297 append_to_statement_list (x, &copyin_seq);
4298 copyin_by_ref |= by_ref;
4299 break;
4301 case OMP_CLAUSE_REDUCTION:
4302 /* OpenACC reductions are initialized using the
4303 GOACC_REDUCTION internal function. */
4304 if (is_gimple_omp_oacc (ctx->stmt))
4305 break;
4306 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4308 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
4309 gimple *tseq;
4310 x = build_outer_var_ref (var, ctx);
4312 if (omp_is_reference (var)
4313 && !useless_type_conversion_p (TREE_TYPE (placeholder),
4314 TREE_TYPE (x)))
4315 x = build_fold_addr_expr_loc (clause_loc, x);
4316 SET_DECL_VALUE_EXPR (placeholder, x);
4317 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
4318 tree new_vard = new_var;
4319 if (omp_is_reference (var))
4321 gcc_assert (TREE_CODE (new_var) == MEM_REF);
4322 new_vard = TREE_OPERAND (new_var, 0);
4323 gcc_assert (DECL_P (new_vard));
4325 if (is_simd
4326 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4327 ivar, lvar))
4329 if (new_vard == new_var)
4331 gcc_assert (DECL_VALUE_EXPR (new_var) == lvar);
4332 SET_DECL_VALUE_EXPR (new_var, ivar);
4334 else
4336 SET_DECL_VALUE_EXPR (new_vard,
4337 build_fold_addr_expr (ivar));
4338 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
4340 x = lang_hooks.decls.omp_clause_default_ctor
4341 (c, unshare_expr (ivar),
4342 build_outer_var_ref (var, ctx));
4343 if (x)
4344 gimplify_and_add (x, &llist[0]);
4345 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
4347 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
4348 lower_omp (&tseq, ctx);
4349 gimple_seq_add_seq (&llist[0], tseq);
4351 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
4352 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
4353 lower_omp (&tseq, ctx);
4354 gimple_seq_add_seq (&llist[1], tseq);
4355 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
4356 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
4357 if (new_vard == new_var)
4358 SET_DECL_VALUE_EXPR (new_var, lvar);
4359 else
4360 SET_DECL_VALUE_EXPR (new_vard,
4361 build_fold_addr_expr (lvar));
4362 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
4363 if (x)
4365 tseq = NULL;
4366 dtor = x;
4367 gimplify_stmt (&dtor, &tseq);
4368 gimple_seq_add_seq (&llist[1], tseq);
4370 break;
4372 /* If this is a reference to constant size reduction var
4373 with placeholder, we haven't emitted the initializer
4374 for it because it is undesirable if SIMD arrays are used.
4375 But if they aren't used, we need to emit the deferred
4376 initialization now. */
4377 else if (omp_is_reference (var) && is_simd)
4378 handle_simd_reference (clause_loc, new_vard, ilist);
4379 x = lang_hooks.decls.omp_clause_default_ctor
4380 (c, unshare_expr (new_var),
4381 build_outer_var_ref (var, ctx));
4382 if (x)
4383 gimplify_and_add (x, ilist);
4384 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
4386 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
4387 lower_omp (&tseq, ctx);
4388 gimple_seq_add_seq (ilist, tseq);
4390 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
4391 if (is_simd)
4393 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
4394 lower_omp (&tseq, ctx);
4395 gimple_seq_add_seq (dlist, tseq);
4396 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
4398 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
4399 goto do_dtor;
4401 else
4403 x = omp_reduction_init (c, TREE_TYPE (new_var));
4404 gcc_assert (TREE_CODE (TREE_TYPE (new_var)) != ARRAY_TYPE);
4405 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
4407 /* reduction(-:var) sums up the partial results, so it
4408 acts identically to reduction(+:var). */
4409 if (code == MINUS_EXPR)
4410 code = PLUS_EXPR;
4412 tree new_vard = new_var;
4413 if (is_simd && omp_is_reference (var))
4415 gcc_assert (TREE_CODE (new_var) == MEM_REF);
4416 new_vard = TREE_OPERAND (new_var, 0);
4417 gcc_assert (DECL_P (new_vard));
4419 if (is_simd
4420 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4421 ivar, lvar))
4423 tree ref = build_outer_var_ref (var, ctx);
4425 gimplify_assign (unshare_expr (ivar), x, &llist[0]);
4427 if (sctx.is_simt)
4429 if (!simt_lane)
4430 simt_lane = create_tmp_var (unsigned_type_node);
4431 x = build_call_expr_internal_loc
4432 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_BFLY,
4433 TREE_TYPE (ivar), 2, ivar, simt_lane);
4434 x = build2 (code, TREE_TYPE (ivar), ivar, x);
4435 gimplify_assign (ivar, x, &llist[2]);
4437 x = build2 (code, TREE_TYPE (ref), ref, ivar);
4438 ref = build_outer_var_ref (var, ctx);
4439 gimplify_assign (ref, x, &llist[1]);
4441 if (new_vard != new_var)
4443 SET_DECL_VALUE_EXPR (new_vard,
4444 build_fold_addr_expr (lvar));
4445 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
4448 else
4450 if (omp_is_reference (var) && is_simd)
4451 handle_simd_reference (clause_loc, new_vard, ilist);
4452 gimplify_assign (new_var, x, ilist);
4453 if (is_simd)
4455 tree ref = build_outer_var_ref (var, ctx);
4457 x = build2 (code, TREE_TYPE (ref), ref, new_var);
4458 ref = build_outer_var_ref (var, ctx);
4459 gimplify_assign (ref, x, dlist);
4463 break;
4465 default:
4466 gcc_unreachable ();
4471 if (sctx.lane)
4473 tree uid = create_tmp_var (ptr_type_node, "simduid");
4474 /* Don't want uninit warnings on simduid, it is always uninitialized,
4475 but we use it not for the value, but for the DECL_UID only. */
4476 TREE_NO_WARNING (uid) = 1;
4477 gimple *g
4478 = gimple_build_call_internal (IFN_GOMP_SIMD_LANE, 1, uid);
4479 gimple_call_set_lhs (g, sctx.lane);
4480 gimple_stmt_iterator gsi = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
4481 gsi_insert_before_without_update (&gsi, g, GSI_SAME_STMT);
4482 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SIMDUID_);
4483 OMP_CLAUSE__SIMDUID__DECL (c) = uid;
4484 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
4485 gimple_omp_for_set_clauses (ctx->stmt, c);
4486 g = gimple_build_assign (sctx.lane, INTEGER_CST,
4487 build_int_cst (unsigned_type_node, 0));
4488 gimple_seq_add_stmt (ilist, g);
4489 /* Emit reductions across SIMT lanes in log_2(simt_vf) steps. */
4490 if (llist[2])
4492 tree simt_vf = create_tmp_var (unsigned_type_node);
4493 g = gimple_build_call_internal (IFN_GOMP_SIMT_VF, 0);
4494 gimple_call_set_lhs (g, simt_vf);
4495 gimple_seq_add_stmt (dlist, g);
4497 tree t = build_int_cst (unsigned_type_node, 1);
4498 g = gimple_build_assign (simt_lane, INTEGER_CST, t);
4499 gimple_seq_add_stmt (dlist, g);
4501 t = build_int_cst (unsigned_type_node, 0);
4502 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
4503 gimple_seq_add_stmt (dlist, g);
4505 tree body = create_artificial_label (UNKNOWN_LOCATION);
4506 tree header = create_artificial_label (UNKNOWN_LOCATION);
4507 tree end = create_artificial_label (UNKNOWN_LOCATION);
4508 gimple_seq_add_stmt (dlist, gimple_build_goto (header));
4509 gimple_seq_add_stmt (dlist, gimple_build_label (body));
4511 gimple_seq_add_seq (dlist, llist[2]);
4513 g = gimple_build_assign (simt_lane, LSHIFT_EXPR, simt_lane, integer_one_node);
4514 gimple_seq_add_stmt (dlist, g);
4516 gimple_seq_add_stmt (dlist, gimple_build_label (header));
4517 g = gimple_build_cond (LT_EXPR, simt_lane, simt_vf, body, end);
4518 gimple_seq_add_stmt (dlist, g);
4520 gimple_seq_add_stmt (dlist, gimple_build_label (end));
4522 for (int i = 0; i < 2; i++)
4523 if (llist[i])
4525 tree vf = create_tmp_var (unsigned_type_node);
4526 g = gimple_build_call_internal (IFN_GOMP_SIMD_VF, 1, uid);
4527 gimple_call_set_lhs (g, vf);
4528 gimple_seq *seq = i == 0 ? ilist : dlist;
4529 gimple_seq_add_stmt (seq, g);
4530 tree t = build_int_cst (unsigned_type_node, 0);
4531 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
4532 gimple_seq_add_stmt (seq, g);
4533 tree body = create_artificial_label (UNKNOWN_LOCATION);
4534 tree header = create_artificial_label (UNKNOWN_LOCATION);
4535 tree end = create_artificial_label (UNKNOWN_LOCATION);
4536 gimple_seq_add_stmt (seq, gimple_build_goto (header));
4537 gimple_seq_add_stmt (seq, gimple_build_label (body));
4538 gimple_seq_add_seq (seq, llist[i]);
4539 t = build_int_cst (unsigned_type_node, 1);
4540 g = gimple_build_assign (sctx.idx, PLUS_EXPR, sctx.idx, t);
4541 gimple_seq_add_stmt (seq, g);
4542 gimple_seq_add_stmt (seq, gimple_build_label (header));
4543 g = gimple_build_cond (LT_EXPR, sctx.idx, vf, body, end);
4544 gimple_seq_add_stmt (seq, g);
4545 gimple_seq_add_stmt (seq, gimple_build_label (end));
4549 /* The copyin sequence is not to be executed by the main thread, since
4550 that would result in self-copies. Perhaps not visible to scalars,
4551 but it certainly is to C++ operator=. */
4552 if (copyin_seq)
4554 x = build_call_expr (builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM),
4556 x = build2 (NE_EXPR, boolean_type_node, x,
4557 build_int_cst (TREE_TYPE (x), 0));
4558 x = build3 (COND_EXPR, void_type_node, x, copyin_seq, NULL);
4559 gimplify_and_add (x, ilist);
4562 /* If any copyin variable is passed by reference, we must ensure the
4563 master thread doesn't modify it before it is copied over in all
4564 threads. Similarly for variables in both firstprivate and
4565 lastprivate clauses we need to ensure the lastprivate copying
4566 happens after firstprivate copying in all threads. And similarly
4567 for UDRs if initializer expression refers to omp_orig. */
4568 if (copyin_by_ref || lastprivate_firstprivate || reduction_omp_orig_ref)
4570 /* Don't add any barrier for #pragma omp simd or
4571 #pragma omp distribute. */
4572 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
4573 || gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_FOR)
4574 gimple_seq_add_stmt (ilist, omp_build_barrier (NULL_TREE));
4577 /* If max_vf is non-zero, then we can use only a vectorization factor
4578 up to the max_vf we chose. So stick it into the safelen clause. */
4579 if (sctx.max_vf)
4581 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
4582 OMP_CLAUSE_SAFELEN);
4583 if (c == NULL_TREE
4584 || (TREE_CODE (OMP_CLAUSE_SAFELEN_EXPR (c)) == INTEGER_CST
4585 && compare_tree_int (OMP_CLAUSE_SAFELEN_EXPR (c),
4586 sctx.max_vf) == 1))
4588 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN);
4589 OMP_CLAUSE_SAFELEN_EXPR (c) = build_int_cst (integer_type_node,
4590 sctx.max_vf);
4591 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
4592 gimple_omp_for_set_clauses (ctx->stmt, c);
4598 /* Generate code to implement the LASTPRIVATE clauses. This is used for
4599 both parallel and workshare constructs. PREDICATE may be NULL if it's
4600 always true. */
/* NOTE(review): STMT_LIST receives the generated copy-back statements and
   CTX is the lowering context of the construct the clauses appear on.  If
   the workshare construct was combined with its parallel, the clause walk
   continues onto the parallel's own clause list (see par_clauses below).  */
4602 static void
4603 lower_lastprivate_clauses (tree clauses, tree predicate, gimple_seq *stmt_list,
4604 omp_context *ctx)
4606 tree x, c, label = NULL, orig_clauses = clauses;
4607 bool par_clauses = false;
4608 tree simduid = NULL, lastlane = NULL, simtcond = NULL, simtlast = NULL;
4610 /* Early exit if there are no lastprivate or linear clauses. */
4611 for (; clauses ; clauses = OMP_CLAUSE_CHAIN (clauses))
4612 if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LASTPRIVATE
4613 || (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LINEAR
4614 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (clauses)))
4615 break;
4616 if (clauses == NULL)
4618 /* If this was a workshare clause, see if it had been combined
4619 with its parallel. In that case, look for the clauses on the
4620 parallel statement itself. */
4621 if (is_parallel_ctx (ctx))
4622 return;
4624 ctx = ctx->outer;
4625 if (ctx == NULL || !is_parallel_ctx (ctx))
4626 return;
4628 clauses = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
4629 OMP_CLAUSE_LASTPRIVATE);
4630 if (clauses == NULL)
4631 return;
4632 par_clauses = true;
/* For SIMD loops, detect whether this is the SIMT variant and fetch the
   simduid that indexes the per-lane "omp simd array" copies.  */
4635 bool maybe_simt = false;
4636 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
4637 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
4639 maybe_simt = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMT_);
4640 simduid = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMDUID_);
4641 if (simduid)
4642 simduid = OMP_CLAUSE__SIMDUID__DECL (simduid);
/* When PREDICATE is given, guard the copy-out so that it only runs when
   the predicate holds; under SIMT, a GOMP_SIMT_VOTE_ANY across lanes
   decides whether any lane's predicate was true.  */
4645 if (predicate)
4647 gcond *stmt;
4648 tree label_true, arm1, arm2;
4649 enum tree_code pred_code = TREE_CODE (predicate);
4651 label = create_artificial_label (UNKNOWN_LOCATION);
4652 label_true = create_artificial_label (UNKNOWN_LOCATION);
4653 if (TREE_CODE_CLASS (pred_code) == tcc_comparison)
4655 arm1 = TREE_OPERAND (predicate, 0);
4656 arm2 = TREE_OPERAND (predicate, 1);
4657 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
4658 gimplify_expr (&arm2, stmt_list, NULL, is_gimple_val, fb_rvalue);
4660 else
4662 arm1 = predicate;
4663 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
4664 arm2 = boolean_false_node;
4665 pred_code = NE_EXPR;
4667 if (maybe_simt)
4669 c = build2 (pred_code, boolean_type_node, arm1, arm2);
4670 c = fold_convert (integer_type_node, c);
4671 simtcond = create_tmp_var (integer_type_node);
4672 gimplify_assign (simtcond, c, stmt_list);
4673 gcall *g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY,
4674 1, simtcond);
4675 c = create_tmp_var (integer_type_node);
4676 gimple_call_set_lhs (g, c);
4677 gimple_seq_add_stmt (stmt_list, g);
4678 stmt = gimple_build_cond (NE_EXPR, c, integer_zero_node,
4679 label_true, label);
4681 else
4682 stmt = gimple_build_cond (pred_code, arm1, arm2, label_true, label);
4683 gimple_seq_add_stmt (stmt_list, stmt);
4684 gimple_seq_add_stmt (stmt_list, gimple_build_label (label_true));
/* Emit the actual copy-back for each lastprivate or copy-out linear
   clause; the chain may continue onto a combined parallel's clauses.  */
4687 for (c = clauses; c ;)
4689 tree var, new_var;
4690 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
4692 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4693 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
4694 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
4696 var = OMP_CLAUSE_DECL (c);
4697 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4698 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
4699 && is_taskloop_ctx (ctx))
4701 gcc_checking_assert (ctx->outer && is_task_ctx (ctx->outer));
4702 new_var = lookup_decl (var, ctx->outer);
4704 else
4706 new_var = lookup_decl (var, ctx);
4707 /* Avoid uninitialized warnings for lastprivate and
4708 for linear iterators. */
4709 if (predicate
4710 && (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4711 || OMP_CLAUSE_LINEAR_NO_COPYIN (c)))
4712 TREE_NO_WARNING (new_var) = 1;
/* If the privatized variable lives in a per-lane SIMD array, read the
   element belonging to the lane that ran the last iteration
   (GOMP_SIMD_LAST_LANE / GOMP_SIMT_LAST_LANE + XCHG_IDX).  */
4715 if (simduid && DECL_HAS_VALUE_EXPR_P (new_var))
4717 tree val = DECL_VALUE_EXPR (new_var);
4718 if (TREE_CODE (val) == ARRAY_REF
4719 && VAR_P (TREE_OPERAND (val, 0))
4720 && lookup_attribute ("omp simd array",
4721 DECL_ATTRIBUTES (TREE_OPERAND (val,
4722 0))))
4724 if (lastlane == NULL)
4726 lastlane = create_tmp_var (unsigned_type_node);
4727 gcall *g
4728 = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
4729 2, simduid,
4730 TREE_OPERAND (val, 1));
4731 gimple_call_set_lhs (g, lastlane);
4732 gimple_seq_add_stmt (stmt_list, g);
4734 new_var = build4 (ARRAY_REF, TREE_TYPE (val),
4735 TREE_OPERAND (val, 0), lastlane,
4736 NULL_TREE, NULL_TREE);
4737 if (maybe_simt)
4739 gcall *g;
4740 if (simtlast == NULL)
4742 simtlast = create_tmp_var (unsigned_type_node);
4743 g = gimple_build_call_internal
4744 (IFN_GOMP_SIMT_LAST_LANE, 1, simtcond);
4745 gimple_call_set_lhs (g, simtlast);
4746 gimple_seq_add_stmt (stmt_list, g);
4748 x = build_call_expr_internal_loc
4749 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_IDX,
4750 TREE_TYPE (new_var), 2, new_var, simtlast);
4751 new_var = unshare_expr (new_var);
4752 gimplify_assign (new_var, x, stmt_list);
4753 new_var = unshare_expr (new_var);
/* Lower any deferred clause-attached statement sequences first.  */
4758 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4759 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
4761 lower_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
4762 gimple_seq_add_seq (stmt_list,
4763 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
4764 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) = NULL;
4766 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
4767 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
4769 lower_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
4770 gimple_seq_add_seq (stmt_list,
4771 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
4772 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) = NULL;
4775 x = NULL_TREE;
4776 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4777 && OMP_CLAUSE_LASTPRIVATE_TASKLOOP_IV (c))
4779 gcc_checking_assert (is_taskloop_ctx (ctx));
4780 tree ovar = maybe_lookup_decl_in_outer_ctx (var,
4781 ctx->outer->outer);
4782 if (is_global_var (ovar))
4783 x = ovar;
4785 if (!x)
4786 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_LASTPRIVATE);
4787 if (omp_is_reference (var))
4788 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
4789 x = lang_hooks.decls.omp_clause_assign_op (c, x, new_var);
4790 gimplify_and_add (x, stmt_list);
4792 c = OMP_CLAUSE_CHAIN (c);
4793 if (c == NULL && !par_clauses)
4795 /* If this was a workshare clause, see if it had been combined
4796 with its parallel. In that case, continue looking for the
4797 clauses also on the parallel statement itself. */
4798 if (is_parallel_ctx (ctx))
4799 break;
4801 ctx = ctx->outer;
4802 if (ctx == NULL || !is_parallel_ctx (ctx))
4803 break;
4805 c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
4806 OMP_CLAUSE_LASTPRIVATE);
4807 par_clauses = true;
/* Close the predicate guard, if one was emitted above.  */
4811 if (label)
4812 gimple_seq_add_stmt (stmt_list, gimple_build_label (label));
4815 /* Lower the OpenACC reductions of CLAUSES for compute axis LEVEL
4816 (which might be a placeholder). INNER is true if this is an inner
4817 axis of a multi-axis loop. FORK and JOIN are (optional) fork and
4818 join markers. Generate the before-loop forking sequence in
4819 FORK_SEQ and the after-loop joining sequence to JOIN_SEQ. The
4820 general form of these sequences is
4822 GOACC_REDUCTION_SETUP
4823 GOACC_FORK
4824 GOACC_REDUCTION_INIT
4826 GOACC_REDUCTION_FINI
4827 GOACC_JOIN
4828 GOACC_REDUCTION_TEARDOWN. */
4830 static void
4831 lower_oacc_reductions (location_t loc, tree clauses, tree level, bool inner,
4832 gcall *fork, gcall *join, gimple_seq *fork_seq,
4833 gimple_seq *join_seq, omp_context *ctx)
4835 gimple_seq before_fork = NULL;
4836 gimple_seq after_fork = NULL;
4837 gimple_seq before_join = NULL;
4838 gimple_seq after_join = NULL;
4839 tree init_code = NULL_TREE, fini_code = NULL_TREE,
4840 setup_code = NULL_TREE, teardown_code = NULL_TREE;
4841 unsigned offset = 0;
4843 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
4844 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
4846 tree orig = OMP_CLAUSE_DECL (c);
4847 tree var = maybe_lookup_decl (orig, ctx);
4848 tree ref_to_res = NULL_TREE;
4849 tree incoming, outgoing, v1, v2, v3;
4850 bool is_private = false;
/* Canonicalize the reduction operator: minus reduces like plus, and
   the short-circuit logical ops are realized with their bitwise
   counterparts; the code is passed to GOACC_REDUCTION as an integer.  */
4852 enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
4853 if (rcode == MINUS_EXPR)
4854 rcode = PLUS_EXPR;
4855 else if (rcode == TRUTH_ANDIF_EXPR)
4856 rcode = BIT_AND_EXPR;
4857 else if (rcode == TRUTH_ORIF_EXPR)
4858 rcode = BIT_IOR_EXPR;
4859 tree op = build_int_cst (unsigned_type_node, rcode);
4861 if (!var)
4862 var = orig;
4864 incoming = outgoing = var;
4866 if (!inner)
4868 /* See if an outer construct also reduces this variable. */
4869 omp_context *outer = ctx;
4871 while (omp_context *probe = outer->outer)
4873 enum gimple_code type = gimple_code (probe->stmt);
4874 tree cls;
4876 switch (type)
4878 case GIMPLE_OMP_FOR:
4879 cls = gimple_omp_for_clauses (probe->stmt);
4880 break;
4882 case GIMPLE_OMP_TARGET:
4883 if (gimple_omp_target_kind (probe->stmt)
4884 != GF_OMP_TARGET_KIND_OACC_PARALLEL)
4885 goto do_lookup;
4887 cls = gimple_omp_target_clauses (probe->stmt);
4888 break;
4890 default:
4891 goto do_lookup;
4894 outer = probe;
4895 for (; cls; cls = OMP_CLAUSE_CHAIN (cls))
4896 if (OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_REDUCTION
4897 && orig == OMP_CLAUSE_DECL (cls))
4899 incoming = outgoing = lookup_decl (orig, probe);
4900 goto has_outer_reduction;
4902 else if ((OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_FIRSTPRIVATE
4903 || OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_PRIVATE)
4904 && orig == OMP_CLAUSE_DECL (cls))
4906 is_private = true;
4907 goto do_lookup;
4911 do_lookup:
4912 /* This is the outermost construct with this reduction,
4913 see if there's a mapping for it. */
4914 if (gimple_code (outer->stmt) == GIMPLE_OMP_TARGET
4915 && maybe_lookup_field (orig, outer) && !is_private)
4917 ref_to_res = build_receiver_ref (orig, false, outer);
4918 if (omp_is_reference (orig))
4919 ref_to_res = build_simple_mem_ref (ref_to_res);
4921 tree type = TREE_TYPE (var);
4922 if (POINTER_TYPE_P (type))
4923 type = TREE_TYPE (type);
4925 outgoing = var;
4926 incoming = omp_reduction_init_op (loc, rcode, type);
4928 else
4930 /* Try to look at enclosing contexts for reduction var,
4931 use original if no mapping found. */
4932 tree t = NULL_TREE;
4933 omp_context *c = ctx->outer;
4934 while (c && !t)
4936 t = maybe_lookup_decl (orig, c);
4937 c = c->outer;
4939 incoming = outgoing = (t ? t : orig);
4942 has_outer_reduction:;
4945 if (!ref_to_res)
4946 ref_to_res = integer_zero_node;
/* For by-reference reductions, create temporaries so each of the four
   GOACC_REDUCTION stages has its own pointer copy to dereference.  */
4948 if (omp_is_reference (orig))
4950 tree type = TREE_TYPE (var);
4951 const char *id = IDENTIFIER_POINTER (DECL_NAME (var));
4953 if (!inner)
4955 tree x = create_tmp_var (TREE_TYPE (type), id);
4956 gimplify_assign (var, build_fold_addr_expr (x), fork_seq);
4959 v1 = create_tmp_var (type, id);
4960 v2 = create_tmp_var (type, id);
4961 v3 = create_tmp_var (type, id);
4963 gimplify_assign (v1, var, fork_seq);
4964 gimplify_assign (v2, var, fork_seq);
4965 gimplify_assign (v3, var, fork_seq);
4967 var = build_simple_mem_ref (var);
4968 v1 = build_simple_mem_ref (v1);
4969 v2 = build_simple_mem_ref (v2);
4970 v3 = build_simple_mem_ref (v3);
4971 outgoing = build_simple_mem_ref (outgoing);
4973 if (!TREE_CONSTANT (incoming))
4974 incoming = build_simple_mem_ref (incoming);
4976 else
4977 v1 = v2 = v3 = var;
4979 /* Determine position in reduction buffer, which may be used
4980 by target. */
4981 enum machine_mode mode = TYPE_MODE (TREE_TYPE (var));
4982 unsigned align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
4983 offset = (offset + align - 1) & ~(align - 1);
4984 tree off = build_int_cst (sizetype, offset);
4985 offset += GET_MODE_SIZE (mode);
4987 if (!init_code)
4989 init_code = build_int_cst (integer_type_node,
4990 IFN_GOACC_REDUCTION_INIT);
4991 fini_code = build_int_cst (integer_type_node,
4992 IFN_GOACC_REDUCTION_FINI);
4993 setup_code = build_int_cst (integer_type_node,
4994 IFN_GOACC_REDUCTION_SETUP);
4995 teardown_code = build_int_cst (integer_type_node,
4996 IFN_GOACC_REDUCTION_TEARDOWN);
/* Build the four GOACC_REDUCTION calls (setup/init/fini/teardown) for
   this clause and queue their assignments around the fork and join.  */
4999 tree setup_call
5000 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5001 TREE_TYPE (var), 6, setup_code,
5002 unshare_expr (ref_to_res),
5003 incoming, level, op, off);
5004 tree init_call
5005 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5006 TREE_TYPE (var), 6, init_code,
5007 unshare_expr (ref_to_res),
5008 v1, level, op, off);
5009 tree fini_call
5010 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5011 TREE_TYPE (var), 6, fini_code,
5012 unshare_expr (ref_to_res),
5013 v2, level, op, off);
5014 tree teardown_call
5015 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5016 TREE_TYPE (var), 6, teardown_code,
5017 ref_to_res, v3, level, op, off);
5019 gimplify_assign (v1, setup_call, &before_fork);
5020 gimplify_assign (v2, init_call, &after_fork);
5021 gimplify_assign (v3, fini_call, &before_join);
5022 gimplify_assign (outgoing, teardown_call, &after_join);
5025 /* Now stitch things together. */
5026 gimple_seq_add_seq (fork_seq, before_fork);
5027 if (fork)
5028 gimple_seq_add_stmt (fork_seq, fork);
5029 gimple_seq_add_seq (fork_seq, after_fork);
5031 gimple_seq_add_seq (join_seq, before_join);
5032 if (join)
5033 gimple_seq_add_stmt (join_seq, join);
5034 gimple_seq_add_seq (join_seq, after_join);
5037 /* Generate code to implement the REDUCTION clauses. */
/* NOTE(review): STMT_SEQP receives the merge-back code.  With exactly one
   eligible scalar reduction the merge is emitted as a single OMP_ATOMIC
   update; otherwise all merges are serialized between
   GOMP_atomic_start/GOMP_atomic_end calls.  OpenACC and SIMD reductions
   are handled elsewhere (see the early returns below).  */
5039 static void
5040 lower_reduction_clauses (tree clauses, gimple_seq *stmt_seqp, omp_context *ctx)
5042 gimple_seq sub_seq = NULL;
5043 gimple *stmt;
5044 tree x, c;
5045 int count = 0;
5047 /* OpenACC loop reductions are handled elsewhere. */
5048 if (is_gimple_omp_oacc (ctx->stmt))
5049 return;
5051 /* SIMD reductions are handled in lower_rec_input_clauses. */
5052 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
5053 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
5054 return;
5056 /* First see if there is exactly one reduction clause. Use OMP_ATOMIC
5057 update in that case, otherwise use a lock. */
5058 for (c = clauses; c && count < 2; c = OMP_CLAUSE_CHAIN (c))
5059 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
5061 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
5062 || TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
5064 /* Never use OMP_ATOMIC for array reductions or UDRs. */
5065 count = -1;
5066 break;
5068 count++;
5071 if (count == 0)
5072 return;
5074 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
5076 tree var, ref, new_var, orig_var;
5077 enum tree_code code;
5078 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
5080 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5081 continue;
/* Strip a MEM_REF (array-section) decl down to the underlying base
   variable, looking through POINTER_PLUS_EXPR / INDIRECT_REF /
   ADDR_EXPR and any VLA value-expression.  */
5083 orig_var = var = OMP_CLAUSE_DECL (c);
5084 if (TREE_CODE (var) == MEM_REF)
5086 var = TREE_OPERAND (var, 0);
5087 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
5088 var = TREE_OPERAND (var, 0);
5089 if (TREE_CODE (var) == INDIRECT_REF
5090 || TREE_CODE (var) == ADDR_EXPR)
5091 var = TREE_OPERAND (var, 0);
5092 orig_var = var;
5093 if (is_variable_sized (var))
5095 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
5096 var = DECL_VALUE_EXPR (var);
5097 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
5098 var = TREE_OPERAND (var, 0);
5099 gcc_assert (DECL_P (var));
5102 new_var = lookup_decl (var, ctx);
5103 if (var == OMP_CLAUSE_DECL (c) && omp_is_reference (var))
5104 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
5105 ref = build_outer_var_ref (var, ctx);
5106 code = OMP_CLAUSE_REDUCTION_CODE (c);
5108 /* reduction(-:var) sums up the partial results, so it acts
5109 identically to reduction(+:var). */
5110 if (code == MINUS_EXPR)
5111 code = PLUS_EXPR;
/* Single scalar reduction: emit one OMP_ATOMIC update and return.  */
5113 if (count == 1)
5115 tree addr = build_fold_addr_expr_loc (clause_loc, ref);
5117 addr = save_expr (addr);
5118 ref = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (addr)), addr);
5119 x = fold_build2_loc (clause_loc, code, TREE_TYPE (ref), ref, new_var);
5120 x = build2 (OMP_ATOMIC, void_type_node, addr, x);
5121 gimplify_and_add (x, stmt_seqp);
5122 return;
/* Array-section reduction: walk the elements with a generated loop,
   merging each private element into the corresponding outer element
   (via the UDR merge sequence when a placeholder is present).  */
5124 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
5126 tree d = OMP_CLAUSE_DECL (c);
5127 tree type = TREE_TYPE (d);
5128 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
5129 tree i = create_tmp_var (TREE_TYPE (v), NULL);
5130 tree ptype = build_pointer_type (TREE_TYPE (type));
5131 tree bias = TREE_OPERAND (d, 1);
5132 d = TREE_OPERAND (d, 0);
5133 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
5135 tree b = TREE_OPERAND (d, 1);
5136 b = maybe_lookup_decl (b, ctx);
5137 if (b == NULL)
5139 b = TREE_OPERAND (d, 1);
5140 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
5142 if (integer_zerop (bias))
5143 bias = b;
5144 else
5146 bias = fold_convert_loc (clause_loc, TREE_TYPE (b), bias);
5147 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
5148 TREE_TYPE (b), b, bias);
5150 d = TREE_OPERAND (d, 0);
5152 /* For ref build_outer_var_ref already performs this, so
5153 only new_var needs a dereference. */
5154 if (TREE_CODE (d) == INDIRECT_REF)
5156 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
5157 gcc_assert (omp_is_reference (var) && var == orig_var);
5159 else if (TREE_CODE (d) == ADDR_EXPR)
5161 if (orig_var == var)
5163 new_var = build_fold_addr_expr (new_var);
5164 ref = build_fold_addr_expr (ref);
5167 else
5169 gcc_assert (orig_var == var);
5170 if (omp_is_reference (var))
5171 ref = build_fold_addr_expr (ref);
5173 if (DECL_P (v))
5175 tree t = maybe_lookup_decl (v, ctx);
5176 if (t)
5177 v = t;
5178 else
5179 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
5180 gimplify_expr (&v, stmt_seqp, NULL, is_gimple_val, fb_rvalue);
5182 if (!integer_zerop (bias))
5184 bias = fold_convert_loc (clause_loc, sizetype, bias);
5185 new_var = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
5186 TREE_TYPE (new_var), new_var,
5187 unshare_expr (bias));
5188 ref = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
5189 TREE_TYPE (ref), ref, bias);
5191 new_var = fold_convert_loc (clause_loc, ptype, new_var);
5192 ref = fold_convert_loc (clause_loc, ptype, ref);
5193 tree m = create_tmp_var (ptype, NULL);
5194 gimplify_assign (m, new_var, stmt_seqp);
5195 new_var = m;
5196 m = create_tmp_var (ptype, NULL);
5197 gimplify_assign (m, ref, stmt_seqp);
5198 ref = m;
5199 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), stmt_seqp);
5200 tree body = create_artificial_label (UNKNOWN_LOCATION);
5201 tree end = create_artificial_label (UNKNOWN_LOCATION);
5202 gimple_seq_add_stmt (&sub_seq, gimple_build_label (body));
5203 tree priv = build_simple_mem_ref_loc (clause_loc, new_var);
5204 tree out = build_simple_mem_ref_loc (clause_loc, ref);
5205 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5207 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
5208 tree decl_placeholder
5209 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
5210 SET_DECL_VALUE_EXPR (placeholder, out);
5211 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
5212 SET_DECL_VALUE_EXPR (decl_placeholder, priv);
5213 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
5214 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
5215 gimple_seq_add_seq (&sub_seq,
5216 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
5217 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5218 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
5219 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
5221 else
5223 x = build2 (code, TREE_TYPE (out), out, priv);
5224 out = unshare_expr (out);
5225 gimplify_assign (out, x, &sub_seq);
5227 gimple *g = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
5228 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5229 gimple_seq_add_stmt (&sub_seq, g);
5230 g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
5231 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5232 gimple_seq_add_stmt (&sub_seq, g);
5233 g = gimple_build_assign (i, PLUS_EXPR, i,
5234 build_int_cst (TREE_TYPE (i), 1));
5235 gimple_seq_add_stmt (&sub_seq, g);
5236 g = gimple_build_cond (LE_EXPR, i, v, body, end);
5237 gimple_seq_add_stmt (&sub_seq, g);
5238 gimple_seq_add_stmt (&sub_seq, gimple_build_label (end));
/* UDR on a scalar: run the user-supplied merge sequence with the
   placeholder bound to the outer copy.  */
5240 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5242 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
5244 if (omp_is_reference (var)
5245 && !useless_type_conversion_p (TREE_TYPE (placeholder),
5246 TREE_TYPE (ref)))
5247 ref = build_fold_addr_expr_loc (clause_loc, ref);
5248 SET_DECL_VALUE_EXPR (placeholder, ref);
5249 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
5250 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
5251 gimple_seq_add_seq (&sub_seq, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
5252 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5253 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
5255 else
5257 x = build2 (code, TREE_TYPE (ref), ref, new_var);
5258 ref = build_outer_var_ref (var, ctx);
5259 gimplify_assign (ref, x, &sub_seq);
/* Serialize all the queued merges under the global atomic lock.  */
5263 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START),
5265 gimple_seq_add_stmt (stmt_seqp, stmt);
5267 gimple_seq_add_seq (stmt_seqp, sub_seq);
5269 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END),
5271 gimple_seq_add_stmt (stmt_seqp, stmt);
5275 /* Generate code to implement the COPYPRIVATE clauses. */
/* NOTE(review): SLIST receives the sending side (store this thread's
   value, or its address when passed by reference, into the sender
   record); RLIST receives the receiving side (copy the broadcast value
   back into each thread's variable via the clause's assignment op).  */
5277 static void
5278 lower_copyprivate_clauses (tree clauses, gimple_seq *slist, gimple_seq *rlist,
5279 omp_context *ctx)
5281 tree c;
5283 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
5285 tree var, new_var, ref, x;
5286 bool by_ref;
5287 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
5289 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYPRIVATE)
5290 continue;
5292 var = OMP_CLAUSE_DECL (c);
5293 by_ref = use_pointer_for_field (var, NULL);
5295 ref = build_sender_ref (var, ctx);
5296 x = new_var = lookup_decl_in_outer_ctx (var, ctx);
5297 if (by_ref)
5299 x = build_fold_addr_expr_loc (clause_loc, new_var);
5300 x = fold_convert_loc (clause_loc, TREE_TYPE (ref), x);
5302 gimplify_assign (ref, x, slist);
5304 ref = build_receiver_ref (var, false, ctx);
5305 if (by_ref)
5307 ref = fold_convert_loc (clause_loc,
5308 build_pointer_type (TREE_TYPE (new_var)),
5309 ref);
5310 ref = build_fold_indirect_ref_loc (clause_loc, ref);
5312 if (omp_is_reference (var))
5314 ref = fold_convert_loc (clause_loc, TREE_TYPE (new_var), ref);
5315 ref = build_simple_mem_ref_loc (clause_loc, ref);
5316 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
5318 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, ref);
5319 gimplify_and_add (x, rlist);
/* Generate code to implement the clauses, FIRSTPRIVATE, COPYIN, LASTPRIVATE,
   and REDUCTION from the sender (aka parent) side.  Incoming copies are
   appended to ILIST, outgoing copy-backs to OLIST.  CTX is the context of
   the construct whose sender record is being filled in.  */

static void
lower_send_clauses (tree clauses, gimple_seq *ilist, gimple_seq *olist,
		    omp_context *ctx)
{
  tree c, t;
  int ignored_looptemp = 0;
  bool is_taskloop = false;

  /* For taskloop, ignore first two _looptemp_ clauses, those are initialized
     by GOMP_taskloop.  */
  if (is_task_ctx (ctx) && gimple_omp_task_taskloop_p (ctx->stmt))
    {
      ignored_looptemp = 2;
      is_taskloop = true;
    }

  for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
    {
      tree val, ref, x, var;
      bool by_ref, do_in = false, do_out = false;
      location_t clause_loc = OMP_CLAUSE_LOCATION (c);

      /* First filter: decide whether this clause participates at all.  */
      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_PRIVATE:
	  /* Private vars only need sender-side work when the outer value
	     is referenced (e.g. for C++ default construction).  */
	  if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
	    break;
	  continue;
	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_LASTPRIVATE:
	case OMP_CLAUSE_REDUCTION:
	  break;
	case OMP_CLAUSE_SHARED:
	  if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	    break;
	  continue;
	case OMP_CLAUSE__LOOPTEMP_:
	  if (ignored_looptemp)
	    {
	      ignored_looptemp--;
	      continue;
	    }
	  break;
	default:
	  continue;
	}

      val = OMP_CLAUSE_DECL (c);
      /* Array-section reductions are represented as MEM_REFs; peel the
	 wrapping down to the underlying base decl.  */
      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	  && TREE_CODE (val) == MEM_REF)
	{
	  val = TREE_OPERAND (val, 0);
	  if (TREE_CODE (val) == POINTER_PLUS_EXPR)
	    val = TREE_OPERAND (val, 0);
	  if (TREE_CODE (val) == INDIRECT_REF
	      || TREE_CODE (val) == ADDR_EXPR)
	    val = TREE_OPERAND (val, 0);
	  if (is_variable_sized (val))
	    continue;
	}

      /* For OMP_CLAUSE_SHARED_FIRSTPRIVATE, look beyond the
	 outer taskloop region.  */
      omp_context *ctx_for_o = ctx;
      if (is_taskloop
	  && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
	  && OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	ctx_for_o = ctx->outer;

      var = lookup_decl_in_outer_ctx (val, ctx_for_o);

      /* Globals are directly visible to the child; only COPYIN still
	 needs a sender copy for them.  */
      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYIN
	  && is_global_var (var))
	continue;

      /* If VAR is a dummy standing in for a member access, materialize
	 the actual access expression in the outer context.  */
      t = omp_member_access_dummy_var (var);
      if (t)
	{
	  var = DECL_VALUE_EXPR (var);
	  tree o = maybe_lookup_decl_in_outer_ctx (t, ctx_for_o);
	  if (o != t)
	    var = unshare_and_remap (var, t, o);
	  else
	    var = unshare_expr (var);
	}

      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
	{
	  /* Handle taskloop firstprivate/lastprivate, where the
	     lastprivate on GIMPLE_OMP_TASK is represented as
	     OMP_CLAUSE_SHARED_FIRSTPRIVATE.  */
	  tree f = lookup_sfield ((splay_tree_key) &DECL_UID (val), ctx);
	  x = omp_build_component_ref (ctx->sender_decl, f);
	  if (use_pointer_for_field (val, ctx))
	    var = build_fold_addr_expr (var);
	  gimplify_assign (x, var, ilist);
	  DECL_ABSTRACT_ORIGIN (f) = NULL;
	  continue;
	}

      if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
	   || val == OMP_CLAUSE_DECL (c))
	  && is_variable_sized (val))
	continue;
      by_ref = use_pointer_for_field (val, NULL);

      /* Second filter: decide copy direction(s) per clause kind.  */
      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_FIRSTPRIVATE:
	  /* Suppress uninitialized-use warnings for implicitly added
	     firstprivates on task constructs.  */
	  if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c)
	      && !by_ref
	      && is_task_ctx (ctx))
	    TREE_NO_WARNING (var) = 1;
	  do_in = true;
	  break;

	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE__LOOPTEMP_:
	  do_in = true;
	  break;

	case OMP_CLAUSE_LASTPRIVATE:
	  if (by_ref || omp_is_reference (val))
	    {
	      if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
		continue;
	      do_in = true;
	    }
	  else
	    {
	      do_out = true;
	      if (lang_hooks.decls.omp_private_outer_ref (val))
		do_in = true;
	    }
	  break;

	case OMP_CLAUSE_REDUCTION:
	  do_in = true;
	  if (val == OMP_CLAUSE_DECL (c))
	    do_out = !(by_ref || omp_is_reference (val));
	  else
	    by_ref = TREE_CODE (TREE_TYPE (val)) == ARRAY_TYPE;
	  break;

	default:
	  gcc_unreachable ();
	}

      if (do_in)
	{
	  ref = build_sender_ref (val, ctx);
	  x = by_ref ? build_fold_addr_expr_loc (clause_loc, var) : var;
	  gimplify_assign (ref, x, ilist);
	  if (is_task_ctx (ctx))
	    DECL_ABSTRACT_ORIGIN (TREE_OPERAND (ref, 1)) = NULL;
	}

      if (do_out)
	{
	  ref = build_sender_ref (val, ctx);
	  gimplify_assign (var, ref, olist);
	}
    }
}
/* Generate code to implement SHARED from the sender (aka parent)
   side.  This is trickier, since GIMPLE_OMP_PARALLEL_CLAUSES doesn't
   list things that got automatically shared.  Instead we walk the
   fields of the sender/receiver record type and reconstruct the
   copies from the field back-links.  */

static void
lower_send_shared_vars (gimple_seq *ilist, gimple_seq *olist, omp_context *ctx)
{
  tree var, ovar, nvar, t, f, x, record_type;

  if (ctx->record_type == NULL)
    return;

  /* Prefer the sender record type when one was built separately.  */
  record_type = ctx->srecord_type ? ctx->srecord_type : ctx->record_type;
  for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
    {
      /* Each field remembers the original variable it was created for.  */
      ovar = DECL_ABSTRACT_ORIGIN (f);
      if (!ovar || TREE_CODE (ovar) == FIELD_DECL)
	continue;

      nvar = maybe_lookup_decl (ovar, ctx);
      if (!nvar || !DECL_HAS_VALUE_EXPR_P (nvar))
	continue;

      /* If CTX is a nested parallel directive.  Find the immediately
	 enclosing parallel or workshare construct that contains a
	 mapping for OVAR.  */
      var = lookup_decl_in_outer_ctx (ovar, ctx);

      /* Expand member-access dummy vars into the real access
	 expression, remapped into the outer context if needed.  */
      t = omp_member_access_dummy_var (var);
      if (t)
	{
	  var = DECL_VALUE_EXPR (var);
	  tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
	  if (o != t)
	    var = unshare_and_remap (var, t, o);
	  else
	    var = unshare_expr (var);
	}

      if (use_pointer_for_field (ovar, ctx))
	{
	  /* Pass the address; no copy-back is needed since the child
	     writes through the pointer directly.  */
	  x = build_sender_ref (ovar, ctx);
	  var = build_fold_addr_expr (var);
	  gimplify_assign (x, var, ilist);
	}
      else
	{
	  /* Pass by value in, and copy the value back out.  */
	  x = build_sender_ref (ovar, ctx);
	  gimplify_assign (x, var, ilist);

	  if (!TREE_READONLY (var)
	      /* We don't need to receive a new reference to a result
	         or parm decl.  In fact we may not store to it as we will
		 invalidate any pending RSO and generate wrong gimple
		 during inlining.  */
	      && !((TREE_CODE (var) == RESULT_DECL
		    || TREE_CODE (var) == PARM_DECL)
		   && DECL_BY_REFERENCE (var)))
	    {
	      x = build_sender_ref (ovar, ctx);
	      gimplify_assign (var, x, olist);
	    }
	}
    }
}
/* Emit an OpenACC head marker call, encapsulating the partitioning and
   other information that must be processed by the target compiler.
   Return the maximum number of dimensions the associated loop might
   be partitioned over.  */

static unsigned
lower_oacc_head_mark (location_t loc, tree ddvar, tree clauses,
		      gimple_seq *seq, omp_context *ctx)
{
  unsigned levels = 0;
  unsigned tag = 0;		/* OLF_* bits describing the loop.  */
  tree gang_static = NULL_TREE;
  auto_vec<tree, 5> args;

  args.quick_push (build_int_cst
		   (integer_type_node, IFN_UNIQUE_OACC_HEAD_MARK));
  args.quick_push (ddvar);
  /* Scan the clauses, accumulating partitioning flags and counting
     requested partitioning levels.  */
  for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_GANG:
	  tag |= OLF_DIM_GANG;
	  gang_static = OMP_CLAUSE_GANG_STATIC_EXPR (c);
	  /* static:* is represented by -1, and we can ignore it, as
	     scheduling is always static.  */
	  if (gang_static && integer_minus_onep (gang_static))
	    gang_static = NULL_TREE;
	  levels++;
	  break;

	case OMP_CLAUSE_WORKER:
	  tag |= OLF_DIM_WORKER;
	  levels++;
	  break;

	case OMP_CLAUSE_VECTOR:
	  tag |= OLF_DIM_VECTOR;
	  levels++;
	  break;

	case OMP_CLAUSE_SEQ:
	  tag |= OLF_SEQ;
	  break;

	case OMP_CLAUSE_AUTO:
	  tag |= OLF_AUTO;
	  break;

	case OMP_CLAUSE_INDEPENDENT:
	  tag |= OLF_INDEPENDENT;
	  break;

	case OMP_CLAUSE_TILE:
	  tag |= OLF_TILE;
	  break;

	default:
	  continue;
	}
    }

  if (gang_static)
    {
      if (DECL_P (gang_static))
	gang_static = build_outer_var_ref (gang_static, ctx);
      tag |= OLF_GANG_STATIC;
    }

  /* In a parallel region, loops are implicitly INDEPENDENT.  */
  omp_context *tgt = enclosing_target_ctx (ctx);
  if (!tgt || is_oacc_parallel (tgt))
    tag |= OLF_INDEPENDENT;

  if (tag & OLF_TILE)
    /* Tiling could use all 3 levels.  */
    levels = 3;
  else
    {
      /* A loop lacking SEQ, GANG, WORKER and/or VECTOR could be AUTO.
	 Ensure at least one level, or 2 for possible auto
	 partitioning */
      bool maybe_auto = !(tag & (((GOMP_DIM_MASK (GOMP_DIM_MAX) - 1)
				  << OLF_DIM_BASE) | OLF_SEQ));

      if (levels < 1u + maybe_auto)
	levels = 1u + maybe_auto;
    }

  args.quick_push (build_int_cst (integer_type_node, levels));
  args.quick_push (build_int_cst (integer_type_node, tag));
  if (gang_static)
    args.quick_push (gang_static);

  gcall *call = gimple_build_call_internal_vec (IFN_UNIQUE, args);
  gimple_set_location (call, loc);
  gimple_set_lhs (call, ddvar);
  gimple_seq_add_stmt (seq, call);

  return levels;
}
5662 /* Emit an OpenACC lopp head or tail marker to SEQ. LEVEL is the
5663 partitioning level of the enclosed region. */
5665 static void
5666 lower_oacc_loop_marker (location_t loc, tree ddvar, bool head,
5667 tree tofollow, gimple_seq *seq)
5669 int marker_kind = (head ? IFN_UNIQUE_OACC_HEAD_MARK
5670 : IFN_UNIQUE_OACC_TAIL_MARK);
5671 tree marker = build_int_cst (integer_type_node, marker_kind);
5672 int nargs = 2 + (tofollow != NULL_TREE);
5673 gcall *call = gimple_build_call_internal (IFN_UNIQUE, nargs,
5674 marker, ddvar, tofollow);
5675 gimple_set_location (call, loc);
5676 gimple_set_lhs (call, ddvar);
5677 gimple_seq_add_stmt (seq, call);
/* Generate the before and after OpenACC loop sequences.  CLAUSES are
   the loop clauses, from which we extract reductions.  Initialize
   HEAD and TAIL.  One fork/join pair is emitted per partitioning
   level, with HEAD built front-to-back and TAIL back-to-front so the
   joins nest inside-out relative to the forks.  */

static void
lower_oacc_head_tail (location_t loc, tree clauses,
		      gimple_seq *head, gimple_seq *tail, omp_context *ctx)
{
  bool inner = false;
  tree ddvar = create_tmp_var (integer_type_node, ".data_dep");
  gimple_seq_add_stmt (head, gimple_build_assign (ddvar, integer_zero_node));

  unsigned count = lower_oacc_head_mark (loc, ddvar, clauses, head, ctx);
  tree fork_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_FORK);
  tree join_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_JOIN);

  gcc_assert (count);
  for (unsigned done = 1; count; count--, done++)
    {
      gimple_seq fork_seq = NULL;
      gimple_seq join_seq = NULL;

      /* The partitioning axis is filled in later; -1 is a placeholder.  */
      tree place = build_int_cst (integer_type_node, -1);
      gcall *fork = gimple_build_call_internal (IFN_UNIQUE, 3,
						fork_kind, ddvar, place);
      gimple_set_location (fork, loc);
      gimple_set_lhs (fork, ddvar);

      gcall *join = gimple_build_call_internal (IFN_UNIQUE, 3,
						join_kind, ddvar, place);
      gimple_set_location (join, loc);
      gimple_set_lhs (join, ddvar);

      /* Mark the beginning of this level sequence.  */
      if (inner)
	lower_oacc_loop_marker (loc, ddvar, true,
				build_int_cst (integer_type_node, count),
				&fork_seq);
      lower_oacc_loop_marker (loc, ddvar, false,
			      build_int_cst (integer_type_node, done),
			      &join_seq);

      lower_oacc_reductions (loc, clauses, place, inner,
			     fork, join, &fork_seq, &join_seq, ctx);

      /* Append this level to head.  */
      gimple_seq_add_seq (head, fork_seq);
      /* Prepend it to tail.  */
      gimple_seq_add_seq (&join_seq, *tail);
      *tail = join_seq;

      inner = true;
    }

  /* Mark the end of the sequence.  */
  lower_oacc_loop_marker (loc, ddvar, true, NULL_TREE, head);
  lower_oacc_loop_marker (loc, ddvar, false, NULL_TREE, tail);
}
5739 /* If exceptions are enabled, wrap the statements in BODY in a MUST_NOT_THROW
5740 catch handler and return it. This prevents programs from violating the
5741 structured block semantics with throws. */
5743 static gimple_seq
5744 maybe_catch_exception (gimple_seq body)
5746 gimple *g;
5747 tree decl;
5749 if (!flag_exceptions)
5750 return body;
5752 if (lang_hooks.eh_protect_cleanup_actions != NULL)
5753 decl = lang_hooks.eh_protect_cleanup_actions ();
5754 else
5755 decl = builtin_decl_explicit (BUILT_IN_TRAP);
5757 g = gimple_build_eh_must_not_throw (decl);
5758 g = gimple_build_try (body, gimple_seq_alloc_with_stmt (g),
5759 GIMPLE_TRY_CATCH);
5761 return gimple_seq_alloc_with_stmt (g);
5765 /* Routines to lower OMP directives into OMP-GIMPLE. */
5767 /* If ctx is a worksharing context inside of a cancellable parallel
5768 region and it isn't nowait, add lhs to its GIMPLE_OMP_RETURN
5769 and conditional branch to parallel's cancel_label to handle
5770 cancellation in the implicit barrier. */
5772 static void
5773 maybe_add_implicit_barrier_cancel (omp_context *ctx, gimple_seq *body)
5775 gimple *omp_return = gimple_seq_last_stmt (*body);
5776 gcc_assert (gimple_code (omp_return) == GIMPLE_OMP_RETURN);
5777 if (gimple_omp_return_nowait_p (omp_return))
5778 return;
5779 if (ctx->outer
5780 && gimple_code (ctx->outer->stmt) == GIMPLE_OMP_PARALLEL
5781 && ctx->outer->cancellable)
5783 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_CANCEL);
5784 tree c_bool_type = TREE_TYPE (TREE_TYPE (fndecl));
5785 tree lhs = create_tmp_var (c_bool_type);
5786 gimple_omp_return_set_lhs (omp_return, lhs);
5787 tree fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
5788 gimple *g = gimple_build_cond (NE_EXPR, lhs,
5789 fold_convert (c_bool_type,
5790 boolean_false_node),
5791 ctx->outer->cancel_label, fallthru_label);
5792 gimple_seq_add_stmt (body, g);
5793 gimple_seq_add_stmt (body, gimple_build_label (fallthru_label));
/* Lower the OpenMP sections directive in the current statement in GSI_P.
   CTX is the enclosing OMP context for the current statement.  */

static void
lower_omp_sections (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree block, control;
  gimple_stmt_iterator tgsi;
  gomp_sections *stmt;
  gimple *t;
  gbind *new_stmt, *bind;
  gimple_seq ilist, dlist, olist, new_body;

  stmt = as_a <gomp_sections *> (gsi_stmt (*gsi_p));

  push_gimplify_context ();

  /* ILIST receives the privatization setup, DLIST the destruction.  */
  dlist = NULL;
  ilist = NULL;
  lower_rec_input_clauses (gimple_omp_sections_clauses (stmt),
			   &ilist, &dlist, ctx, NULL);

  /* Lower each GIMPLE_OMP_SECTION in turn, splicing its lowered body
     after the section statement itself.  */
  new_body = gimple_omp_body (stmt);
  gimple_omp_set_body (stmt, NULL);
  tgsi = gsi_start (new_body);
  for (; !gsi_end_p (tgsi); gsi_next (&tgsi))
    {
      omp_context *sctx;
      gimple *sec_start;

      sec_start = gsi_stmt (tgsi);
      sctx = maybe_lookup_ctx (sec_start);
      gcc_assert (sctx);

      lower_omp (gimple_omp_body_ptr (sec_start), sctx);
      gsi_insert_seq_after (&tgsi, gimple_omp_body (sec_start),
			    GSI_CONTINUE_LINKING);
      gimple_omp_set_body (sec_start, NULL);

      /* The last section additionally runs the lastprivate copy-out.  */
      if (gsi_one_before_end_p (tgsi))
	{
	  gimple_seq l = NULL;
	  lower_lastprivate_clauses (gimple_omp_sections_clauses (stmt), NULL,
				     &l, ctx);
	  gsi_insert_seq_after (&tgsi, l, GSI_CONTINUE_LINKING);
	  gimple_omp_section_set_last (sec_start);
	}

      gsi_insert_after (&tgsi, gimple_build_omp_return (false),
			GSI_CONTINUE_LINKING);
    }

  block = make_node (BLOCK);
  bind = gimple_build_bind (NULL, new_body, block);

  olist = NULL;
  lower_reduction_clauses (gimple_omp_sections_clauses (stmt), &olist, ctx);

  /* Replace the sections statement with an enclosing bind that will
     hold the fully assembled lowered sequence.  */
  block = make_node (BLOCK);
  new_stmt = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, new_stmt, true);

  pop_gimplify_context (new_stmt);
  gimple_bind_append_vars (new_stmt, ctx->block_vars);
  BLOCK_VARS (block) = gimple_bind_vars (bind);
  if (BLOCK_VARS (block))
    TREE_USED (block) = 1;

  /* Assemble: privatization, the sections stmt, the dispatch switch,
     the section bodies, the continue, reductions and destructors.  */
  new_body = NULL;
  gimple_seq_add_seq (&new_body, ilist);
  gimple_seq_add_stmt (&new_body, stmt);
  gimple_seq_add_stmt (&new_body, gimple_build_omp_sections_switch ());
  gimple_seq_add_stmt (&new_body, bind);

  control = create_tmp_var (unsigned_type_node, ".section");
  t = gimple_build_omp_continue (control, control);
  gimple_omp_sections_set_control (stmt, control);
  gimple_seq_add_stmt (&new_body, t);

  gimple_seq_add_seq (&new_body, olist);
  if (ctx->cancellable)
    gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
  gimple_seq_add_seq (&new_body, dlist);

  new_body = maybe_catch_exception (new_body);

  bool nowait = omp_find_clause (gimple_omp_sections_clauses (stmt),
				 OMP_CLAUSE_NOWAIT) != NULL_TREE;
  t = gimple_build_omp_return (nowait);
  gimple_seq_add_stmt (&new_body, t);
  maybe_add_implicit_barrier_cancel (ctx, &new_body);

  gimple_bind_set_body (new_stmt, new_body);
}
5893 /* A subroutine of lower_omp_single. Expand the simple form of
5894 a GIMPLE_OMP_SINGLE, without a copyprivate clause:
5896 if (GOMP_single_start ())
5897 BODY;
5898 [ GOMP_barrier (); ] -> unless 'nowait' is present.
5900 FIXME. It may be better to delay expanding the logic of this until
5901 pass_expand_omp. The expanded logic may make the job more difficult
5902 to a synchronization analysis pass. */
5904 static void
5905 lower_omp_single_simple (gomp_single *single_stmt, gimple_seq *pre_p)
5907 location_t loc = gimple_location (single_stmt);
5908 tree tlabel = create_artificial_label (loc);
5909 tree flabel = create_artificial_label (loc);
5910 gimple *call, *cond;
5911 tree lhs, decl;
5913 decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_START);
5914 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (decl)));
5915 call = gimple_build_call (decl, 0);
5916 gimple_call_set_lhs (call, lhs);
5917 gimple_seq_add_stmt (pre_p, call);
5919 cond = gimple_build_cond (EQ_EXPR, lhs,
5920 fold_convert_loc (loc, TREE_TYPE (lhs),
5921 boolean_true_node),
5922 tlabel, flabel);
5923 gimple_seq_add_stmt (pre_p, cond);
5924 gimple_seq_add_stmt (pre_p, gimple_build_label (tlabel));
5925 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
5926 gimple_seq_add_stmt (pre_p, gimple_build_label (flabel));
/* A subroutine of lower_omp_single.  Expand the simple form of
   a GIMPLE_OMP_SINGLE, with a copyprivate clause:

	#pragma omp single copyprivate (a, b, c)

   Create a new structure to hold copies of 'a', 'b' and 'c' and emit:

      {
	if ((copyout_p = GOMP_single_copy_start ()) == NULL)
	  {
	    BODY;
	    copyout.a = a;
	    copyout.b = b;
	    copyout.c = c;
	    GOMP_single_copy_end (&copyout);
	  }
	else
	  {
	    a = copyout_p->a;
	    b = copyout_p->b;
	    c = copyout_p->c;
	  }
	GOMP_barrier ();
      }

  FIXME.  It may be better to delay expanding the logic of this until
  pass_expand_omp.  The expanded logic may make the job more difficult
  to a synchronization analysis pass.  */

static void
lower_omp_single_copy (gomp_single *single_stmt, gimple_seq *pre_p,
		       omp_context *ctx)
{
  tree ptr_type, t, l0, l1, l2, bfn_decl;
  gimple_seq copyin_seq;
  location_t loc = gimple_location (single_stmt);

  /* The copy-out structure lives in the executing thread; everyone
     else receives a pointer to it.  */
  ctx->sender_decl = create_tmp_var (ctx->record_type, ".omp_copy_o");

  ptr_type = build_pointer_type (ctx->record_type);
  ctx->receiver_decl = create_tmp_var (ptr_type, ".omp_copy_i");

  /* L0: single body; L1: copy-in for the other threads; L2: done.  */
  l0 = create_artificial_label (loc);
  l1 = create_artificial_label (loc);
  l2 = create_artificial_label (loc);

  bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_START);
  t = build_call_expr_loc (loc, bfn_decl, 0);
  t = fold_convert_loc (loc, ptr_type, t);
  gimplify_assign (ctx->receiver_decl, t, pre_p);

  /* NULL means this thread won the race and executes the body.  */
  t = build2 (EQ_EXPR, boolean_type_node, ctx->receiver_decl,
	      build_int_cst (ptr_type, 0));
  t = build3 (COND_EXPR, void_type_node, t,
	      build_and_jump (&l0), build_and_jump (&l1));
  gimplify_and_add (t, pre_p);

  gimple_seq_add_stmt (pre_p, gimple_build_label (l0));

  gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));

  /* Fill the copy-out record (into PRE_P) and build the copy-in
     sequence the other threads will run (into COPYIN_SEQ).  */
  copyin_seq = NULL;
  lower_copyprivate_clauses (gimple_omp_single_clauses (single_stmt), pre_p,
			      &copyin_seq, ctx);

  t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
  bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_END);
  t = build_call_expr_loc (loc, bfn_decl, 1, t);
  gimplify_and_add (t, pre_p);

  t = build_and_jump (&l2);
  gimplify_and_add (t, pre_p);

  gimple_seq_add_stmt (pre_p, gimple_build_label (l1));

  gimple_seq_add_seq (pre_p, copyin_seq);

  gimple_seq_add_stmt (pre_p, gimple_build_label (l2));
}
/* Expand code for an OpenMP single directive.  Dispatches to the
   copyprivate or the simple expansion depending on whether a
   copy-out record type was built for the construct.  */

static void
lower_omp_single (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree block;
  gomp_single *single_stmt = as_a <gomp_single *> (gsi_stmt (*gsi_p));
  gbind *bind;
  gimple_seq bind_body, bind_body_tail = NULL, dlist;

  push_gimplify_context ();

  block = make_node (BLOCK);
  bind = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, bind, true);
  bind_body = NULL;
  dlist = NULL;
  lower_rec_input_clauses (gimple_omp_single_clauses (single_stmt),
			   &bind_body, &dlist, ctx, NULL);
  lower_omp (gimple_omp_body_ptr (single_stmt), ctx);

  gimple_seq_add_stmt (&bind_body, single_stmt);

  /* A record type is only built when a copyprivate clause is present.  */
  if (ctx->record_type)
    lower_omp_single_copy (single_stmt, &bind_body, ctx);
  else
    lower_omp_single_simple (single_stmt, &bind_body);

  gimple_omp_set_body (single_stmt, NULL);

  gimple_seq_add_seq (&bind_body, dlist);

  bind_body = maybe_catch_exception (bind_body);

  bool nowait = omp_find_clause (gimple_omp_single_clauses (single_stmt),
				 OMP_CLAUSE_NOWAIT) != NULL_TREE;
  gimple *g = gimple_build_omp_return (nowait);
  gimple_seq_add_stmt (&bind_body_tail, g);
  maybe_add_implicit_barrier_cancel (ctx, &bind_body_tail);
  if (ctx->record_type)
    {
      /* Clobber the copy-out record after the return so its storage
	 can be reused; the volatile empty constructor is GIMPLE's
	 clobber representation.  */
      gimple_stmt_iterator gsi = gsi_start (bind_body_tail);
      tree clobber = build_constructor (ctx->record_type, NULL);
      TREE_THIS_VOLATILE (clobber) = 1;
      gsi_insert_after (&gsi, gimple_build_assign (ctx->sender_decl,
						   clobber), GSI_SAME_STMT);
    }
  gimple_seq_add_seq (&bind_body, bind_body_tail);
  gimple_bind_set_body (bind, bind_body);

  pop_gimplify_context (bind);

  gimple_bind_append_vars (bind, ctx->block_vars);
  BLOCK_VARS (block) = ctx->block_vars;
  if (BLOCK_VARS (block))
    TREE_USED (block) = 1;
}
6070 /* Expand code for an OpenMP master directive. */
6072 static void
6073 lower_omp_master (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6075 tree block, lab = NULL, x, bfn_decl;
6076 gimple *stmt = gsi_stmt (*gsi_p);
6077 gbind *bind;
6078 location_t loc = gimple_location (stmt);
6079 gimple_seq tseq;
6081 push_gimplify_context ();
6083 block = make_node (BLOCK);
6084 bind = gimple_build_bind (NULL, NULL, block);
6085 gsi_replace (gsi_p, bind, true);
6086 gimple_bind_add_stmt (bind, stmt);
6088 bfn_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
6089 x = build_call_expr_loc (loc, bfn_decl, 0);
6090 x = build2 (EQ_EXPR, boolean_type_node, x, integer_zero_node);
6091 x = build3 (COND_EXPR, void_type_node, x, NULL, build_and_jump (&lab));
6092 tseq = NULL;
6093 gimplify_and_add (x, &tseq);
6094 gimple_bind_add_seq (bind, tseq);
6096 lower_omp (gimple_omp_body_ptr (stmt), ctx);
6097 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
6098 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
6099 gimple_omp_set_body (stmt, NULL);
6101 gimple_bind_add_stmt (bind, gimple_build_label (lab));
6103 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
6105 pop_gimplify_context (bind);
6107 gimple_bind_append_vars (bind, ctx->block_vars);
6108 BLOCK_VARS (block) = ctx->block_vars;
6112 /* Expand code for an OpenMP taskgroup directive. */
6114 static void
6115 lower_omp_taskgroup (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6117 gimple *stmt = gsi_stmt (*gsi_p);
6118 gcall *x;
6119 gbind *bind;
6120 tree block = make_node (BLOCK);
6122 bind = gimple_build_bind (NULL, NULL, block);
6123 gsi_replace (gsi_p, bind, true);
6124 gimple_bind_add_stmt (bind, stmt);
6126 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_START),
6128 gimple_bind_add_stmt (bind, x);
6130 lower_omp (gimple_omp_body_ptr (stmt), ctx);
6131 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
6132 gimple_omp_set_body (stmt, NULL);
6134 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
6136 gimple_bind_append_vars (bind, ctx->block_vars);
6137 BLOCK_VARS (block) = ctx->block_vars;
/* Fold the OMP_ORDERED_CLAUSES for the OMP_ORDERED in STMT if possible.
   Merges adjacent depend(sink:...) ordered constructs and folds all
   sink vectors into a single canonical one.  */

static void
lower_omp_ordered_clauses (gimple_stmt_iterator *gsi_p, gomp_ordered *ord_stmt,
			   omp_context *ctx)
{
  struct omp_for_data fd;
  if (!ctx->outer || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR)
    return;

  unsigned int len = gimple_omp_for_collapse (ctx->outer->stmt);
  struct omp_for_data_loop *loops = XALLOCAVEC (struct omp_for_data_loop, len);
  omp_extract_for_data (as_a <gomp_for *> (ctx->outer->stmt), &fd, loops);
  if (!fd.ordered)
    return;

  tree *list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
  tree c = gimple_omp_ordered_clauses (ord_stmt);
  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
      && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
    {
      /* Merge depend clauses from multiple adjacent
	 #pragma omp ordered depend(sink:...) constructs
	 into one #pragma omp ordered depend(sink:...), so that
	 we can optimize them together.  */
      gimple_stmt_iterator gsi = *gsi_p;
      gsi_next (&gsi);
      while (!gsi_end_p (gsi))
	{
	  gimple *stmt = gsi_stmt (gsi);
	  if (is_gimple_debug (stmt)
	      || gimple_code (stmt) == GIMPLE_NOP)
	    {
	      gsi_next (&gsi);
	      continue;
	    }
	  if (gimple_code (stmt) != GIMPLE_OMP_ORDERED)
	    break;
	  gomp_ordered *ord_stmt2 = as_a <gomp_ordered *> (stmt);
	  c = gimple_omp_ordered_clauses (ord_stmt2);
	  if (c == NULL_TREE
	      || OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND
	      || OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
	    break;
	  /* Splice the second construct's clauses onto ours and delete
	     the now-empty construct.  */
	  while (*list_p)
	    list_p = &OMP_CLAUSE_CHAIN (*list_p);
	  *list_p = c;
	  gsi_remove (&gsi, true);
	}
    }

  /* Canonicalize sink dependence clauses into one folded clause if
     possible.

     The basic algorithm is to create a sink vector whose first
     element is the GCD of all the first elements, and whose remaining
     elements are the minimum of the subsequent columns.

     We ignore dependence vectors whose first element is zero because
     such dependencies are known to be executed by the same thread.

     We take into account the direction of the loop, so a minimum
     becomes a maximum if the loop is iterating forwards.  We also
     ignore sink clauses where the loop direction is unknown, or where
     the offsets are clearly invalid because they are not a multiple
     of the loop increment.

     For example:

	#pragma omp for ordered(2)
	for (i=0; i < N; ++i)
	  for (j=0; j < M; ++j)
	    {
	      #pragma omp ordered \
		depend(sink:i-8,j-2) \
		depend(sink:i,j-1) \	// Completely ignored because i+0.
		depend(sink:i-4,j-3) \
		depend(sink:i-6,j-4)
	      #pragma omp ordered depend(source)
	    }

     Folded clause is:

	depend(sink:-gcd(8,4,6),-min(2,3,4))
	  -or-
	depend(sink:-2,-2)
  */

  /* FIXME: Computing GCD's where the first element is zero is
     non-trivial in the presence of collapsed loops.  Do this later.  */
  if (fd.collapse > 1)
    return;

  /* First LEN-1 slots hold the folded vector being built; the upper
     LEN-1 slots hold the current clause's vector for comparison.  */
  wide_int *folded_deps = XALLOCAVEC (wide_int, 2 * len - 1);
  memset (folded_deps, 0, sizeof (*folded_deps) * (2 * len - 1));
  tree folded_dep = NULL_TREE;
  /* TRUE if the first dimension's offset is negative.  */
  bool neg_offset_p = false;

  list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
  unsigned int i;
  while ((c = *list_p) != NULL)
    {
      bool remove = false;

      gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND);
      if (OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
	goto next_ordered_clause;

      tree vec;
      for (vec = OMP_CLAUSE_DECL (c), i = 0;
	   vec && TREE_CODE (vec) == TREE_LIST;
	   vec = TREE_CHAIN (vec), ++i)
	{
	  gcc_assert (i < len);

	  /* omp_extract_for_data has canonicalized the condition.  */
	  gcc_assert (fd.loops[i].cond_code == LT_EXPR
		      || fd.loops[i].cond_code == GT_EXPR);
	  bool forward = fd.loops[i].cond_code == LT_EXPR;
	  bool maybe_lexically_later = true;

	  /* While the committee makes up its mind, bail if we have any
	     non-constant steps.  */
	  if (TREE_CODE (fd.loops[i].step) != INTEGER_CST)
	    goto lower_omp_ordered_ret;

	  tree itype = TREE_TYPE (TREE_VALUE (vec));
	  if (POINTER_TYPE_P (itype))
	    itype = sizetype;
	  wide_int offset = wide_int::from (TREE_PURPOSE (vec),
					    TYPE_PRECISION (itype),
					    TYPE_SIGN (itype));

	  /* Ignore invalid offsets that are not multiples of the step.  */
	  if (!wi::multiple_of_p
	      (wi::abs (offset), wi::abs ((wide_int) fd.loops[i].step),
	       UNSIGNED))
	    {
	      warning_at (OMP_CLAUSE_LOCATION (c), 0,
			  "ignoring sink clause with offset that is not "
			  "a multiple of the loop step");
	      remove = true;
	      goto next_ordered_clause;
	    }

	  /* Calculate the first dimension.  The first dimension of
	     the folded dependency vector is the GCD of the first
	     elements, while ignoring any first elements whose offset
	     is 0.  */
	  if (i == 0)
	    {
	      /* Ignore dependence vectors whose first dimension is 0.  */
	      if (offset == 0)
		{
		  remove = true;
		  goto next_ordered_clause;
		}
	      else
		{
		  if (!TYPE_UNSIGNED (itype) && (forward ^ wi::neg_p (offset)))
		    {
		      error_at (OMP_CLAUSE_LOCATION (c),
				"first offset must be in opposite direction "
				"of loop iterations");
		      goto lower_omp_ordered_ret;
		    }
		  /* Canonicalize so the stored offset is non-negative.  */
		  if (forward)
		    offset = -offset;
		  neg_offset_p = forward;
		  /* Initialize the first time around.  */
		  if (folded_dep == NULL_TREE)
		    {
		      folded_dep = c;
		      folded_deps[0] = offset;
		    }
		  else
		    folded_deps[0] = wi::gcd (folded_deps[0],
					      offset, UNSIGNED);
		}
	    }
	  /* Calculate minimum for the remaining dimensions.  */
	  else
	    {
	      folded_deps[len + i - 1] = offset;
	      if (folded_dep == c)
		folded_deps[i] = offset;
	      else if (maybe_lexically_later
		       && !wi::eq_p (folded_deps[i], offset))
		{
		  if (forward ^ wi::gts_p (folded_deps[i], offset))
		    {
		      /* This clause is lexically later; adopt its tail
			 as the new folded vector.  */
		      unsigned int j;
		      folded_dep = c;
		      for (j = 1; j <= i; j++)
			folded_deps[j] = folded_deps[len + j - 1];
		    }
		  else
		    maybe_lexically_later = false;
		}
	    }
	}
      gcc_assert (i == len);

      /* Every sink clause is folded away; only FOLDED_DEP survives.  */
      remove = true;

    next_ordered_clause:
      if (remove)
	*list_p = OMP_CLAUSE_CHAIN (c);
      else
	list_p = &OMP_CLAUSE_CHAIN (c);
    }

  if (folded_dep)
    {
      if (neg_offset_p)
	folded_deps[0] = -folded_deps[0];

      tree itype = TREE_TYPE (TREE_VALUE (OMP_CLAUSE_DECL (folded_dep)));
      if (POINTER_TYPE_P (itype))
	itype = sizetype;

      /* Install the folded first-dimension offset and put the folded
	 clause back at the head of the clause chain.  */
      TREE_PURPOSE (OMP_CLAUSE_DECL (folded_dep))
	= wide_int_to_tree (itype, folded_deps[0]);
      OMP_CLAUSE_CHAIN (folded_dep) = gimple_omp_ordered_clauses (ord_stmt);
      *gimple_omp_ordered_clauses_ptr (ord_stmt) = folded_dep;
    }

 lower_omp_ordered_ret:

  /* Ordered without clauses is #pragma omp ordered threads, while we want
     a nop instead if we remove all clauses.  */
  if (gimple_omp_ordered_clauses (ord_stmt) == NULL_TREE)
    gsi_replace (gsi_p, gimple_build_nop (), true);
}
6378 /* Expand code for an OpenMP ordered directive. */
/* Lower GIMPLE_OMP_ORDERED at *GSI_P in context CTX.  The statement is
   replaced by a GIMPLE_BIND that wraps the lowered body in matching
   start/end calls: IFN_GOMP_SIMD_ORDERED_{START,END} internal fns for
   ordered simd, otherwise the GOMP_ordered_{start,end} library builtins.
   Constructs carrying depend clauses are left unlowered here (see FIXME
   below).  Under possibly-SIMT execution a per-lane loop is emitted so
   that lanes execute the body one at a time.  */
6380 static void
6381 lower_omp_ordered (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6383 tree block;
6384 gimple *stmt = gsi_stmt (*gsi_p), *g;
6385 gomp_ordered *ord_stmt = as_a <gomp_ordered *> (stmt);
6386 gcall *x;
6387 gbind *bind;
6388 bool simd = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
6389 OMP_CLAUSE_SIMD);
6390 /* FIXME: this should check presence of OMP_CLAUSE__SIMT_ on the enclosing
6391 loop. */
6392 bool maybe_simt
6393 = simd && omp_maybe_offloaded_ctx (ctx) && omp_max_simt_vf () > 1;
6394 bool threads = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
6395 OMP_CLAUSE_THREADS);
/* Ordered with depend clause(s): nothing is lowered at this point; the
   statement is kept for the later expansion phase.  */
6397 if (omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
6398 OMP_CLAUSE_DEPEND))
6400 /* FIXME: This needs to be moved to the expansion to verify various
6401 conditions only testable on cfg with dominators computed, and also
6402 all the depend clauses to be merged still might need to be available
6403 for the runtime checks. */
6404 if (0)
6405 lower_omp_ordered_clauses (gsi_p, ord_stmt, ctx);
6406 return;
6409 push_gimplify_context ();
6411 block = make_node (BLOCK);
6412 bind = gimple_build_bind (NULL, NULL, block);
6413 gsi_replace (gsi_p, bind, true);
6414 gimple_bind_add_stmt (bind, stmt);
/* Emit the "start" call.  For simd the internal fn's argument records
   whether the threads clause was present.  */
6416 if (simd)
6418 x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_START, 1,
6419 build_int_cst (NULL_TREE, threads));
6420 cfun->has_simduid_loops = true;
6422 else
6423 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_START),
6425 gimple_bind_add_stmt (bind, x);
6427 tree counter = NULL_TREE, test = NULL_TREE, body = NULL_TREE;
/* SIMT prologue: fetch this lane's number into COUNTER and open a loop
   in which IFN_GOMP_SIMT_ORDERED_PRED selects, one iteration at a time,
   the single lane that may run the body.  */
6428 if (maybe_simt)
6430 counter = create_tmp_var (integer_type_node);
6431 g = gimple_build_call_internal (IFN_GOMP_SIMT_LANE, 0);
6432 gimple_call_set_lhs (g, counter);
6433 gimple_bind_add_stmt (bind, g);
6435 body = create_artificial_label (UNKNOWN_LOCATION);
6436 test = create_artificial_label (UNKNOWN_LOCATION);
6437 gimple_bind_add_stmt (bind, gimple_build_label (body));
6439 tree simt_pred = create_tmp_var (integer_type_node);
6440 g = gimple_build_call_internal (IFN_GOMP_SIMT_ORDERED_PRED, 1, counter);
6441 gimple_call_set_lhs (g, simt_pred);
6442 gimple_bind_add_stmt (bind, g);
6444 tree t = create_artificial_label (UNKNOWN_LOCATION);
6445 g = gimple_build_cond (EQ_EXPR, simt_pred, integer_zero_node, t, test);
6446 gimple_bind_add_stmt (bind, g);
6448 gimple_bind_add_stmt (bind, gimple_build_label (t));
6450 lower_omp (gimple_omp_body_ptr (stmt), ctx);
6451 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
6452 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
6453 gimple_omp_set_body (stmt, NULL);
/* SIMT latch: decrement COUNTER and loop back while any lane still has
   a non-negative counter (IFN_GOMP_SIMT_VOTE_ANY across lanes).  */
6455 if (maybe_simt)
6457 gimple_bind_add_stmt (bind, gimple_build_label (test));
6458 g = gimple_build_assign (counter, MINUS_EXPR, counter, integer_one_node);
6459 gimple_bind_add_stmt (bind, g);
6461 tree c = build2 (GE_EXPR, boolean_type_node, counter, integer_zero_node);
6462 tree nonneg = create_tmp_var (integer_type_node);
6463 gimple_seq tseq = NULL;
6464 gimplify_assign (nonneg, fold_convert (integer_type_node, c), &tseq);
6465 gimple_bind_add_seq (bind, tseq);
6467 g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY, 1, nonneg);
6468 gimple_call_set_lhs (g, nonneg);
6469 gimple_bind_add_stmt (bind, g);
6471 tree end = create_artificial_label (UNKNOWN_LOCATION);
6472 g = gimple_build_cond (NE_EXPR, nonneg, integer_zero_node, body, end);
6473 gimple_bind_add_stmt (bind, g);
6475 gimple_bind_add_stmt (bind, gimple_build_label (end));
/* Matching "end" call, mirroring the start call above.  */
6477 if (simd)
6478 x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_END, 1,
6479 build_int_cst (NULL_TREE, threads));
6480 else
6481 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_END),
6483 gimple_bind_add_stmt (bind, x);
6485 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
6487 pop_gimplify_context (bind);
6489 gimple_bind_append_vars (bind, ctx->block_vars);
6490 BLOCK_VARS (block) = gimple_bind_vars (bind);
6494 /* Gimplify a GIMPLE_OMP_CRITICAL statement. This is a relatively simple
6495 substitution of a couple of function calls. But in the NAMED case,
6496 requires that languages coordinate a symbol name. It is therefore
6497 best put here in common code. */
/* Map from critical-section name (an IDENTIFIER_NODE) to the artificial
   global mutex variable created for it.  Lazily allocated; GC-rooted via
   GTY so the decls survive across functions.  */
6499 static GTY(()) hash_map<tree, tree> *critical_name_mutexes;
/* Lower GIMPLE_OMP_CRITICAL at *GSI_P in context CTX: replace it with a
   GIMPLE_BIND containing lock call, lowered body (run through
   maybe_catch_exception), unlock call, and the region-return marker.  */
6501 static void
6502 lower_omp_critical (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6504 tree block;
6505 tree name, lock, unlock;
6506 gomp_critical *stmt = as_a <gomp_critical *> (gsi_stmt (*gsi_p));
6507 gbind *bind;
6508 location_t loc = gimple_location (stmt);
6509 gimple_seq tbody;
6511 name = gimple_omp_critical_name (stmt);
/* Named critical: lock through a TREE_PUBLIC/DECL_COMMON mutex named
   ".gomp_critical_user_<name>", so all translation units using the same
   critical name agree on one lock.  */
6512 if (name)
6514 tree decl;
6516 if (!critical_name_mutexes)
6517 critical_name_mutexes = hash_map<tree, tree>::create_ggc (10);
6519 tree *n = critical_name_mutexes->get (name);
6520 if (n == NULL)
6522 char *new_str;
6524 decl = create_tmp_var_raw (ptr_type_node);
6526 new_str = ACONCAT ((".gomp_critical_user_",
6527 IDENTIFIER_POINTER (name), NULL));
6528 DECL_NAME (decl) = get_identifier (new_str);
6529 TREE_PUBLIC (decl) = 1;
6530 TREE_STATIC (decl) = 1;
6531 DECL_COMMON (decl) = 1;
6532 DECL_ARTIFICIAL (decl) = 1;
6533 DECL_IGNORED_P (decl) = 1;
6535 varpool_node::finalize_decl (decl);
6537 critical_name_mutexes->put (name, decl);
6539 else
6540 decl = *n;
6542 /* If '#pragma omp critical' is inside offloaded region or
6543 inside function marked as offloadable, the symbol must be
6544 marked as offloadable too. */
6545 omp_context *octx;
6546 if (cgraph_node::get (current_function_decl)->offloadable)
6547 varpool_node::get_create (decl)->offloadable = 1;
6548 else
6549 for (octx = ctx->outer; octx; octx = octx->outer)
6550 if (is_gimple_omp_offloaded (octx->stmt))
6552 varpool_node::get_create (decl)->offloadable = 1;
6553 break;
6556 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_START);
6557 lock = build_call_expr_loc (loc, lock, 1,
6558 build_fold_addr_expr_loc (loc, decl));
6560 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_END);
6561 unlock = build_call_expr_loc (loc, unlock, 1,
6562 build_fold_addr_expr_loc (loc, decl));
/* Unnamed critical: use the global GOMP_critical_{start,end} pair.  */
6564 else
6566 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_START);
6567 lock = build_call_expr_loc (loc, lock, 0);
6569 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_END);
6570 unlock = build_call_expr_loc (loc, unlock, 0);
6573 push_gimplify_context ();
6575 block = make_node (BLOCK);
6576 bind = gimple_build_bind (NULL, NULL, block);
6577 gsi_replace (gsi_p, bind, true);
6578 gimple_bind_add_stmt (bind, stmt);
6580 tbody = gimple_bind_body (bind);
6581 gimplify_and_add (lock, &tbody);
6582 gimple_bind_set_body (bind, tbody);
6584 lower_omp (gimple_omp_body_ptr (stmt), ctx);
6585 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
6586 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
6587 gimple_omp_set_body (stmt, NULL);
6589 tbody = gimple_bind_body (bind);
6590 gimplify_and_add (unlock, &tbody);
6591 gimple_bind_set_body (bind, tbody);
6593 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
6595 pop_gimplify_context (bind);
6596 gimple_bind_append_vars (bind, ctx->block_vars);
6597 BLOCK_VARS (block) = gimple_bind_vars (bind);
6600 /* A subroutine of lower_omp_for. Generate code to emit the predicate
6601 for a lastprivate clause. Given a loop control predicate of (V
6602 cond N2), we gate the clause on (!(V cond N2)). The lowered form
6603 is appended to *DLIST, iterator initialization is appended to
6604 *BODY_P. */
6606 static void
6607 lower_omp_for_lastprivate (struct omp_for_data *fd, gimple_seq *body_p,
6608 gimple_seq *dlist, struct omp_context *ctx)
6610 tree clauses, cond, vinit;
6611 enum tree_code cond_code;
6612 gimple_seq stmts;
/* Invert the loop condition: lastprivate fires once the loop is done.  */
6614 cond_code = fd->loop.cond_code;
6615 cond_code = cond_code == LT_EXPR ? GE_EXPR : LE_EXPR;
6617 /* When possible, use a strict equality expression. This can let VRP
6618 type optimizations deduce the value and remove a copy. */
6619 if (tree_fits_shwi_p (fd->loop.step))
6621 HOST_WIDE_INT step = tree_to_shwi (fd->loop.step);
6622 if (step == 1 || step == -1)
6623 cond_code = EQ_EXPR;
6626 if (gimple_omp_for_kind (fd->for_stmt) == GF_OMP_FOR_KIND_GRID_LOOP
6627 || gimple_omp_for_grid_phony (fd->for_stmt))
6628 cond = omp_grid_lastprivate_predicate (fd);
6629 else
6631 tree n2 = fd->loop.n2;
/* For a combined collapsed loop with a non-constant end value, the bound
   to compare against may be communicated through a _LOOPTEMP_ clause of
   an enclosing taskreg construct (or the outer for's own data); locate
   and use that instead of fd->loop.n2.  */
6632 if (fd->collapse > 1
6633 && TREE_CODE (n2) != INTEGER_CST
6634 && gimple_omp_for_combined_into_p (fd->for_stmt))
6636 struct omp_context *taskreg_ctx = NULL;
6637 if (gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
6639 gomp_for *gfor = as_a <gomp_for *> (ctx->outer->stmt);
6640 if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_FOR
6641 || gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_DISTRIBUTE)
6643 if (gimple_omp_for_combined_into_p (gfor))
6645 gcc_assert (ctx->outer->outer
6646 && is_parallel_ctx (ctx->outer->outer))
6647 taskreg_ctx = ctx->outer->outer;
6649 else
6651 struct omp_for_data outer_fd;
6652 omp_extract_for_data (gfor, &outer_fd, NULL);
6653 n2 = fold_convert (TREE_TYPE (n2), outer_fd.loop.n2);
6656 else if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_TASKLOOP)
6657 taskreg_ctx = ctx->outer->outer;
6659 else if (is_taskreg_ctx (ctx->outer))
6660 taskreg_ctx = ctx->outer;
6661 if (taskreg_ctx)
6663 int i;
6664 tree taskreg_clauses
6665 = gimple_omp_taskreg_clauses (taskreg_ctx->stmt);
6666 tree innerc = omp_find_clause (taskreg_clauses,
6667 OMP_CLAUSE__LOOPTEMP_);
6668 gcc_assert (innerc);
/* Advance past fd->collapse _LOOPTEMP_ clauses; the next one, when
   present, carries the decl holding the bound to compare against.  */
6669 for (i = 0; i < fd->collapse; i++)
6671 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
6672 OMP_CLAUSE__LOOPTEMP_);
6673 gcc_assert (innerc);
6675 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
6676 OMP_CLAUSE__LOOPTEMP_);
6677 if (innerc)
6678 n2 = fold_convert (TREE_TYPE (n2),
6679 lookup_decl (OMP_CLAUSE_DECL (innerc),
6680 taskreg_ctx));
6683 cond = build2 (cond_code, boolean_type_node, fd->loop.v, n2);
6686 clauses = gimple_omp_for_clauses (fd->for_stmt);
6687 stmts = NULL;
6688 lower_lastprivate_clauses (clauses, cond, &stmts, ctx);
/* The lastprivate copy-out code goes in front of whatever is already
   queued in *DLIST.  */
6689 if (!gimple_seq_empty_p (stmts))
6691 gimple_seq_add_seq (&stmts, *dlist);
6692 *dlist = stmts;
6694 /* Optimize: v = 0; is usually cheaper than v = some_other_constant. */
6695 vinit = fd->loop.n1;
6696 if (cond_code == EQ_EXPR
6697 && tree_fits_shwi_p (fd->loop.n2)
6698 && ! integer_zerop (fd->loop.n2))
6699 vinit = build_int_cst (TREE_TYPE (fd->loop.v), 0);
6700 else
6701 vinit = unshare_expr (vinit);
6703 /* Initialize the iterator variable, so that threads that don't execute
6704 any iterations don't execute the lastprivate clauses by accident. */
6705 gimplify_assign (fd->loop.v, vinit, body_p);
6710 /* Lower code for an OMP loop directive. */
/* Lower a GIMPLE_OMP_FOR at *GSI_P in context CTX: replace it with a
   GIMPLE_BIND that sequences copy-in clause code, the pre-body, the
   loop statement itself (with its body lowered), the OMP continue and
   return markers, and the lastprivate/reduction epilogues.  OpenACC
   head/tail markers bracket the loop when applicable.  */
6712 static void
6713 lower_omp_for (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6715 tree *rhs_p, block;
6716 struct omp_for_data fd, *fdp = NULL;
6717 gomp_for *stmt = as_a <gomp_for *> (gsi_stmt (*gsi_p));
6718 gbind *new_stmt;
6719 gimple_seq omp_for_body, body, dlist;
6720 gimple_seq oacc_head = NULL, oacc_tail = NULL;
6721 size_t i;
6723 push_gimplify_context ();
6725 lower_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
6727 block = make_node (BLOCK);
6728 new_stmt = gimple_build_bind (NULL, NULL, block);
6729 /* Replace at gsi right away, so that 'stmt' is no member
6730 of a sequence anymore as we're going to add to a different
6731 one below. */
6732 gsi_replace (gsi_p, new_stmt, true);
6734 /* Move declaration of temporaries in the loop body before we make
6735 it go away. */
6736 omp_for_body = gimple_omp_body (stmt);
6737 if (!gimple_seq_empty_p (omp_for_body)
6738 && gimple_code (gimple_seq_first_stmt (omp_for_body)) == GIMPLE_BIND)
6740 gbind *inner_bind
6741 = as_a <gbind *> (gimple_seq_first_stmt (omp_for_body));
6742 tree vars = gimple_bind_vars (inner_bind);
6743 gimple_bind_append_vars (new_stmt, vars);
6744 /* bind_vars/BLOCK_VARS are being moved to new_stmt/block, don't
6745 keep them on the inner_bind and it's block. */
6746 gimple_bind_set_vars (inner_bind, NULL_TREE);
6747 if (gimple_bind_block (inner_bind))
6748 BLOCK_VARS (gimple_bind_block (inner_bind)) = NULL_TREE;
/* For a loop combined into an enclosing construct, create _LOOPTEMP_
   clauses whose decls communicate bounds/counts with that construct.  */
6751 if (gimple_omp_for_combined_into_p (stmt))
6753 omp_extract_for_data (stmt, &fd, NULL);
6754 fdp = &fd;
6756 /* We need two temporaries with fd.loop.v type (istart/iend)
6757 and then (fd.collapse - 1) temporaries with the same
6758 type for count2 ... countN-1 vars if not constant. */
6759 size_t count = 2;
6760 tree type = fd.iter_type;
6761 if (fd.collapse > 1
6762 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
6763 count += fd.collapse - 1;
6764 bool taskreg_for
6765 = (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR
6766 || gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP);
6767 tree outerc = NULL, *pc = gimple_omp_for_clauses_ptr (stmt);
6768 tree simtc = NULL;
6769 tree clauses = *pc;
/* For worksharing/taskloop loops the decls come from the enclosing
   taskreg construct's own _LOOPTEMP_ clauses.  */
6770 if (taskreg_for)
6771 outerc
6772 = omp_find_clause (gimple_omp_taskreg_clauses (ctx->outer->stmt),
6773 OMP_CLAUSE__LOOPTEMP_);
6774 if (ctx->simt_stmt)
6775 simtc = omp_find_clause (gimple_omp_for_clauses (ctx->simt_stmt),
6776 OMP_CLAUSE__LOOPTEMP_);
6777 for (i = 0; i < count; i++)
6779 tree temp;
6780 if (taskreg_for)
6782 gcc_assert (outerc);
6783 temp = lookup_decl (OMP_CLAUSE_DECL (outerc), ctx->outer);
6784 outerc = omp_find_clause (OMP_CLAUSE_CHAIN (outerc),
6785 OMP_CLAUSE__LOOPTEMP_);
6787 else
6789 /* If there are 2 adjacent SIMD stmts, one with _simt_
6790 clause, another without, make sure they have the same
6791 decls in _looptemp_ clauses, because the outer stmt
6792 they are combined into will look up just one inner_stmt. */
6793 if (ctx->simt_stmt)
6794 temp = OMP_CLAUSE_DECL (simtc);
6795 else
6796 temp = create_tmp_var (type);
6797 insert_decl_map (&ctx->outer->cb, temp, temp);
6799 *pc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
6800 OMP_CLAUSE_DECL (*pc) = temp;
6801 pc = &OMP_CLAUSE_CHAIN (*pc);
6802 if (ctx->simt_stmt)
6803 simtc = omp_find_clause (OMP_CLAUSE_CHAIN (simtc),
6804 OMP_CLAUSE__LOOPTEMP_);
6806 *pc = clauses;
6809 /* The pre-body and input clauses go before the lowered GIMPLE_OMP_FOR. */
6810 dlist = NULL;
6811 body = NULL;
6812 lower_rec_input_clauses (gimple_omp_for_clauses (stmt), &body, &dlist, ctx,
6813 fdp);
6814 gimple_seq_add_seq (&body, gimple_omp_for_pre_body (stmt));
6816 lower_omp (gimple_omp_body_ptr (stmt), ctx);
6818 /* Lower the header expressions. At this point, we can assume that
6819 the header is of the form:
6821 #pragma omp for (V = VAL1; V {<|>|<=|>=} VAL2; V = V [+-] VAL3)
6823 We just need to make sure that VAL1, VAL2 and VAL3 are lowered
6824 using the .omp_data_s mapping, if needed. */
6825 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
6827 rhs_p = gimple_omp_for_initial_ptr (stmt, i);
6828 if (!is_gimple_min_invariant (*rhs_p))
6829 *rhs_p = get_formal_tmp_var (*rhs_p, &body);
6831 rhs_p = gimple_omp_for_final_ptr (stmt, i);
6832 if (!is_gimple_min_invariant (*rhs_p))
6833 *rhs_p = get_formal_tmp_var (*rhs_p, &body);
6835 rhs_p = &TREE_OPERAND (gimple_omp_for_incr (stmt, i), 1);
6836 if (!is_gimple_min_invariant (*rhs_p))
6837 *rhs_p = get_formal_tmp_var (*rhs_p, &body);
6840 /* Once lowered, extract the bounds and clauses. */
6841 omp_extract_for_data (stmt, &fd, NULL);
6843 if (is_gimple_omp_oacc (ctx->stmt)
6844 && !ctx_in_oacc_kernels_region (ctx))
6845 lower_oacc_head_tail (gimple_location (stmt),
6846 gimple_omp_for_clauses (stmt),
6847 &oacc_head, &oacc_tail, ctx);
6849 /* Add OpenACC partitioning and reduction markers just before the loop. */
6850 if (oacc_head)
6851 gimple_seq_add_seq (&body, oacc_head);
6853 lower_omp_for_lastprivate (&fd, &body, &dlist, ctx);
/* Remap linear clause decls (and steps) to their copies in this
   construct's context.  */
6855 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
6856 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
6857 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
6858 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
6860 OMP_CLAUSE_DECL (c) = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
6861 if (DECL_P (OMP_CLAUSE_LINEAR_STEP (c)))
6862 OMP_CLAUSE_LINEAR_STEP (c)
6863 = maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_LINEAR_STEP (c),
6864 ctx);
/* For a grid-phony loop only the body is kept; the loop statement and
   its continue/return markers are omitted.  */
6867 bool phony_loop = (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_GRID_LOOP
6868 && gimple_omp_for_grid_phony (stmt));
6869 if (!phony_loop)
6870 gimple_seq_add_stmt (&body, stmt);
6871 gimple_seq_add_seq (&body, gimple_omp_body (stmt));
6873 if (!phony_loop)
6874 gimple_seq_add_stmt (&body, gimple_build_omp_continue (fd.loop.v,
6875 fd.loop.v));
6877 /* After the loop, add exit clauses. */
6878 lower_reduction_clauses (gimple_omp_for_clauses (stmt), &body, ctx);
6880 if (ctx->cancellable)
6881 gimple_seq_add_stmt (&body, gimple_build_label (ctx->cancel_label));
6883 gimple_seq_add_seq (&body, dlist);
6885 body = maybe_catch_exception (body);
6887 if (!phony_loop)
6889 /* Region exit marker goes at the end of the loop body. */
6890 gimple_seq_add_stmt (&body, gimple_build_omp_return (fd.have_nowait));
6891 maybe_add_implicit_barrier_cancel (ctx, &body);
6894 /* Add OpenACC joining and reduction markers just after the loop. */
6895 if (oacc_tail)
6896 gimple_seq_add_seq (&body, oacc_tail);
6898 pop_gimplify_context (new_stmt);
6900 gimple_bind_append_vars (new_stmt, ctx->block_vars);
6901 BLOCK_VARS (block) = gimple_bind_vars (new_stmt);
6902 if (BLOCK_VARS (block))
6903 TREE_USED (block) = 1;
6905 gimple_bind_set_body (new_stmt, body);
6906 gimple_omp_set_body (stmt, NULL);
6907 gimple_omp_for_set_pre_body (stmt, NULL);
6910 /* Callback for walk_stmts. Check if the current statement only contains
6911 GIMPLE_OMP_FOR or GIMPLE_OMP_SECTIONS. */
/* *WI->INFO is an int encoding the walk state:
     0  - no worksharing statement seen yet;
     1  - exactly one GIMPLE_OMP_FOR / GIMPLE_OMP_SECTIONS seen;
    -1  - the body cannot be a combined parallel (some other statement,
          or a second worksharing statement, was encountered).
   Containers handled by WALK_SUBSTMTS are recursed into without
   affecting the state.  */
6913 static tree
6914 check_combined_parallel (gimple_stmt_iterator *gsi_p,
6915 bool *handled_ops_p,
6916 struct walk_stmt_info *wi)
6918 int *info = (int *) wi->info;
6919 gimple *stmt = gsi_stmt (*gsi_p);
6921 *handled_ops_p = true;
6922 switch (gimple_code (stmt))
6924 WALK_SUBSTMTS;
6926 case GIMPLE_OMP_FOR:
6927 case GIMPLE_OMP_SECTIONS:
6928 *info = *info == 0 ? 1 : -1;
6929 break;
6930 default:
6931 *info = -1;
6932 break;
6934 return NULL;
/* Context passed around while building a task copy function; extends
   copy_body_data so it can be used with tree-inline.c remappers.  */
6937 struct omp_taskcopy_context
6939 /* This field must be at the beginning, as we do "inheritance": Some
6940 callback functions for tree-inline.c (e.g., omp_copy_decl)
6941 receive a copy_body_data pointer that is up-casted to an
6942 omp_context pointer. */
6943 copy_body_data cb;
/* OMP context of the task construct whose copyfn is being built.  */
6944 omp_context *ctx;
/* copy_decl hook used while remapping types for a task copyfn: a VAR
   that has an entry in the context's sender-side field map gets a fresh
   temporary of the same type; any other decl is returned unchanged.  */
6947 static tree
6948 task_copyfn_copy_decl (tree var, copy_body_data *cb)
6950 struct omp_taskcopy_context *tcctx = (struct omp_taskcopy_context *) cb;
6952 if (splay_tree_lookup (tcctx->ctx->sfield_map, (splay_tree_key) var))
6953 return create_tmp_var (TREE_TYPE (var));
6955 return var;
/* Return a copy of record type ORIG_TYPE remapped through TCCTX: every
   field is copied, its type/size/offset trees run through the remap
   callbacks, and the new record is laid out from scratch.  The old->new
   field mapping is recorded in TCCTX->cb.decl_map for later lookup.  */
6958 static tree
6959 task_copyfn_remap_type (struct omp_taskcopy_context *tcctx, tree orig_type)
6961 tree name, new_fields = NULL, type, f;
6963 type = lang_hooks.types.make_type (RECORD_TYPE);
6964 name = DECL_NAME (TYPE_NAME (orig_type));
6965 name = build_decl (gimple_location (tcctx->ctx->stmt),
6966 TYPE_DECL, name, type);
6967 TYPE_NAME (type) = name;
6969 for (f = TYPE_FIELDS (orig_type); f ; f = TREE_CHAIN (f))
6971 tree new_f = copy_node (f);
6972 DECL_CONTEXT (new_f) = type;
6973 TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &tcctx->cb);
/* Fields are chained in reverse here and nreversed below.  */
6974 TREE_CHAIN (new_f) = new_fields;
6975 walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &tcctx->cb, NULL);
6976 walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r, &tcctx->cb, NULL);
6977 walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
6978 &tcctx->cb, NULL);
6979 new_fields = new_f;
6980 tcctx->cb.decl_map->put (f, new_f);
6982 TYPE_FIELDS (type) = nreverse (new_fields);
6983 layout_type (type);
6984 return type;
6987 /* Create task copyfn. */
/* Fill in the body of the (already declared) task copy function for
   TASK_STMT.  The function receives two record pointers: ARG (the
   task's own data record) and SARG (the sender record built at the task
   spawn site); it copies shared-variable pointers and copy-constructs
   firstprivate variables from *SARG into *ARG.  Variably-modified
   record types are remapped first so sizes/offsets refer to temporaries
   local to the copyfn.  */
6989 static void
6990 create_task_copyfn (gomp_task *task_stmt, omp_context *ctx)
6992 struct function *child_cfun;
6993 tree child_fn, t, c, src, dst, f, sf, arg, sarg, decl;
6994 tree record_type, srecord_type, bind, list;
6995 bool record_needs_remap = false, srecord_needs_remap = false;
6996 splay_tree_node n;
6997 struct omp_taskcopy_context tcctx;
6998 location_t loc = gimple_location (task_stmt);
7000 child_fn = gimple_omp_task_copy_fn (task_stmt);
7001 child_cfun = DECL_STRUCT_FUNCTION (child_fn);
7002 gcc_assert (child_cfun->cfg == NULL);
7003 DECL_SAVED_TREE (child_fn) = alloc_stmt_list ();
7005 /* Reset DECL_CONTEXT on function arguments. */
7006 for (t = DECL_ARGUMENTS (child_fn); t; t = DECL_CHAIN (t))
7007 DECL_CONTEXT (t) = child_fn;
7009 /* Populate the function. */
7010 push_gimplify_context ();
7011 push_cfun (child_cfun);
7013 bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
7014 TREE_SIDE_EFFECTS (bind) = 1;
7015 list = NULL;
7016 DECL_SAVED_TREE (child_fn) = bind;
7017 DECL_SOURCE_LOCATION (child_fn) = gimple_location (task_stmt);
7019 /* Remap src and dst argument types if needed. */
7020 record_type = ctx->record_type;
7021 srecord_type = ctx->srecord_type;
7022 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
7023 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
7025 record_needs_remap = true;
7026 break;
7028 for (f = TYPE_FIELDS (srecord_type); f ; f = DECL_CHAIN (f))
7029 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
7031 srecord_needs_remap = true;
7032 break;
/* Set up a remapping context (decl_map non-NULL signals "remapping
   active" throughout the rest of this function).  */
7035 if (record_needs_remap || srecord_needs_remap)
7037 memset (&tcctx, '\0', sizeof (tcctx));
7038 tcctx.cb.src_fn = ctx->cb.src_fn;
7039 tcctx.cb.dst_fn = child_fn;
7040 tcctx.cb.src_node = cgraph_node::get (tcctx.cb.src_fn);
7041 gcc_checking_assert (tcctx.cb.src_node);
7042 tcctx.cb.dst_node = tcctx.cb.src_node;
7043 tcctx.cb.src_cfun = ctx->cb.src_cfun;
7044 tcctx.cb.copy_decl = task_copyfn_copy_decl;
7045 tcctx.cb.eh_lp_nr = 0;
7046 tcctx.cb.transform_call_graph_edges = CB_CGE_MOVE;
7047 tcctx.cb.decl_map = new hash_map<tree, tree>;
7048 tcctx.ctx = ctx;
7050 if (record_needs_remap)
7051 record_type = task_copyfn_remap_type (&tcctx, record_type);
7052 if (srecord_needs_remap)
7053 srecord_type = task_copyfn_remap_type (&tcctx, srecord_type);
7055 else
7056 tcctx.cb.decl_map = NULL;
7058 arg = DECL_ARGUMENTS (child_fn);
7059 TREE_TYPE (arg) = build_pointer_type (record_type);
7060 sarg = DECL_CHAIN (arg);
7061 TREE_TYPE (sarg) = build_pointer_type (srecord_type);
7063 /* First pass: initialize temporaries used in record_type and srecord_type
7064 sizes and field offsets. */
7065 if (tcctx.cb.decl_map)
7066 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
7067 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7069 tree *p;
7071 decl = OMP_CLAUSE_DECL (c);
7072 p = tcctx.cb.decl_map->get (decl);
7073 if (p == NULL)
7074 continue;
7075 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
7076 sf = (tree) n->value;
7077 sf = *tcctx.cb.decl_map->get (sf);
7078 src = build_simple_mem_ref_loc (loc, sarg);
7079 src = omp_build_component_ref (src, sf);
7080 t = build2 (MODIFY_EXPR, TREE_TYPE (*p), *p, src);
7081 append_to_statement_list (t, &list);
7084 /* Second pass: copy shared var pointers and copy construct non-VLA
7085 firstprivate vars. */
7086 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
7087 switch (OMP_CLAUSE_CODE (c))
7089 splay_tree_key key;
7090 case OMP_CLAUSE_SHARED:
7091 decl = OMP_CLAUSE_DECL (c);
/* SHARED_FIRSTPRIVATE vars are keyed by &DECL_UID rather than by the
   decl itself in the field maps.  */
7092 key = (splay_tree_key) decl;
7093 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
7094 key = (splay_tree_key) &DECL_UID (decl);
7095 n = splay_tree_lookup (ctx->field_map, key);
7096 if (n == NULL)
7097 break;
7098 f = (tree) n->value;
7099 if (tcctx.cb.decl_map)
7100 f = *tcctx.cb.decl_map->get (f);
7101 n = splay_tree_lookup (ctx->sfield_map, key);
7102 sf = (tree) n->value;
7103 if (tcctx.cb.decl_map)
7104 sf = *tcctx.cb.decl_map->get (sf);
7105 src = build_simple_mem_ref_loc (loc, sarg);
7106 src = omp_build_component_ref (src, sf);
7107 dst = build_simple_mem_ref_loc (loc, arg);
7108 dst = omp_build_component_ref (dst, f);
7109 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
7110 append_to_statement_list (t, &list);
7111 break;
7112 case OMP_CLAUSE_FIRSTPRIVATE:
7113 decl = OMP_CLAUSE_DECL (c);
/* VLA firstprivates are handled by the last pass below.  */
7114 if (is_variable_sized (decl))
7115 break;
7116 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
7117 if (n == NULL)
7118 break;
7119 f = (tree) n->value;
7120 if (tcctx.cb.decl_map)
7121 f = *tcctx.cb.decl_map->get (f);
7122 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
7123 if (n != NULL)
7125 sf = (tree) n->value;
7126 if (tcctx.cb.decl_map)
7127 sf = *tcctx.cb.decl_map->get (sf);
7128 src = build_simple_mem_ref_loc (loc, sarg);
7129 src = omp_build_component_ref (src, sf);
7130 if (use_pointer_for_field (decl, NULL) || omp_is_reference (decl))
7131 src = build_simple_mem_ref_loc (loc, src);
7133 else
7134 src = decl;
7135 dst = build_simple_mem_ref_loc (loc, arg);
7136 dst = omp_build_component_ref (dst, f);
/* Use the language's copy constructor semantics for firstprivate.  */
7137 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
7138 append_to_statement_list (t, &list);
7139 break;
7140 case OMP_CLAUSE_PRIVATE:
7141 if (! OMP_CLAUSE_PRIVATE_OUTER_REF (c))
7142 break;
7143 decl = OMP_CLAUSE_DECL (c);
7144 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
7145 f = (tree) n->value;
7146 if (tcctx.cb.decl_map)
7147 f = *tcctx.cb.decl_map->get (f);
7148 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
7149 if (n != NULL)
7151 sf = (tree) n->value;
7152 if (tcctx.cb.decl_map)
7153 sf = *tcctx.cb.decl_map->get (sf);
7154 src = build_simple_mem_ref_loc (loc, sarg);
7155 src = omp_build_component_ref (src, sf);
7156 if (use_pointer_for_field (decl, NULL))
7157 src = build_simple_mem_ref_loc (loc, src);
7159 else
7160 src = decl;
7161 dst = build_simple_mem_ref_loc (loc, arg);
7162 dst = omp_build_component_ref (dst, f);
7163 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
7164 append_to_statement_list (t, &list);
7165 break;
7166 default:
7167 break;
7170 /* Last pass: handle VLA firstprivates. */
7171 if (tcctx.cb.decl_map)
7172 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
7173 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7175 tree ind, ptr, df;
7177 decl = OMP_CLAUSE_DECL (c);
7178 if (!is_variable_sized (decl))
7179 continue;
7180 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
7181 if (n == NULL)
7182 continue;
7183 f = (tree) n->value;
7184 f = *tcctx.cb.decl_map->get (f);
/* A VLA's DECL_VALUE_EXPR is *ptr; copy the data via the sender-side
   field for that pointer, then store &dst back into the task record's
   own pointer field.  */
7185 gcc_assert (DECL_HAS_VALUE_EXPR_P (decl));
7186 ind = DECL_VALUE_EXPR (decl);
7187 gcc_assert (TREE_CODE (ind) == INDIRECT_REF);
7188 gcc_assert (DECL_P (TREE_OPERAND (ind, 0)));
7189 n = splay_tree_lookup (ctx->sfield_map,
7190 (splay_tree_key) TREE_OPERAND (ind, 0));
7191 sf = (tree) n->value;
7192 sf = *tcctx.cb.decl_map->get (sf);
7193 src = build_simple_mem_ref_loc (loc, sarg);
7194 src = omp_build_component_ref (src, sf);
7195 src = build_simple_mem_ref_loc (loc, src);
7196 dst = build_simple_mem_ref_loc (loc, arg);
7197 dst = omp_build_component_ref (dst, f);
7198 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
7199 append_to_statement_list (t, &list);
7200 n = splay_tree_lookup (ctx->field_map,
7201 (splay_tree_key) TREE_OPERAND (ind, 0));
7202 df = (tree) n->value;
7203 df = *tcctx.cb.decl_map->get (df);
7204 ptr = build_simple_mem_ref_loc (loc, arg);
7205 ptr = omp_build_component_ref (ptr, df);
7206 t = build2 (MODIFY_EXPR, TREE_TYPE (ptr), ptr,
7207 build_fold_addr_expr_loc (loc, dst));
7208 append_to_statement_list (t, &list);
7211 t = build1 (RETURN_EXPR, void_type_node, NULL);
7212 append_to_statement_list (t, &list);
7214 if (tcctx.cb.decl_map)
7215 delete tcctx.cb.decl_map;
7216 pop_gimplify_context (NULL);
7217 BIND_EXPR_BODY (bind) = list;
7218 pop_cfun ();
/* Lower the OMP_CLAUSE_DEPEND clauses found in *PCLAUSES: materialize an
   address-taken array of pointers laid out as
     [0] = total number of dependences, [1] = number of out/inout ones,
     [2..] = out/inout addresses followed by in addresses,
   chain onto *PCLAUSES a new OMP_CLAUSE_DEPEND whose decl is the
   array's address, append the array initialization to *ISEQ and a
   clobber of the array to *OSEQ.  Only in/out/inout kinds are valid
   here; source/sink would be a front-end bug (gcc_unreachable).  */
7221 static void
7222 lower_depend_clauses (tree *pclauses, gimple_seq *iseq, gimple_seq *oseq)
7224 tree c, clauses;
7225 gimple *g;
7226 size_t n_in = 0, n_out = 0, idx = 2, i;
7228 clauses = omp_find_clause (*pclauses, OMP_CLAUSE_DEPEND);
7229 gcc_assert (clauses);
7230 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
7231 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
7232 switch (OMP_CLAUSE_DEPEND_KIND (c))
7234 case OMP_CLAUSE_DEPEND_IN:
7235 n_in++;
7236 break;
7237 case OMP_CLAUSE_DEPEND_OUT:
7238 case OMP_CLAUSE_DEPEND_INOUT:
7239 n_out++;
7240 break;
7241 case OMP_CLAUSE_DEPEND_SOURCE:
7242 case OMP_CLAUSE_DEPEND_SINK:
7243 /* FALLTHRU */
7244 default:
7245 gcc_unreachable ();
7247 tree type = build_array_type_nelts (ptr_type_node, n_in + n_out + 2);
7248 tree array = create_tmp_var (type);
7249 TREE_ADDRESSABLE (array) = 1;
7250 tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
7251 NULL_TREE);
7252 g = gimple_build_assign (r, build_int_cst (ptr_type_node, n_in + n_out));
7253 gimple_seq_add_stmt (iseq, g);
7254 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
7255 NULL_TREE);
7256 g = gimple_build_assign (r, build_int_cst (ptr_type_node, n_out));
7257 gimple_seq_add_stmt (iseq, g);
/* Fill slots 2.. with the dependence addresses: the i == 0 sweep picks
   out/inout clauses, i == 1 picks in clauses (note the XOR).  */
7258 for (i = 0; i < 2; i++)
7260 if ((i ? n_in : n_out) == 0)
7261 continue;
7262 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
7263 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
7264 && ((OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_IN) ^ i))
7266 tree t = OMP_CLAUSE_DECL (c);
7267 t = fold_convert (ptr_type_node, t);
7268 gimplify_expr (&t, iseq, NULL, is_gimple_val, fb_rvalue);
7269 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (idx++),
7270 NULL_TREE, NULL_TREE);
7271 g = gimple_build_assign (r, t);
7272 gimple_seq_add_stmt (iseq, g);
7275 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
7276 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
7277 OMP_CLAUSE_CHAIN (c) = *pclauses;
7278 *pclauses = c;
/* The array is dead after the construct; clobber it in *OSEQ.  */
7279 tree clobber = build_constructor (type, NULL);
7280 TREE_THIS_VOLATILE (clobber) = 1;
7281 g = gimple_build_assign (array, clobber);
7282 gimple_seq_add_stmt (oseq, g);
7285 /* Lower the OpenMP parallel or task directive in the current statement
7286 in GSI_P. CTX holds context information for the directive. */
7288 static void
7289 lower_omp_taskreg (gimple_stmt_iterator *gsi_p, omp_context *ctx)
7291 tree clauses;
7292 tree child_fn, t;
7293 gimple *stmt = gsi_stmt (*gsi_p);
7294 gbind *par_bind, *bind, *dep_bind = NULL;
7295 gimple_seq par_body, olist, ilist, par_olist, par_rlist, par_ilist, new_body;
7296 location_t loc = gimple_location (stmt);
7298 clauses = gimple_omp_taskreg_clauses (stmt);
7299 par_bind
7300 = as_a <gbind *> (gimple_seq_first_stmt (gimple_omp_body (stmt)));
7301 par_body = gimple_bind_body (par_bind);
7302 child_fn = ctx->cb.dst_fn;
/* Mark a parallel whose body consists of exactly one worksharing
   construct as combined; check_combined_parallel leaves ws_num == 1
   only in that case.  */
7303 if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL
7304 && !gimple_omp_parallel_combined_p (stmt))
7306 struct walk_stmt_info wi;
7307 int ws_num = 0;
7309 memset (&wi, 0, sizeof (wi));
7310 wi.info = &ws_num;
7311 wi.val_only = true;
7312 walk_gimple_seq (par_body, check_combined_parallel, NULL, &wi);
7313 if (ws_num == 1)
7314 gimple_omp_parallel_set_combined_p (stmt, true);
7316 gimple_seq dep_ilist = NULL;
7317 gimple_seq dep_olist = NULL;
/* A task with depend clauses gets an extra enclosing bind (dep_bind)
   holding the dependence-array setup and teardown around the task.  */
7318 if (gimple_code (stmt) == GIMPLE_OMP_TASK
7319 && omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
7321 push_gimplify_context ();
7322 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
7323 lower_depend_clauses (gimple_omp_task_clauses_ptr (stmt),
7324 &dep_ilist, &dep_olist);
7327 if (ctx->srecord_type)
7328 create_task_copyfn (as_a <gomp_task *> (stmt), ctx);
7330 push_gimplify_context ();
7332 par_olist = NULL;
7333 par_ilist = NULL;
7334 par_rlist = NULL;
7335 bool phony_construct = gimple_code (stmt) == GIMPLE_OMP_PARALLEL
7336 && gimple_omp_parallel_grid_phony (as_a <gomp_parallel *> (stmt));
7337 if (phony_construct && ctx->record_type)
7339 gcc_checking_assert (!ctx->receiver_decl);
7340 ctx->receiver_decl = create_tmp_var
7341 (build_reference_type (ctx->record_type), ".omp_rec");
7343 lower_rec_input_clauses (clauses, &par_ilist, &par_olist, ctx, NULL);
7344 lower_omp (&par_body, ctx);
7345 if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL)
7346 lower_reduction_clauses (clauses, &par_rlist, ctx);
7348 /* Declare all the variables created by mapping and the variables
7349 declared in the scope of the parallel body. */
7350 record_vars_into (ctx->block_vars, child_fn);
7351 record_vars_into (gimple_bind_vars (par_bind), child_fn);
/* Build the sender record (.omp_data_o) that carries shared/firstprivate
   data into the child function.  */
7353 if (ctx->record_type)
7355 ctx->sender_decl
7356 = create_tmp_var (ctx->srecord_type ? ctx->srecord_type
7357 : ctx->record_type, ".omp_data_o");
7358 DECL_NAMELESS (ctx->sender_decl) = 1;
7359 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
7360 gimple_omp_taskreg_set_data_arg (stmt, ctx->sender_decl);
7363 olist = NULL;
7364 ilist = NULL;
7365 lower_send_clauses (clauses, &ilist, &olist, ctx);
7366 lower_send_shared_vars (&ilist, &olist, ctx);
/* The sender record is dead after the region; clobber it.  */
7368 if (ctx->record_type)
7370 tree clobber = build_constructor (TREE_TYPE (ctx->sender_decl), NULL);
7371 TREE_THIS_VOLATILE (clobber) = 1;
7372 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
7373 clobber));
7376 /* Once all the expansions are done, sequence all the different
7377 fragments inside gimple_omp_body. */
7379 new_body = NULL;
7381 if (ctx->record_type)
7383 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
7384 /* fixup_child_record_type might have changed receiver_decl's type. */
7385 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
7386 gimple_seq_add_stmt (&new_body,
7387 gimple_build_assign (ctx->receiver_decl, t));
7390 gimple_seq_add_seq (&new_body, par_ilist);
7391 gimple_seq_add_seq (&new_body, par_body);
7392 gimple_seq_add_seq (&new_body, par_rlist);
7393 if (ctx->cancellable)
7394 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
7395 gimple_seq_add_seq (&new_body, par_olist);
7396 new_body = maybe_catch_exception (new_body);
7397 if (gimple_code (stmt) == GIMPLE_OMP_TASK)
7398 gimple_seq_add_stmt (&new_body,
7399 gimple_build_omp_continue (integer_zero_node,
7400 integer_zero_node));
7401 if (!phony_construct)
7403 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
7404 gimple_omp_set_body (stmt, new_body);
/* For a phony (gridified) parallel the body is inlined directly into
   the bind instead of keeping the parallel statement.  */
7407 bind = gimple_build_bind (NULL, NULL, gimple_bind_block (par_bind));
7408 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
7409 gimple_bind_add_seq (bind, ilist);
7410 if (!phony_construct)
7411 gimple_bind_add_stmt (bind, stmt);
7412 else
7413 gimple_bind_add_seq (bind, new_body);
7414 gimple_bind_add_seq (bind, olist);
7416 pop_gimplify_context (NULL);
7418 if (dep_bind)
7420 gimple_bind_add_seq (dep_bind, dep_ilist);
7421 gimple_bind_add_stmt (dep_bind, bind);
7422 gimple_bind_add_seq (dep_bind, dep_olist);
7423 pop_gimplify_context (dep_bind);
7427 /* Lower the GIMPLE_OMP_TARGET in the current statement
7428 in GSI_P. CTX holds context information for the directive. */
7430 static void
7431 lower_omp_target (gimple_stmt_iterator *gsi_p, omp_context *ctx)
7433 tree clauses;
7434 tree child_fn, t, c;
7435 gomp_target *stmt = as_a <gomp_target *> (gsi_stmt (*gsi_p));
7436 gbind *tgt_bind, *bind, *dep_bind = NULL;
7437 gimple_seq tgt_body, olist, ilist, fplist, new_body;
7438 location_t loc = gimple_location (stmt);
7439 bool offloaded, data_region;
7440 unsigned int map_cnt = 0;
/* Classify the construct.  OFFLOADED means the body is outlined into a
   child function (ctx->cb.dst_fn); DATA_REGION means the construct only
   manages device data mappings around an inline body.  UPDATE and
   ENTER/EXIT DATA kinds are standalone (neither flag set for a body).  */
7442 offloaded = is_gimple_omp_offloaded (stmt);
7443 switch (gimple_omp_target_kind (stmt))
7445 case GF_OMP_TARGET_KIND_REGION:
7446 case GF_OMP_TARGET_KIND_UPDATE:
7447 case GF_OMP_TARGET_KIND_ENTER_DATA:
7448 case GF_OMP_TARGET_KIND_EXIT_DATA:
7449 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
7450 case GF_OMP_TARGET_KIND_OACC_KERNELS:
7451 case GF_OMP_TARGET_KIND_OACC_UPDATE:
7452 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
7453 case GF_OMP_TARGET_KIND_OACC_DECLARE:
7454 data_region = false;
7455 break;
7456 case GF_OMP_TARGET_KIND_DATA:
7457 case GF_OMP_TARGET_KIND_OACC_DATA:
7458 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
7459 data_region = true;
7460 break;
7461 default:
7462 gcc_unreachable ();
7465 clauses = gimple_omp_target_clauses (stmt);
/* If there is a depend clause, lower it first inside its own bind so
   the dependence bookkeeping brackets everything else we emit.  */
7467 gimple_seq dep_ilist = NULL;
7468 gimple_seq dep_olist = NULL;
7469 if (omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
7471 push_gimplify_context ();
7472 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
7473 lower_depend_clauses (gimple_omp_target_clauses_ptr (stmt),
7474 &dep_ilist, &dep_olist);
7477 tgt_bind = NULL;
7478 tgt_body = NULL;
7479 if (offloaded)
7481 tgt_bind = gimple_seq_first_stmt_as_a_bind (gimple_omp_body (stmt));
7482 tgt_body = gimple_bind_body (tgt_bind);
7484 else if (data_region)
7485 tgt_body = gimple_omp_body (stmt);
7486 child_fn = ctx->cb.dst_fn;
7488 push_gimplify_context ();
7489 fplist = NULL;
/* First pass over the clauses: count the entries that need a slot in
   the mapping arrays (map_cnt) and install DECL_VALUE_EXPRs so that
   references inside the target body resolve to the received copies.
   Firstprivate initializers for OpenACC parallel go into FPLIST.  */
7491 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7492 switch (OMP_CLAUSE_CODE (c))
7494 tree var, x;
7496 default:
7497 break;
7498 case OMP_CLAUSE_MAP:
7499 #if CHECKING_P
7500 /* First check what we're prepared to handle in the following. */
7501 switch (OMP_CLAUSE_MAP_KIND (c))
7503 case GOMP_MAP_ALLOC:
7504 case GOMP_MAP_TO:
7505 case GOMP_MAP_FROM:
7506 case GOMP_MAP_TOFROM:
7507 case GOMP_MAP_POINTER:
7508 case GOMP_MAP_TO_PSET:
7509 case GOMP_MAP_DELETE:
7510 case GOMP_MAP_RELEASE:
7511 case GOMP_MAP_ALWAYS_TO:
7512 case GOMP_MAP_ALWAYS_FROM:
7513 case GOMP_MAP_ALWAYS_TOFROM:
7514 case GOMP_MAP_FIRSTPRIVATE_POINTER:
7515 case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
7516 case GOMP_MAP_STRUCT:
7517 case GOMP_MAP_ALWAYS_POINTER:
7518 break;
7519 case GOMP_MAP_FORCE_ALLOC:
7520 case GOMP_MAP_FORCE_TO:
7521 case GOMP_MAP_FORCE_FROM:
7522 case GOMP_MAP_FORCE_TOFROM:
7523 case GOMP_MAP_FORCE_PRESENT:
7524 case GOMP_MAP_FORCE_DEVICEPTR:
7525 case GOMP_MAP_DEVICE_RESIDENT:
7526 case GOMP_MAP_LINK:
7527 gcc_assert (is_gimple_omp_oacc (stmt));
7528 break;
7529 default:
7530 gcc_unreachable ();
7532 #endif
7533 /* FALLTHRU */
7534 case OMP_CLAUSE_TO:
7535 case OMP_CLAUSE_FROM:
7536 oacc_firstprivate:
7537 var = OMP_CLAUSE_DECL (c);
7538 if (!DECL_P (var))
7540 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP
7541 || (!OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
7542 && (OMP_CLAUSE_MAP_KIND (c)
7543 != GOMP_MAP_FIRSTPRIVATE_POINTER)))
7544 map_cnt++;
7545 continue;
7548 if (DECL_SIZE (var)
7549 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
/* Variable-sized decls are accessed through a pointer held in their
   DECL_VALUE_EXPR; map the underlying pointer instead.  */
7551 tree var2 = DECL_VALUE_EXPR (var);
7552 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
7553 var2 = TREE_OPERAND (var2, 0);
7554 gcc_assert (DECL_P (var2));
7555 var = var2;
7558 if (offloaded
7559 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7560 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
7561 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
7563 if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
7565 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx))
7566 && varpool_node::get_create (var)->offloadable)
7567 continue;
7569 tree type = build_pointer_type (TREE_TYPE (var));
7570 tree new_var = lookup_decl (var, ctx);
7571 x = create_tmp_var_raw (type, get_name (new_var));
7572 gimple_add_tmp_var (x);
7573 x = build_simple_mem_ref (x);
7574 SET_DECL_VALUE_EXPR (new_var, x);
7575 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7577 continue;
7580 if (!maybe_lookup_field (var, ctx))
7581 continue;
7583 /* Don't remap oacc parallel reduction variables, because the
7584 intermediate result must be local to each gang. */
7585 if (offloaded && !(OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7586 && OMP_CLAUSE_MAP_IN_REDUCTION (c)))
7588 x = build_receiver_ref (var, true, ctx);
7589 tree new_var = lookup_decl (var, ctx);
7591 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7592 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
7593 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
7594 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
7595 x = build_simple_mem_ref (x);
7596 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
/* Reached only via the oacc_firstprivate label above.  */
7598 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
7599 if (omp_is_reference (new_var))
7601 /* Create a local object to hold the instance
7602 value. */
7603 tree type = TREE_TYPE (TREE_TYPE (new_var));
7604 const char *id = IDENTIFIER_POINTER (DECL_NAME (new_var));
7605 tree inst = create_tmp_var (type, id);
7606 gimplify_assign (inst, fold_indirect_ref (x), &fplist);
7607 x = build_fold_addr_expr (inst);
7609 gimplify_assign (new_var, x, &fplist);
7611 else if (DECL_P (new_var))
7613 SET_DECL_VALUE_EXPR (new_var, x);
7614 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7616 else
7617 gcc_unreachable ();
7619 map_cnt++;
7620 break;
7622 case OMP_CLAUSE_FIRSTPRIVATE:
/* On OpenACC parallel, firstprivate is implemented as a mapping;
   share the OMP_CLAUSE_MAP handling above.  */
7623 if (is_oacc_parallel (ctx))
7624 goto oacc_firstprivate;
7625 map_cnt++;
7626 var = OMP_CLAUSE_DECL (c);
7627 if (!omp_is_reference (var)
7628 && !is_gimple_reg_type (TREE_TYPE (var)))
7630 tree new_var = lookup_decl (var, ctx);
7631 if (is_variable_sized (var))
7633 tree pvar = DECL_VALUE_EXPR (var);
7634 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
7635 pvar = TREE_OPERAND (pvar, 0);
7636 gcc_assert (DECL_P (pvar));
7637 tree new_pvar = lookup_decl (pvar, ctx);
7638 x = build_fold_indirect_ref (new_pvar);
7639 TREE_THIS_NOTRAP (x) = 1;
7641 else
7642 x = build_receiver_ref (var, true, ctx);
7643 SET_DECL_VALUE_EXPR (new_var, x);
7644 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7646 break;
7648 case OMP_CLAUSE_PRIVATE:
7649 if (is_gimple_omp_oacc (ctx->stmt))
7650 break;
7651 var = OMP_CLAUSE_DECL (c);
7652 if (is_variable_sized (var))
7654 tree new_var = lookup_decl (var, ctx);
7655 tree pvar = DECL_VALUE_EXPR (var);
7656 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
7657 pvar = TREE_OPERAND (pvar, 0);
7658 gcc_assert (DECL_P (pvar));
7659 tree new_pvar = lookup_decl (pvar, ctx);
7660 x = build_fold_indirect_ref (new_pvar);
7661 TREE_THIS_NOTRAP (x) = 1;
7662 SET_DECL_VALUE_EXPR (new_var, x);
7663 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7665 break;
7667 case OMP_CLAUSE_USE_DEVICE_PTR:
7668 case OMP_CLAUSE_IS_DEVICE_PTR:
7669 var = OMP_CLAUSE_DECL (c);
7670 map_cnt++;
7671 if (is_variable_sized (var))
7673 tree new_var = lookup_decl (var, ctx);
7674 tree pvar = DECL_VALUE_EXPR (var);
7675 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
7676 pvar = TREE_OPERAND (pvar, 0);
7677 gcc_assert (DECL_P (pvar));
7678 tree new_pvar = lookup_decl (pvar, ctx);
7679 x = build_fold_indirect_ref (new_pvar);
7680 TREE_THIS_NOTRAP (x) = 1;
7681 SET_DECL_VALUE_EXPR (new_var, x);
7682 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7684 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
7686 tree new_var = lookup_decl (var, ctx);
7687 tree type = build_pointer_type (TREE_TYPE (var));
7688 x = create_tmp_var_raw (type, get_name (new_var));
7689 gimple_add_tmp_var (x);
7690 x = build_simple_mem_ref (x);
7691 SET_DECL_VALUE_EXPR (new_var, x);
7692 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7694 else
7696 tree new_var = lookup_decl (var, ctx);
7697 x = create_tmp_var_raw (TREE_TYPE (new_var), get_name (new_var));
7698 gimple_add_tmp_var (x);
7699 SET_DECL_VALUE_EXPR (new_var, x);
7700 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7702 break;
/* With the remappings installed, lower the construct's body.  */
7705 if (offloaded)
7707 target_nesting_level++;
7708 lower_omp (&tgt_body, ctx);
7709 target_nesting_level--;
7711 else if (data_region)
7712 lower_omp (&tgt_body, ctx);
7714 if (offloaded)
7716 /* Declare all the variables created by mapping and the variables
7717 declared in the scope of the target body. */
7718 record_vars_into (ctx->block_vars, child_fn);
7719 record_vars_into (gimple_bind_vars (tgt_bind), child_fn);
/* Build the sender record plus the .omp_data_sizes / .omp_data_kinds
   arrays (one slot per counted mapping) that describe each mapping to
   the runtime; they are handed to the construct via a 3-element
   TREE_VEC data_arg.  */
7722 olist = NULL;
7723 ilist = NULL;
7724 if (ctx->record_type)
7726 ctx->sender_decl
7727 = create_tmp_var (ctx->record_type, ".omp_data_arr");
7728 DECL_NAMELESS (ctx->sender_decl) = 1;
7729 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
7730 t = make_tree_vec (3);
7731 TREE_VEC_ELT (t, 0) = ctx->sender_decl;
7732 TREE_VEC_ELT (t, 1)
7733 = create_tmp_var (build_array_type_nelts (size_type_node, map_cnt),
7734 ".omp_data_sizes");
7735 DECL_NAMELESS (TREE_VEC_ELT (t, 1)) = 1;
7736 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 1)) = 1;
7737 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 1;
7738 tree tkind_type = short_unsigned_type_node;
7739 int talign_shift = 8;
/* Each kinds-array element packs the map kind in the low TALIGN_SHIFT
   bits and ceil_log2 of the alignment in the bits above it.  */
7740 TREE_VEC_ELT (t, 2)
7741 = create_tmp_var (build_array_type_nelts (tkind_type, map_cnt),
7742 ".omp_data_kinds");
7743 DECL_NAMELESS (TREE_VEC_ELT (t, 2)) = 1;
7744 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 2)) = 1;
7745 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 1;
7746 gimple_omp_target_set_data_arg (stmt, t);
7748 vec<constructor_elt, va_gc> *vsize;
7749 vec<constructor_elt, va_gc> *vkind;
7750 vec_alloc (vsize, map_cnt);
7751 vec_alloc (vkind, map_cnt);
7752 unsigned int map_idx = 0;
/* Second pass over the clauses: fill the sender record (ILIST), the
   copy-back code (OLIST), and the size/kind constructor entries, one
   per slot counted in the first pass (checked below against map_cnt).  */
7754 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7755 switch (OMP_CLAUSE_CODE (c))
7757 tree ovar, nc, s, purpose, var, x, type;
7758 unsigned int talign;
7760 default:
7761 break;
7763 case OMP_CLAUSE_MAP:
7764 case OMP_CLAUSE_TO:
7765 case OMP_CLAUSE_FROM:
7766 oacc_firstprivate_map:
7767 nc = c;
7768 ovar = OMP_CLAUSE_DECL (c);
7769 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7770 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
7771 || (OMP_CLAUSE_MAP_KIND (c)
7772 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
7773 break;
7774 if (!DECL_P (ovar))
7776 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7777 && OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
/* For a zero-bias array section the following clause carries the
   base decl; use it and advance C past it at the end.  */
7779 gcc_checking_assert (OMP_CLAUSE_DECL (OMP_CLAUSE_CHAIN (c))
7780 == get_base_address (ovar));
7781 nc = OMP_CLAUSE_CHAIN (c);
7782 ovar = OMP_CLAUSE_DECL (nc);
7784 else
7786 tree x = build_sender_ref (ovar, ctx);
7787 tree v
7788 = build_fold_addr_expr_with_type (ovar, ptr_type_node);
7789 gimplify_assign (x, v, &ilist);
7790 nc = NULL_TREE;
7793 else
7795 if (DECL_SIZE (ovar)
7796 && TREE_CODE (DECL_SIZE (ovar)) != INTEGER_CST)
7798 tree ovar2 = DECL_VALUE_EXPR (ovar);
7799 gcc_assert (TREE_CODE (ovar2) == INDIRECT_REF);
7800 ovar2 = TREE_OPERAND (ovar2, 0);
7801 gcc_assert (DECL_P (ovar2));
7802 ovar = ovar2;
7804 if (!maybe_lookup_field (ovar, ctx))
7805 continue;
7808 talign = TYPE_ALIGN_UNIT (TREE_TYPE (ovar));
7809 if (DECL_P (ovar) && DECL_ALIGN_UNIT (ovar) > talign)
7810 talign = DECL_ALIGN_UNIT (ovar);
7811 if (nc)
7813 var = lookup_decl_in_outer_ctx (ovar, ctx);
7814 x = build_sender_ref (ovar, ctx);
7816 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7817 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
7818 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
7819 && TREE_CODE (TREE_TYPE (ovar)) == ARRAY_TYPE)
7821 gcc_assert (offloaded);
7822 tree avar
7823 = create_tmp_var (TREE_TYPE (TREE_TYPE (x)));
7824 mark_addressable (avar);
7825 gimplify_assign (avar, build_fold_addr_expr (var), &ilist);
7826 talign = DECL_ALIGN_UNIT (avar);
7827 avar = build_fold_addr_expr (avar);
7828 gimplify_assign (x, avar, &ilist);
7830 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7832 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
7833 if (!omp_is_reference (var))
7835 if (is_gimple_reg (var)
7836 && OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
7837 TREE_NO_WARNING (var) = 1;
7838 var = build_fold_addr_expr (var);
7840 else
7841 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
7842 gimplify_assign (x, var, &ilist);
7844 else if (is_gimple_reg (var))
/* A register variable has no stable address to map; shuttle its
   value through an addressable temporary in both directions.  */
7846 gcc_assert (offloaded);
7847 tree avar = create_tmp_var (TREE_TYPE (var));
7848 mark_addressable (avar);
7849 enum gomp_map_kind map_kind = OMP_CLAUSE_MAP_KIND (c);
7850 if (GOMP_MAP_COPY_TO_P (map_kind)
7851 || map_kind == GOMP_MAP_POINTER
7852 || map_kind == GOMP_MAP_TO_PSET
7853 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
7855 /* If we need to initialize a temporary
7856 with VAR because it is not addressable, and
7857 the variable hasn't been initialized yet, then
7858 we'll get a warning for the store to avar.
7859 Don't warn in that case, the mapping might
7860 be implicit. */
7861 TREE_NO_WARNING (var) = 1;
7862 gimplify_assign (avar, var, &ilist);
7864 avar = build_fold_addr_expr (avar);
7865 gimplify_assign (x, avar, &ilist);
7866 if ((GOMP_MAP_COPY_FROM_P (map_kind)
7867 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
7868 && !TYPE_READONLY (TREE_TYPE (var)))
7870 x = unshare_expr (x);
7871 x = build_simple_mem_ref (x);
7872 gimplify_assign (var, x, &olist);
7875 else
7877 var = build_fold_addr_expr (var);
7878 gimplify_assign (x, var, &ilist);
7881 s = NULL_TREE;
7882 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7884 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
7885 s = TREE_TYPE (ovar);
7886 if (TREE_CODE (s) == REFERENCE_TYPE)
7887 s = TREE_TYPE (s);
7888 s = TYPE_SIZE_UNIT (s);
7890 else
7891 s = OMP_CLAUSE_SIZE (c);
7892 if (s == NULL_TREE)
7893 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
7894 s = fold_convert (size_type_node, s);
7895 purpose = size_int (map_idx++);
7896 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
/* A non-constant size forces the sizes array to be built at run
   time instead of being a static initializer.  */
7897 if (TREE_CODE (s) != INTEGER_CST)
7898 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
/* TKIND_ZERO is the kind to use when the section length turns out
   to be zero at run time (maybe-zero-length array sections).  */
7900 unsigned HOST_WIDE_INT tkind, tkind_zero;
7901 switch (OMP_CLAUSE_CODE (c))
7903 case OMP_CLAUSE_MAP:
7904 tkind = OMP_CLAUSE_MAP_KIND (c);
7905 tkind_zero = tkind;
7906 if (OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c))
7907 switch (tkind)
7909 case GOMP_MAP_ALLOC:
7910 case GOMP_MAP_TO:
7911 case GOMP_MAP_FROM:
7912 case GOMP_MAP_TOFROM:
7913 case GOMP_MAP_ALWAYS_TO:
7914 case GOMP_MAP_ALWAYS_FROM:
7915 case GOMP_MAP_ALWAYS_TOFROM:
7916 case GOMP_MAP_RELEASE:
7917 case GOMP_MAP_FORCE_TO:
7918 case GOMP_MAP_FORCE_FROM:
7919 case GOMP_MAP_FORCE_TOFROM:
7920 case GOMP_MAP_FORCE_PRESENT:
7921 tkind_zero = GOMP_MAP_ZERO_LEN_ARRAY_SECTION;
7922 break;
7923 case GOMP_MAP_DELETE:
7924 tkind_zero = GOMP_MAP_DELETE_ZERO_LEN_ARRAY_SECTION;
/* FALLTHRU */
7925 default:
7926 break;
7928 if (tkind_zero != tkind)
7930 if (integer_zerop (s))
7931 tkind = tkind_zero;
7932 else if (integer_nonzerop (s))
7933 tkind_zero = tkind;
7935 break;
7936 case OMP_CLAUSE_FIRSTPRIVATE:
7937 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
7938 tkind = GOMP_MAP_TO;
7939 tkind_zero = tkind;
7940 break;
7941 case OMP_CLAUSE_TO:
7942 tkind = GOMP_MAP_TO;
7943 tkind_zero = tkind;
7944 break;
7945 case OMP_CLAUSE_FROM:
7946 tkind = GOMP_MAP_FROM;
7947 tkind_zero = tkind;
7948 break;
7949 default:
7950 gcc_unreachable ();
7952 gcc_checking_assert (tkind
7953 < (HOST_WIDE_INT_C (1U) << talign_shift))
7954 gcc_checking_assert (tkind_zero
7955 < (HOST_WIDE_INT_C (1U) << talign_shift));
7956 talign = ceil_log2 (talign);
7957 tkind |= talign << talign_shift;
7958 tkind_zero |= talign << talign_shift;
7959 gcc_checking_assert (tkind
7960 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
7961 gcc_checking_assert (tkind_zero
7962 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
7963 if (tkind == tkind_zero)
7964 x = build_int_cstu (tkind_type, tkind);
7965 else
/* Kind differs for zero length: select at run time, which also
   means the kinds array cannot be a static initializer.  */
7967 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 0;
7968 x = build3 (COND_EXPR, tkind_type,
7969 fold_build2 (EQ_EXPR, boolean_type_node,
7970 unshare_expr (s), size_zero_node),
7971 build_int_cstu (tkind_type, tkind_zero),
7972 build_int_cstu (tkind_type, tkind));
7974 CONSTRUCTOR_APPEND_ELT (vkind, purpose, x);
7975 if (nc && nc != c)
7976 c = nc;
7977 break;
7979 case OMP_CLAUSE_FIRSTPRIVATE:
7980 if (is_oacc_parallel (ctx))
7981 goto oacc_firstprivate_map;
7982 ovar = OMP_CLAUSE_DECL (c);
7983 if (omp_is_reference (ovar))
7984 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
7985 else
7986 talign = DECL_ALIGN_UNIT (ovar);
7987 var = lookup_decl_in_outer_ctx (ovar, ctx);
7988 x = build_sender_ref (ovar, ctx);
7989 tkind = GOMP_MAP_FIRSTPRIVATE;
7990 type = TREE_TYPE (ovar);
7991 if (omp_is_reference (ovar))
7992 type = TREE_TYPE (type);
7993 if ((INTEGRAL_TYPE_P (type)
7994 && TYPE_PRECISION (type) <= POINTER_SIZE)
7995 || TREE_CODE (type) == POINTER_TYPE)
/* Small scalars travel by value inside the pointer slot itself
   (GOMP_MAP_FIRSTPRIVATE_INT), avoiding any memory mapping.  */
7997 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
7998 tree t = var;
7999 if (omp_is_reference (var))
8000 t = build_simple_mem_ref (var);
8001 else if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
8002 TREE_NO_WARNING (var) = 1;
8003 if (TREE_CODE (type) != POINTER_TYPE)
8004 t = fold_convert (pointer_sized_int_node, t);
8005 t = fold_convert (TREE_TYPE (x), t);
8006 gimplify_assign (x, t, &ilist);
8008 else if (omp_is_reference (var))
8009 gimplify_assign (x, var, &ilist);
8010 else if (is_gimple_reg (var))
8012 tree avar = create_tmp_var (TREE_TYPE (var));
8013 mark_addressable (avar);
8014 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
8015 TREE_NO_WARNING (var) = 1;
8016 gimplify_assign (avar, var, &ilist);
8017 avar = build_fold_addr_expr (avar);
8018 gimplify_assign (x, avar, &ilist);
8020 else
8022 var = build_fold_addr_expr (var);
8023 gimplify_assign (x, var, &ilist);
8025 if (tkind == GOMP_MAP_FIRSTPRIVATE_INT)
8026 s = size_int (0);
8027 else if (omp_is_reference (ovar))
8028 s = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
8029 else
8030 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
8031 s = fold_convert (size_type_node, s);
8032 purpose = size_int (map_idx++);
8033 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
8034 if (TREE_CODE (s) != INTEGER_CST)
8035 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
8037 gcc_checking_assert (tkind
8038 < (HOST_WIDE_INT_C (1U) << talign_shift));
8039 talign = ceil_log2 (talign);
8040 tkind |= talign << talign_shift;
8041 gcc_checking_assert (tkind
8042 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
8043 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
8044 build_int_cstu (tkind_type, tkind));
8045 break;
8047 case OMP_CLAUSE_USE_DEVICE_PTR:
8048 case OMP_CLAUSE_IS_DEVICE_PTR:
8049 ovar = OMP_CLAUSE_DECL (c);
8050 var = lookup_decl_in_outer_ctx (ovar, ctx);
8051 x = build_sender_ref (ovar, ctx);
8052 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR)
8053 tkind = GOMP_MAP_USE_DEVICE_PTR;
8054 else
8055 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
8056 type = TREE_TYPE (ovar);
8057 if (TREE_CODE (type) == ARRAY_TYPE)
8058 var = build_fold_addr_expr (var);
8059 else
8061 if (omp_is_reference (ovar))
8063 type = TREE_TYPE (type);
8064 if (TREE_CODE (type) != ARRAY_TYPE)
8065 var = build_simple_mem_ref (var);
8066 var = fold_convert (TREE_TYPE (x), var);
8069 gimplify_assign (x, var, &ilist);
8070 s = size_int (0);
8071 purpose = size_int (map_idx++);
8072 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
8073 gcc_checking_assert (tkind
8074 < (HOST_WIDE_INT_C (1U) << talign_shift));
8075 gcc_checking_assert (tkind
8076 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
8077 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
8078 build_int_cstu (tkind_type, tkind));
8079 break;
/* Every mapping counted in the first pass must have been emitted.  */
8082 gcc_assert (map_idx == map_cnt);
8084 DECL_INITIAL (TREE_VEC_ELT (t, 1))
8085 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 1)), vsize);
8086 DECL_INITIAL (TREE_VEC_ELT (t, 2))
8087 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 2)), vkind);
8088 for (int i = 1; i <= 2; i++)
8089 if (!TREE_STATIC (TREE_VEC_ELT (t, i)))
/* Non-static size/kind arrays: materialize the initializer at run
   time and clobber the array afterwards to end its lifetime.  */
8091 gimple_seq initlist = NULL;
8092 force_gimple_operand (build1 (DECL_EXPR, void_type_node,
8093 TREE_VEC_ELT (t, i)),
8094 &initlist, true, NULL_TREE);
8095 gimple_seq_add_seq (&ilist, initlist);
8097 tree clobber = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, i)),
8098 NULL);
8099 TREE_THIS_VOLATILE (clobber) = 1;
8100 gimple_seq_add_stmt (&olist,
8101 gimple_build_assign (TREE_VEC_ELT (t, i),
8102 clobber));
8105 tree clobber = build_constructor (ctx->record_type, NULL);
8106 TREE_THIS_VOLATILE (clobber) = 1;
8107 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
8108 clobber));
8111 /* Once all the expansions are done, sequence all the different
8112 fragments inside gimple_omp_body. */
8114 new_body = NULL;
8116 if (offloaded
8117 && ctx->record_type)
8119 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
8120 /* fixup_child_record_type might have changed receiver_decl's type. */
8121 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
8122 gimple_seq_add_stmt (&new_body,
8123 gimple_build_assign (ctx->receiver_decl, t));
8125 gimple_seq_add_seq (&new_body, fplist);
8127 if (offloaded || data_region)
/* Emit receiver-side initialization for firstprivate, private and
   device-pointer clauses at the start of the region body.  PREV
   tracks the clause preceding a FIRSTPRIVATE_{POINTER,REFERENCE}
   pair for the second pass further below.  */
8129 tree prev = NULL_TREE;
8130 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
8131 switch (OMP_CLAUSE_CODE (c))
8133 tree var, x;
8134 default:
8135 break;
8136 case OMP_CLAUSE_FIRSTPRIVATE:
8137 if (is_gimple_omp_oacc (ctx->stmt))
8138 break;
8139 var = OMP_CLAUSE_DECL (c);
8140 if (omp_is_reference (var)
8141 || is_gimple_reg_type (TREE_TYPE (var)))
8143 tree new_var = lookup_decl (var, ctx);
8144 tree type;
8145 type = TREE_TYPE (var);
8146 if (omp_is_reference (var))
8147 type = TREE_TYPE (type);
8148 if ((INTEGRAL_TYPE_P (type)
8149 && TYPE_PRECISION (type) <= POINTER_SIZE)
8150 || TREE_CODE (type) == POINTER_TYPE)
/* Mirror of the GOMP_MAP_FIRSTPRIVATE_INT send: the value was
   passed inside the pointer slot, convert it back.  */
8152 x = build_receiver_ref (var, false, ctx);
8153 if (TREE_CODE (type) != POINTER_TYPE)
8154 x = fold_convert (pointer_sized_int_node, x);
8155 x = fold_convert (type, x);
8156 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
8157 fb_rvalue);
8158 if (omp_is_reference (var))
8160 tree v = create_tmp_var_raw (type, get_name (var));
8161 gimple_add_tmp_var (v);
8162 TREE_ADDRESSABLE (v) = 1;
8163 gimple_seq_add_stmt (&new_body,
8164 gimple_build_assign (v, x));
8165 x = build_fold_addr_expr (v);
8167 gimple_seq_add_stmt (&new_body,
8168 gimple_build_assign (new_var, x));
8170 else
8172 x = build_receiver_ref (var, !omp_is_reference (var), ctx);
8173 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
8174 fb_rvalue);
8175 gimple_seq_add_stmt (&new_body,
8176 gimple_build_assign (new_var, x));
8179 else if (is_variable_sized (var))
8181 tree pvar = DECL_VALUE_EXPR (var);
8182 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
8183 pvar = TREE_OPERAND (pvar, 0);
8184 gcc_assert (DECL_P (pvar));
8185 tree new_var = lookup_decl (pvar, ctx);
8186 x = build_receiver_ref (var, false, ctx);
8187 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8188 gimple_seq_add_stmt (&new_body,
8189 gimple_build_assign (new_var, x));
8191 break;
8192 case OMP_CLAUSE_PRIVATE:
8193 if (is_gimple_omp_oacc (ctx->stmt))
8194 break;
8195 var = OMP_CLAUSE_DECL (c);
8196 if (omp_is_reference (var))
8198 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
8199 tree new_var = lookup_decl (var, ctx);
8200 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
8201 if (TREE_CONSTANT (x))
8203 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
8204 get_name (var));
8205 gimple_add_tmp_var (x);
8206 TREE_ADDRESSABLE (x) = 1;
8207 x = build_fold_addr_expr_loc (clause_loc, x);
8209 else
/* Non-constant size references are handled in the second pass.  */
8210 break;
8212 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
8213 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8214 gimple_seq_add_stmt (&new_body,
8215 gimple_build_assign (new_var, x));
8217 break;
8218 case OMP_CLAUSE_USE_DEVICE_PTR:
8219 case OMP_CLAUSE_IS_DEVICE_PTR:
8220 var = OMP_CLAUSE_DECL (c);
8221 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR)
8222 x = build_sender_ref (var, ctx);
8223 else
8224 x = build_receiver_ref (var, false, ctx);
8225 if (is_variable_sized (var))
8227 tree pvar = DECL_VALUE_EXPR (var);
8228 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
8229 pvar = TREE_OPERAND (pvar, 0);
8230 gcc_assert (DECL_P (pvar));
8231 tree new_var = lookup_decl (pvar, ctx);
8232 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8233 gimple_seq_add_stmt (&new_body,
8234 gimple_build_assign (new_var, x));
8236 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
8238 tree new_var = lookup_decl (var, ctx);
8239 new_var = DECL_VALUE_EXPR (new_var);
8240 gcc_assert (TREE_CODE (new_var) == MEM_REF);
8241 new_var = TREE_OPERAND (new_var, 0);
8242 gcc_assert (DECL_P (new_var));
8243 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8244 gimple_seq_add_stmt (&new_body,
8245 gimple_build_assign (new_var, x));
8247 else
8249 tree type = TREE_TYPE (var);
8250 tree new_var = lookup_decl (var, ctx);
8251 if (omp_is_reference (var))
8253 type = TREE_TYPE (type);
8254 if (TREE_CODE (type) != ARRAY_TYPE)
8256 tree v = create_tmp_var_raw (type, get_name (var));
8257 gimple_add_tmp_var (v);
8258 TREE_ADDRESSABLE (v) = 1;
8259 x = fold_convert (type, x);
8260 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
8261 fb_rvalue);
8262 gimple_seq_add_stmt (&new_body,
8263 gimple_build_assign (v, x));
8264 x = build_fold_addr_expr (v);
8267 new_var = DECL_VALUE_EXPR (new_var);
8268 x = fold_convert (TREE_TYPE (new_var), x);
8269 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8270 gimple_seq_add_stmt (&new_body,
8271 gimple_build_assign (new_var, x));
8273 break;
8275 /* Handle GOMP_MAP_FIRSTPRIVATE_{POINTER,REFERENCE} in second pass,
8276 so that firstprivate vars holding OMP_CLAUSE_SIZE if needed
8277 are already handled. Similarly OMP_CLAUSE_PRIVATE for VLAs
8278 or references to VLAs. */
8279 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
8280 switch (OMP_CLAUSE_CODE (c))
8282 tree var;
8283 default:
8284 break;
8285 case OMP_CLAUSE_MAP:
8286 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
8287 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
8289 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
8290 HOST_WIDE_INT offset = 0;
/* PREV was recorded below when the preceding clause announced
   this FIRSTPRIVATE_{POINTER,REFERENCE}; it carries the decl
   whose receiver slot holds the pointer value.  */
8291 gcc_assert (prev);
8292 var = OMP_CLAUSE_DECL (c);
8293 if (DECL_P (var)
8294 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
8295 && is_global_var (maybe_lookup_decl_in_outer_ctx (var,
8296 ctx))
8297 && varpool_node::get_create (var)->offloadable)
8298 break;
8299 if (TREE_CODE (var) == INDIRECT_REF
8300 && TREE_CODE (TREE_OPERAND (var, 0)) == COMPONENT_REF)
8301 var = TREE_OPERAND (var, 0);
8302 if (TREE_CODE (var) == COMPONENT_REF)
8304 var = get_addr_base_and_unit_offset (var, &offset);
8305 gcc_assert (var != NULL_TREE && DECL_P (var));
8307 else if (DECL_SIZE (var)
8308 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
8310 tree var2 = DECL_VALUE_EXPR (var);
8311 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
8312 var2 = TREE_OPERAND (var2, 0);
8313 gcc_assert (DECL_P (var2));
8314 var = var2;
8316 tree new_var = lookup_decl (var, ctx), x;
8317 tree type = TREE_TYPE (new_var);
8318 bool is_ref;
8319 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == INDIRECT_REF
8320 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
8321 == COMPONENT_REF))
8323 type = TREE_TYPE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0));
8324 is_ref = true;
8325 new_var = build2 (MEM_REF, type,
8326 build_fold_addr_expr (new_var),
8327 build_int_cst (build_pointer_type (type),
8328 offset));
8330 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPONENT_REF)
8332 type = TREE_TYPE (OMP_CLAUSE_DECL (c));
8333 is_ref = TREE_CODE (type) == REFERENCE_TYPE;
8334 new_var = build2 (MEM_REF, type,
8335 build_fold_addr_expr (new_var),
8336 build_int_cst (build_pointer_type (type),
8337 offset));
8339 else
8340 is_ref = omp_is_reference (var);
8341 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
8342 is_ref = false;
8343 bool ref_to_array = false;
8344 if (is_ref)
8346 type = TREE_TYPE (type);
8347 if (TREE_CODE (type) == ARRAY_TYPE)
8349 type = build_pointer_type (type);
8350 ref_to_array = true;
8353 else if (TREE_CODE (type) == ARRAY_TYPE)
8355 tree decl2 = DECL_VALUE_EXPR (new_var);
8356 gcc_assert (TREE_CODE (decl2) == MEM_REF);
8357 decl2 = TREE_OPERAND (decl2, 0);
8358 gcc_assert (DECL_P (decl2));
8359 new_var = decl2;
8360 type = TREE_TYPE (new_var);
8362 x = build_receiver_ref (OMP_CLAUSE_DECL (prev), false, ctx);
8363 x = fold_convert_loc (clause_loc, type, x);
8364 if (!integer_zerop (OMP_CLAUSE_SIZE (c)))
/* OMP_CLAUSE_SIZE here is a bias that was added on the sender
   side; subtract it back out of the received pointer.  */
8366 tree bias = OMP_CLAUSE_SIZE (c);
8367 if (DECL_P (bias))
8368 bias = lookup_decl (bias, ctx);
8369 bias = fold_convert_loc (clause_loc, sizetype, bias);
8370 bias = fold_build1_loc (clause_loc, NEGATE_EXPR, sizetype,
8371 bias);
8372 x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
8373 TREE_TYPE (x), x, bias);
8375 if (ref_to_array)
8376 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
8377 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8378 if (is_ref && !ref_to_array)
8380 tree t = create_tmp_var_raw (type, get_name (var));
8381 gimple_add_tmp_var (t);
8382 TREE_ADDRESSABLE (t) = 1;
8383 gimple_seq_add_stmt (&new_body,
8384 gimple_build_assign (t, x));
8385 x = build_fold_addr_expr_loc (clause_loc, t);
8387 gimple_seq_add_stmt (&new_body,
8388 gimple_build_assign (new_var, x));
8389 prev = NULL_TREE;
8391 else if (OMP_CLAUSE_CHAIN (c)
8392 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c))
8393 == OMP_CLAUSE_MAP
8394 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
8395 == GOMP_MAP_FIRSTPRIVATE_POINTER
8396 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
8397 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
8398 prev = c;
8399 break;
8400 case OMP_CLAUSE_PRIVATE:
8401 var = OMP_CLAUSE_DECL (c);
8402 if (is_variable_sized (var))
/* Private VLA: allocate its storage on the stack here.  */
8404 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
8405 tree new_var = lookup_decl (var, ctx);
8406 tree pvar = DECL_VALUE_EXPR (var);
8407 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
8408 pvar = TREE_OPERAND (pvar, 0);
8409 gcc_assert (DECL_P (pvar));
8410 tree new_pvar = lookup_decl (pvar, ctx);
8411 tree atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
8412 tree al = size_int (DECL_ALIGN (var));
8413 tree x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
8414 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
8415 x = fold_convert_loc (clause_loc, TREE_TYPE (new_pvar), x);
8416 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8417 gimple_seq_add_stmt (&new_body,
8418 gimple_build_assign (new_pvar, x));
8420 else if (omp_is_reference (var) && !is_gimple_omp_oacc (ctx->stmt))
8422 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
8423 tree new_var = lookup_decl (var, ctx);
8424 tree x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
8425 if (TREE_CONSTANT (x))
/* Constant-size case was already emitted in the first pass.  */
8426 break;
8427 else
8429 tree atmp
8430 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
8431 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
8432 tree al = size_int (TYPE_ALIGN (rtype));
8433 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
8436 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
8437 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8438 gimple_seq_add_stmt (&new_body,
8439 gimple_build_assign (new_var, x));
8441 break;
8444 gimple_seq fork_seq = NULL;
8445 gimple_seq join_seq = NULL;
8447 if (is_oacc_parallel (ctx))
8449 /* If there are reductions on the offloaded region itself, treat
8450 them as a dummy GANG loop. */
8451 tree level = build_int_cst (integer_type_node, GOMP_DIM_GANG);
8453 lower_oacc_reductions (gimple_location (ctx->stmt), clauses, level,
8454 false, NULL, NULL, &fork_seq, &join_seq, ctx);
8457 gimple_seq_add_seq (&new_body, fork_seq);
8458 gimple_seq_add_seq (&new_body, tgt_body);
8459 gimple_seq_add_seq (&new_body, join_seq);
8461 if (offloaded)
8462 new_body = maybe_catch_exception (new_body);
8464 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
8465 gimple_omp_set_body (stmt, new_body);
/* Wrap everything up: ILIST, the target statement itself and OLIST go
   into a fresh bind, which replaces the original statement (nested in
   DEP_BIND when depend clauses were present).  */
8468 bind = gimple_build_bind (NULL, NULL,
8469 tgt_bind ? gimple_bind_block (tgt_bind)
8470 : NULL_TREE);
8471 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
8472 gimple_bind_add_seq (bind, ilist);
8473 gimple_bind_add_stmt (bind, stmt);
8474 gimple_bind_add_seq (bind, olist);
8476 pop_gimplify_context (NULL);
8478 if (dep_bind)
8480 gimple_bind_add_seq (dep_bind, dep_ilist);
8481 gimple_bind_add_stmt (dep_bind, bind);
8482 gimple_bind_add_seq (dep_bind, dep_olist);
8483 pop_gimplify_context (dep_bind);
8487 /* Expand code for an OpenMP teams directive.  */
8489 static void
8490 lower_omp_teams (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8492   gomp_teams *teams_stmt = as_a <gomp_teams *> (gsi_stmt (*gsi_p));
8493   push_gimplify_context ();
   /* Wrap the whole lowered construct in a fresh GIMPLE_BIND.  */
8495   tree block = make_node (BLOCK);
8496   gbind *bind = gimple_build_bind (NULL, NULL, block);
8497   gsi_replace (gsi_p, bind, true);
8498   gimple_seq bind_body = NULL;
   /* dlist receives destructor code from lower_rec_input_clauses, olist
      the reduction epilogue from lower_reduction_clauses.  */
8499   gimple_seq dlist = NULL;
8500   gimple_seq olist = NULL;
   /* Evaluate the num_teams clause into a gimple value; an absent clause
      becomes the constant 0 (presumably "runtime default" -- confirm
      against the GOMP_teams ABI).  */
8502   tree num_teams = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
8503 				    OMP_CLAUSE_NUM_TEAMS);
8504   if (num_teams == NULL_TREE)
8505     num_teams = build_int_cst (unsigned_type_node, 0);
8506   else
8508       num_teams = OMP_CLAUSE_NUM_TEAMS_EXPR (num_teams);
8509       num_teams = fold_convert (unsigned_type_node, num_teams);
8510       gimplify_expr (&num_teams, &bind_body, NULL, is_gimple_val, fb_rvalue);
   /* Likewise for thread_limit.  */
8512   tree thread_limit = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
8513 				       OMP_CLAUSE_THREAD_LIMIT);
8514   if (thread_limit == NULL_TREE)
8515     thread_limit = build_int_cst (unsigned_type_node, 0);
8516   else
8518       thread_limit = OMP_CLAUSE_THREAD_LIMIT_EXPR (thread_limit);
8519       thread_limit = fold_convert (unsigned_type_node, thread_limit);
8520       gimplify_expr (&thread_limit, &bind_body, NULL, is_gimple_val,
8521 		     fb_rvalue);
8524   lower_rec_input_clauses (gimple_omp_teams_clauses (teams_stmt),
8525 			   &bind_body, &dlist, ctx, NULL);
8526   lower_omp (gimple_omp_body_ptr (teams_stmt), ctx);
8527   lower_reduction_clauses (gimple_omp_teams_clauses (teams_stmt), &olist, ctx);
   /* Unless this teams statement was marked grid-phony, emit it followed
      by a call to GOMP_teams (num_teams, thread_limit).  */
8528   if (!gimple_omp_teams_grid_phony (teams_stmt))
8530       gimple_seq_add_stmt (&bind_body, teams_stmt);
8531       location_t loc = gimple_location (teams_stmt);
8532       tree decl = builtin_decl_explicit (BUILT_IN_GOMP_TEAMS);
8533       gimple *call = gimple_build_call (decl, 2, num_teams, thread_limit);
8534       gimple_set_location (call, loc);
8535       gimple_seq_add_stmt (&bind_body, call);
   /* Body, reduction epilogue, destructors, then the OMP return.  */
8538   gimple_seq_add_seq (&bind_body, gimple_omp_body (teams_stmt));
8539   gimple_omp_set_body (teams_stmt, NULL);
8540   gimple_seq_add_seq (&bind_body, olist);
8541   gimple_seq_add_seq (&bind_body, dlist);
8542   if (!gimple_omp_teams_grid_phony (teams_stmt))
8543     gimple_seq_add_stmt (&bind_body, gimple_build_omp_return (true));
8544   gimple_bind_set_body (bind, bind_body);
8546   pop_gimplify_context (bind);
8548   gimple_bind_append_vars (bind, ctx->block_vars);
8549   BLOCK_VARS (block) = ctx->block_vars;
8550   if (BLOCK_VARS (block))
8551     TREE_USED (block) = 1;
8554 /* Expand code within an artificial GIMPLE_OMP_GRID_BODY OMP construct. */
8556 static void
8557 lower_omp_grid_body (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8559 gimple *stmt = gsi_stmt (*gsi_p);
8560 lower_omp (gimple_omp_body_ptr (stmt), ctx);
8561 gimple_seq_add_stmt (gimple_omp_body_ptr (stmt),
8562 gimple_build_omp_return (false));
8566 /* Callback for lower_omp_1.  Return non-NULL if *tp needs to be
8567    regimplified.  If DATA is non-NULL, lower_omp_1 is outside
8568    of OMP context, but with task_shared_vars set.  */
8570 static tree
8571 lower_omp_regimplify_p (tree *tp, int *walk_subtrees,
8572 			void *data)
8574   tree t = *tp;
8576   /* Any variable with DECL_VALUE_EXPR needs to be regimplified.  */
8577   if (VAR_P (t) && data == NULL && DECL_HAS_VALUE_EXPR_P (t))
8578     return t;
   /* Decls recorded in the task_shared_vars bitmap force regimplification
      regardless of context.  */
8580   if (task_shared_vars
8581       && DECL_P (t)
8582       && bitmap_bit_p (task_shared_vars, DECL_UID (t)))
8583     return t;
8585   /* If a global variable has been privatized, TREE_CONSTANT on
8586      ADDR_EXPR might be wrong.  */
8587   if (data == NULL && TREE_CODE (t) == ADDR_EXPR)
8588     recompute_tree_invariant_for_addr_expr (t);
   /* Do not descend into types or declarations.  */
8590   *walk_subtrees = !IS_TYPE_OR_DECL_P (t);
8591   return NULL_TREE;
8594 /* Data to be communicated between lower_omp_regimplify_operands and
8595    lower_omp_regimplify_operands_p.  */
8597 struct lower_omp_regimplify_operands_data
   /* Context whose decl mapping is consulted for dummy vars.  */
8599   omp_context *ctx;
   /* Stack of saved (DECL_VALUE_EXPR, decl) pairs, pushed in that order
      by lower_omp_regimplify_operands_p and popped in reverse by
      lower_omp_regimplify_operands to restore the original exprs.  */
8600   vec<tree> *decls;
8603 /* Helper function for lower_omp_regimplify_operands.  Find
8604    omp_member_access_dummy_var vars and adjust temporarily their
8605    DECL_VALUE_EXPRs if needed.  */
8607 static tree
8608 lower_omp_regimplify_operands_p (tree *tp, int *walk_subtrees,
8609 				 void *data)
8611   tree t = omp_member_access_dummy_var (*tp);
8612   if (t)
8614       struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
8615       lower_omp_regimplify_operands_data *ldata
8616 	= (lower_omp_regimplify_operands_data *) wi->info;
8617       tree o = maybe_lookup_decl (t, ldata->ctx);
8618       if (o != t)
   /* Save the old DECL_VALUE_EXPR and the decl so the caller can
      restore them later, then install a copy remapped from T to O.  */
8620 	  ldata->decls->safe_push (DECL_VALUE_EXPR (*tp));
8621 	  ldata->decls->safe_push (*tp);
8622 	  tree v = unshare_and_remap (DECL_VALUE_EXPR (*tp), t, o);
8623 	  SET_DECL_VALUE_EXPR (*tp, v);
   /* Do not descend into types or declarations.  */
8626   *walk_subtrees = !IS_TYPE_OR_DECL_P (*tp);
8627   return NULL_TREE;
8630 /* Wrapper around gimple_regimplify_operands that adjusts DECL_VALUE_EXPRs
8631    of omp_member_access_dummy_var vars during regimplification.  */
8633 static void
8634 lower_omp_regimplify_operands (omp_context *ctx, gimple *stmt,
8635 			       gimple_stmt_iterator *gsi_p)
8637   auto_vec<tree, 10> decls;
8638   if (ctx)
   /* Temporarily remap DECL_VALUE_EXPRs of member-access dummy vars in
      STMT's operands; the saved pairs land in DECLS.  */
8640       struct walk_stmt_info wi;
8641       memset (&wi, '\0', sizeof (wi));
8642       struct lower_omp_regimplify_operands_data data;
8643       data.ctx = ctx;
8644       data.decls = &decls;
8645       wi.info = &data;
8646       walk_gimple_op (stmt, lower_omp_regimplify_operands_p, &wi);
8648   gimple_regimplify_operands (stmt, gsi_p);
   /* Restore the saved value-exprs; pushed as (value expr, decl) pairs,
      so pop yields the decl first.  */
8649   while (!decls.is_empty ())
8651       tree t = decls.pop ();
8652       tree v = decls.pop ();
8653       SET_DECL_VALUE_EXPR (t, v);
/* Lower one GIMPLE statement at *GSI_P in OMP context CTX: recurse into
   compound statements, dispatch OMP constructs to their specific lowering
   routines, rewrite GOMP barrier/cancellation builtins inside cancellable
   regions, and regimplify operands that mention privatized variables.  */
8657 static void
8658 lower_omp_1 (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8660   gimple *stmt = gsi_stmt (*gsi_p);
8661   struct walk_stmt_info wi;
8662   gcall *call_stmt;
8664   if (gimple_has_location (stmt))
8665     input_location = gimple_location (stmt);
   /* wi is only consulted (as the DATA argument of lower_omp_regimplify_p)
      when lowering outside of an OMP context with task_shared_vars set.  */
8667   if (task_shared_vars)
8668     memset (&wi, '\0', sizeof (wi));
8670   /* If we have issued syntax errors, avoid doing any heavy lifting.
8671      Just replace the OMP directives with a NOP to avoid
8672      confusing RTL expansion.  */
8673   if (seen_error () && is_gimple_omp (stmt))
8675       gsi_replace (gsi_p, gimple_build_nop (), true);
8676       return;
8679   switch (gimple_code (stmt))
8681     case GIMPLE_COND:
8683 	gcond *cond_stmt = as_a <gcond *> (stmt);
8684 	if ((ctx || task_shared_vars)
8685 	    && (walk_tree (gimple_cond_lhs_ptr (cond_stmt),
8686 			   lower_omp_regimplify_p,
8687 			   ctx ? NULL : &wi, NULL)
8688 		|| walk_tree (gimple_cond_rhs_ptr (cond_stmt),
8689 			      lower_omp_regimplify_p,
8690 			      ctx ? NULL : &wi, NULL)))
8691 	  lower_omp_regimplify_operands (ctx, cond_stmt, gsi_p);
8693       break;
8694     case GIMPLE_CATCH:
8695       lower_omp (gimple_catch_handler_ptr (as_a <gcatch *> (stmt)), ctx);
8696       break;
8697     case GIMPLE_EH_FILTER:
8698       lower_omp (gimple_eh_filter_failure_ptr (stmt), ctx);
8699       break;
8700     case GIMPLE_TRY:
8701       lower_omp (gimple_try_eval_ptr (stmt), ctx);
8702       lower_omp (gimple_try_cleanup_ptr (stmt), ctx);
8703       break;
8704     case GIMPLE_TRANSACTION:
8705       lower_omp (gimple_transaction_body_ptr (as_a <gtransaction *> (stmt)),
8706 		 ctx);
8707       break;
8708     case GIMPLE_BIND:
8709       lower_omp (gimple_bind_body_ptr (as_a <gbind *> (stmt)), ctx);
8710       break;
   /* For the OMP constructs below, switch to the construct's own context
      (recorded during scanning) before lowering it.  */
8711     case GIMPLE_OMP_PARALLEL:
8712     case GIMPLE_OMP_TASK:
8713       ctx = maybe_lookup_ctx (stmt);
8714       gcc_assert (ctx);
8715       if (ctx->cancellable)
8716 	ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
8717       lower_omp_taskreg (gsi_p, ctx);
8718       break;
8719     case GIMPLE_OMP_FOR:
8720       ctx = maybe_lookup_ctx (stmt);
8721       gcc_assert (ctx);
8722       if (ctx->cancellable)
8723 	ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
8724       lower_omp_for (gsi_p, ctx);
8725       break;
8726     case GIMPLE_OMP_SECTIONS:
8727       ctx = maybe_lookup_ctx (stmt);
8728       gcc_assert (ctx);
8729       if (ctx->cancellable)
8730 	ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
8731       lower_omp_sections (gsi_p, ctx);
8732       break;
8733     case GIMPLE_OMP_SINGLE:
8734       ctx = maybe_lookup_ctx (stmt);
8735       gcc_assert (ctx);
8736       lower_omp_single (gsi_p, ctx);
8737       break;
8738     case GIMPLE_OMP_MASTER:
8739       ctx = maybe_lookup_ctx (stmt);
8740       gcc_assert (ctx);
8741       lower_omp_master (gsi_p, ctx);
8742       break;
8743     case GIMPLE_OMP_TASKGROUP:
8744       ctx = maybe_lookup_ctx (stmt);
8745       gcc_assert (ctx);
8746       lower_omp_taskgroup (gsi_p, ctx);
8747       break;
8748     case GIMPLE_OMP_ORDERED:
8749       ctx = maybe_lookup_ctx (stmt);
8750       gcc_assert (ctx);
8751       lower_omp_ordered (gsi_p, ctx);
8752       break;
8753     case GIMPLE_OMP_CRITICAL:
8754       ctx = maybe_lookup_ctx (stmt);
8755       gcc_assert (ctx);
8756       lower_omp_critical (gsi_p, ctx);
8757       break;
8758     case GIMPLE_OMP_ATOMIC_LOAD:
8759       if ((ctx || task_shared_vars)
8760 	  && walk_tree (gimple_omp_atomic_load_rhs_ptr (
8761 			  as_a <gomp_atomic_load *> (stmt)),
8762 			lower_omp_regimplify_p, ctx ? NULL : &wi, NULL))
8763 	lower_omp_regimplify_operands (ctx, stmt, gsi_p);
8764       break;
8765     case GIMPLE_OMP_TARGET:
8766       ctx = maybe_lookup_ctx (stmt);
8767       gcc_assert (ctx);
8768       lower_omp_target (gsi_p, ctx);
8769       break;
8770     case GIMPLE_OMP_TEAMS:
8771       ctx = maybe_lookup_ctx (stmt);
8772       gcc_assert (ctx);
8773       lower_omp_teams (gsi_p, ctx);
8774       break;
8775     case GIMPLE_OMP_GRID_BODY:
8776       ctx = maybe_lookup_ctx (stmt);
8777       gcc_assert (ctx);
8778       lower_omp_grid_body (gsi_p, ctx);
8779       break;
8780     case GIMPLE_CALL:
8781       tree fndecl;
8782       call_stmt = as_a <gcall *> (stmt);
8783       fndecl = gimple_call_fndecl (call_stmt);
8784       if (fndecl
8785 	  && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
8786 	switch (DECL_FUNCTION_CODE (fndecl))
8788 	  case BUILT_IN_GOMP_BARRIER:
8789 	    if (ctx == NULL)
8790 	      break;
8791 	    /* FALLTHRU */
8792 	  case BUILT_IN_GOMP_CANCEL:
8793 	  case BUILT_IN_GOMP_CANCELLATION_POINT:
8794 	    omp_context *cctx;
8795 	    cctx = ctx;
   /* Cancellation applies to the construct enclosing a section.  */
8796 	    if (gimple_code (cctx->stmt) == GIMPLE_OMP_SECTION)
8797 	      cctx = cctx->outer;
8798 	    gcc_assert (gimple_call_lhs (call_stmt) == NULL_TREE);
8799 	    if (!cctx->cancellable)
   /* In a non-cancellable region a cancellation point is a no-op;
      replace it with a nop.  Barriers and cancel calls are left as is.  */
8801 		if (DECL_FUNCTION_CODE (fndecl)
8802 		    == BUILT_IN_GOMP_CANCELLATION_POINT)
8804 		    stmt = gimple_build_nop ();
8805 		    gsi_replace (gsi_p, stmt, false);
8807 		break;
   /* In a cancellable region a barrier becomes GOMP_barrier_cancel.  */
8809 	    if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_GOMP_BARRIER)
8811 		fndecl = builtin_decl_explicit (BUILT_IN_GOMP_BARRIER_CANCEL);
8812 		gimple_call_set_fndecl (call_stmt, fndecl);
8813 		gimple_call_set_fntype (call_stmt, TREE_TYPE (fndecl));
   /* Capture the call's result and branch to the region's cancel label
      when it is true, otherwise fall through.  */
8815 	    tree lhs;
8816 	    lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (fndecl)));
8817 	    gimple_call_set_lhs (call_stmt, lhs);
8818 	    tree fallthru_label;
8819 	    fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
8820 	    gimple *g;
8821 	    g = gimple_build_label (fallthru_label);
8822 	    gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
8823 	    g = gimple_build_cond (NE_EXPR, lhs,
8824 				   fold_convert (TREE_TYPE (lhs),
8825 						 boolean_false_node),
8826 				   cctx->cancel_label, fallthru_label);
8827 	    gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
8828 	    break;
8829 	  default:
8830 	    break;
8832       /* FALLTHRU */
8833     default:
8834       if ((ctx || task_shared_vars)
8835 	  && walk_gimple_op (stmt, lower_omp_regimplify_p,
8836 			     ctx ? NULL : &wi))
8838 	  /* Just remove clobbers, this should happen only if we have
8839 	     "privatized" local addressable variables in SIMD regions,
8840 	     the clobber isn't needed in that case and gimplifying address
8841 	     of the ARRAY_REF into a pointer and creating MEM_REF based
8842 	     clobber would create worse code than we get with the clobber
8843 	     dropped.  */
8844 	  if (gimple_clobber_p (stmt))
8846 	      gsi_replace (gsi_p, gimple_build_nop (), true);
8847 	      break;
8849 	  lower_omp_regimplify_operands (ctx, stmt, gsi_p);
8851       break;
8855 static void
8856 lower_omp (gimple_seq *body, omp_context *ctx)
8858 location_t saved_location = input_location;
8859 gimple_stmt_iterator gsi;
8860 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
8861 lower_omp_1 (&gsi, ctx);
8862 /* During gimplification, we haven't folded statments inside offloading
8863 or taskreg regions (gimplify.c:maybe_fold_stmt); do that now. */
8864 if (target_nesting_level || taskreg_nesting_level)
8865 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
8866 fold_stmt (&gsi);
8867 input_location = saved_location;
8870 /* Main entry point.  */
8872 static unsigned int
8873 execute_lower_omp (void)
8875   gimple_seq body;
8876   int i;
8877   omp_context *ctx;
8879   /* This pass always runs, to provide PROP_gimple_lomp.
8880      But often, there is nothing to do.  */
8881   if (flag_cilkplus == 0 && flag_openacc == 0 && flag_openmp == 0
8882       && flag_openmp_simd == 0)
8883     return 0;
8885   all_contexts = splay_tree_new (splay_tree_compare_pointers, 0,
8886 				 delete_omp_context);
8888   body = gimple_body (current_function_decl);
   /* Gridify target constructs first when HSA code generation was
      requested.  */
8890   if (hsa_gen_requested_p ())
8891     omp_grid_gridify_all_targets (&body);
   /* Scan builds the omp_context tree; lowering below consults it.  */
8893   scan_omp (&body, NULL);
8894   gcc_assert (taskreg_nesting_level == 0);
8895   FOR_EACH_VEC_ELT (taskreg_contexts, i, ctx)
8896     finish_taskreg_scan (ctx);
8897   taskreg_contexts.release ();
8899   if (all_contexts->root)
   /* A gimplification context is only needed when task-shared variables
      may force regimplification outside any OMP context.  */
8901       if (task_shared_vars)
8902 	push_gimplify_context ();
8903       lower_omp (&body, NULL);
8904       if (task_shared_vars)
8905 	pop_gimplify_context (NULL);
8908   if (all_contexts)
8910       splay_tree_delete (all_contexts);
8911       all_contexts = NULL;
8913   BITMAP_FREE (task_shared_vars);
8914   return 0;
8917 namespace {
   /* Pass descriptor for the "omplower" pass; provides PROP_gimple_lomp.  */
8919 const pass_data pass_data_lower_omp =
8921   GIMPLE_PASS, /* type */
8922   "omplower", /* name */
8923   OPTGROUP_OMP, /* optinfo_flags */
8924   TV_NONE, /* tv_id */
8925   PROP_gimple_any, /* properties_required */
8926   PROP_gimple_lomp | PROP_gimple_lomp_dev, /* properties_provided */
8927   0, /* properties_destroyed */
8928   0, /* todo_flags_start */
8929   0, /* todo_flags_finish */
   /* GIMPLE pass wrapper that runs execute_lower_omp unconditionally
      (no gate; the cheap early-out is inside execute_lower_omp).  */
8932 class pass_lower_omp : public gimple_opt_pass
8934 public:
8935   pass_lower_omp (gcc::context *ctxt)
8936     : gimple_opt_pass (pass_data_lower_omp, ctxt)
8939   /* opt_pass methods: */
8940   virtual unsigned int execute (function *) { return execute_lower_omp (); }
8942 }; // class pass_lower_omp
8944 } // anon namespace
   /* Factory used by the pass manager; caller owns the returned pass.  */
8946 gimple_opt_pass *
8947 make_pass_lower_omp (gcc::context *ctxt)
8949   return new pass_lower_omp (ctxt);
8952 /* The following is a utility to diagnose structured block violations.
8953    It is not part of the "omplower" pass, as that's invoked too late.  It
8954    should be invoked by the respective front ends after gimplification.  */
   /* Splay tree mapping each LABEL_DECL (key) to the innermost enclosing
      OMP construct (value, a gimple *); built by diagnose_sb_1 and
      consulted by diagnose_sb_2.  */
8956 static splay_tree all_labels;
8958 /* Check for mismatched contexts and generate an error if needed.  Return
8959    true if an error is detected.  */
8961 static bool
8962 diagnose_sb_0 (gimple_stmt_iterator *gsi_p,
8963 	       gimple *branch_ctx, gimple *label_ctx)
8965   gcc_checking_assert (!branch_ctx || is_gimple_omp (branch_ctx));
8966   gcc_checking_assert (!label_ctx || is_gimple_omp (label_ctx));
   /* Same construct on both ends: the branch is legal.  */
8968   if (label_ctx == branch_ctx)
8969     return false;
   /* Decide which language's blocks are involved, for the wording of
      the diagnostic below.  */
8971   const char* kind = NULL;
8973   if (flag_cilkplus)
8975       if ((branch_ctx
8976 	   && gimple_code (branch_ctx) == GIMPLE_OMP_FOR
8977 	   && gimple_omp_for_kind (branch_ctx) == GF_OMP_FOR_KIND_CILKSIMD)
8978 	  || (label_ctx
8979 	      && gimple_code (label_ctx) == GIMPLE_OMP_FOR
8980 	      && gimple_omp_for_kind (label_ctx) == GF_OMP_FOR_KIND_CILKSIMD))
8981 	kind = "Cilk Plus";
8983   if (flag_openacc)
8985       if ((branch_ctx && is_gimple_omp_oacc (branch_ctx))
8986 	  || (label_ctx && is_gimple_omp_oacc (label_ctx)))
8988 	  gcc_checking_assert (kind == NULL);
8989 	  kind = "OpenACC";
8992   if (kind == NULL)
8994       gcc_checking_assert (flag_openmp);
8995       kind = "OpenMP";
8998   /* Previously we kept track of the label's entire context in diagnose_sb_[12]
8999      so we could traverse it and issue a correct "exit" or "enter" error
9000      message upon a structured block violation.
9002      We built the context by building a list with tree_cons'ing, but there is
9003      no easy counterpart in gimple tuples.  It seems like far too much work
9004      for issuing exit/enter error messages.  If someone really misses the
9005      distinct error message... patches welcome.  */
9007 #if 0
9008   /* Try to avoid confusing the user by producing and error message
9009      with correct "exit" or "enter" verbiage.  We prefer "exit"
9010      unless we can show that LABEL_CTX is nested within BRANCH_CTX.  */
9011   if (branch_ctx == NULL)
9012     exit_p = false;
9013   else
9015       while (label_ctx)
9017 	  if (TREE_VALUE (label_ctx) == branch_ctx)
9019 	      exit_p = false;
9020 	      break;
9022 	  label_ctx = TREE_CHAIN (label_ctx);
9026   if (exit_p)
9027     error ("invalid exit from %s structured block", kind);
9028   else
9029     error ("invalid entry to %s structured block", kind);
9030 #endif
9032   /* If it's obvious we have an invalid entry, be specific about the error.  */
9033   if (branch_ctx == NULL)
9034     error ("invalid entry to %s structured block", kind);
9035   else
9037     /* Otherwise, be vague and lazy, but efficient.  */
9038     error ("invalid branch to/from %s structured block", kind);
   /* Remove the offending branch so later passes see valid control flow.  */
9041   gsi_replace (gsi_p, gimple_build_nop (), false);
9042   return true;
9045 /* Pass 1: Create a minimal tree of structured blocks, and record
9046    where each label is found.  */
9048 static tree
9049 diagnose_sb_1 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
9050 	       struct walk_stmt_info *wi)
9052   gimple *context = (gimple *) wi->info;
9053   gimple *inner_context;
9054   gimple *stmt = gsi_stmt (*gsi_p);
9056   *handled_ops_p = true;
9058   switch (gimple_code (stmt))
9060     WALK_SUBSTMTS;
9062     case GIMPLE_OMP_PARALLEL:
9063     case GIMPLE_OMP_TASK:
9064     case GIMPLE_OMP_SECTIONS:
9065     case GIMPLE_OMP_SINGLE:
9066     case GIMPLE_OMP_SECTION:
9067     case GIMPLE_OMP_MASTER:
9068     case GIMPLE_OMP_ORDERED:
9069     case GIMPLE_OMP_CRITICAL:
9070     case GIMPLE_OMP_TARGET:
9071     case GIMPLE_OMP_TEAMS:
9072     case GIMPLE_OMP_TASKGROUP:
9073       /* The minimal context here is just the current OMP construct.  */
9074       inner_context = stmt;
9075       wi->info = inner_context;
9076       walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
9077       wi->info = context;
9078       break;
9080     case GIMPLE_OMP_FOR:
9081       inner_context = stmt;
9082       wi->info = inner_context;
9083       /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
9084 	 walk them.  */
9085       walk_gimple_seq (gimple_omp_for_pre_body (stmt),
9086 		       diagnose_sb_1, NULL, wi);
9087       walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
9088       wi->info = context;
9089       break;
9091     case GIMPLE_LABEL:
   /* Record the innermost OMP construct containing this label (NULL when
      the label is outside all constructs).  */
9092       splay_tree_insert (all_labels,
9093 			 (splay_tree_key) gimple_label_label (
9094 					    as_a <glabel *> (stmt)),
9095 			 (splay_tree_value) context);
9096       break;
9098     default:
9099       break;
9102   return NULL_TREE;
9105 /* Pass 2: Check each branch and see if its context differs from that of
9106    the destination label's context.  */
9108 static tree
9109 diagnose_sb_2 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
9110 	       struct walk_stmt_info *wi)
9112   gimple *context = (gimple *) wi->info;
9113   splay_tree_node n;
9114   gimple *stmt = gsi_stmt (*gsi_p);
9116   *handled_ops_p = true;
9118   switch (gimple_code (stmt))
9120     WALK_SUBSTMTS;
   /* Entering an OMP construct: it becomes the current context for the
      statements of its body.  */
9122     case GIMPLE_OMP_PARALLEL:
9123     case GIMPLE_OMP_TASK:
9124     case GIMPLE_OMP_SECTIONS:
9125     case GIMPLE_OMP_SINGLE:
9126     case GIMPLE_OMP_SECTION:
9127     case GIMPLE_OMP_MASTER:
9128     case GIMPLE_OMP_ORDERED:
9129     case GIMPLE_OMP_CRITICAL:
9130     case GIMPLE_OMP_TARGET:
9131     case GIMPLE_OMP_TEAMS:
9132     case GIMPLE_OMP_TASKGROUP:
9133       wi->info = stmt;
9134       walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
9135       wi->info = context;
9136       break;
9138     case GIMPLE_OMP_FOR:
9139       wi->info = stmt;
9140       /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
9141 	 walk them.  */
9142       walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt),
9143 			   diagnose_sb_2, NULL, wi);
9144       walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
9145       wi->info = context;
9146       break;
   /* Check both targets of a conditional branch.  */
9148     case GIMPLE_COND:
9150 	gcond *cond_stmt = as_a <gcond *> (stmt);
9151 	tree lab = gimple_cond_true_label (cond_stmt);
9152 	if (lab)
9154 	    n = splay_tree_lookup (all_labels,
9155 				   (splay_tree_key) lab);
9156 	    diagnose_sb_0 (gsi_p, context,
9157 			   n ? (gimple *) n->value : NULL);
9159 	lab = gimple_cond_false_label (cond_stmt);
9160 	if (lab)
9162 	    n = splay_tree_lookup (all_labels,
9163 				   (splay_tree_key) lab);
9164 	    diagnose_sb_0 (gsi_p, context,
9165 			   n ? (gimple *) n->value : NULL);
9168       break;
9170     case GIMPLE_GOTO:
9172 	tree lab = gimple_goto_dest (stmt);
   /* Computed gotos (non-LABEL_DECL destinations) are not checked.  */
9173 	if (TREE_CODE (lab) != LABEL_DECL)
9174 	  break;
9176 	n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
9177 	diagnose_sb_0 (gsi_p, context, n ? (gimple *) n->value : NULL);
9179       break;
9181     case GIMPLE_SWITCH:
9183 	gswitch *switch_stmt = as_a <gswitch *> (stmt);
9184 	unsigned int i;
9185 	for (i = 0; i < gimple_switch_num_labels (switch_stmt); ++i)
9187 	    tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
9188 	    n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
   /* One diagnosed case is enough; diagnose_sb_0 already replaced the
      whole switch with a nop.  */
9189 	    if (n && diagnose_sb_0 (gsi_p, context, (gimple *) n->value))
9190 	      break;
9193       break;
   /* A return inside any OMP construct (non-NULL context) is an invalid
      exit from that construct.  */
9195     case GIMPLE_RETURN:
9196       diagnose_sb_0 (gsi_p, context, NULL);
9197       break;
9199     default:
9200       break;
9203   return NULL_TREE;
/* Run the two structured-block diagnostic walks over the current
   function: pass 1 (diagnose_sb_1) records each label's enclosing OMP
   construct in all_labels; pass 2 (diagnose_sb_2) checks every branch
   against its target's context and replaces offenders with nops.  */
9206 static unsigned int
9207 diagnose_omp_structured_block_errors (void)
9209   struct walk_stmt_info wi;
9210   gimple_seq body = gimple_body (current_function_decl);
9212   all_labels = splay_tree_new (splay_tree_compare_pointers, 0, 0);
9214   memset (&wi, 0, sizeof (wi));
9215   walk_gimple_seq (body, diagnose_sb_1, NULL, &wi);
9217   memset (&wi, 0, sizeof (wi));
9218   wi.want_locations = true;
   /* The second walk may replace statements, hence the _mod variant and
      the write-back of BODY below.  */
9219   walk_gimple_seq_mod (&body, diagnose_sb_2, NULL, &wi);
9221   gimple_set_body (current_function_decl, body);
9223   splay_tree_delete (all_labels);
9224   all_labels = NULL;
9226   return 0;
9229 namespace {
   /* Pass descriptor for the hidden "*diagnose_omp_blocks" pass.  */
9231 const pass_data pass_data_diagnose_omp_blocks =
9233   GIMPLE_PASS, /* type */
9234   "*diagnose_omp_blocks", /* name */
9235   OPTGROUP_OMP, /* optinfo_flags */
9236   TV_NONE, /* tv_id */
9237   PROP_gimple_any, /* properties_required */
9238   0, /* properties_provided */
9239   0, /* properties_destroyed */
9240   0, /* todo_flags_start */
9241   0, /* todo_flags_finish */
   /* Pass wrapper; gated on any of the OMP-family front-end flags.  */
9244 class pass_diagnose_omp_blocks : public gimple_opt_pass
9246 public:
9247   pass_diagnose_omp_blocks (gcc::context *ctxt)
9248     : gimple_opt_pass (pass_data_diagnose_omp_blocks, ctxt)
9251   /* opt_pass methods: */
9252   virtual bool gate (function *)
9254     return flag_cilkplus || flag_openacc || flag_openmp;
9256   virtual unsigned int execute (function *)
9258     return diagnose_omp_structured_block_errors ();
9261 }; // class pass_diagnose_omp_blocks
9263 } // anon namespace
   /* Factory used by the pass manager; caller owns the returned pass.  */
9265 gimple_opt_pass *
9266 make_pass_diagnose_omp_blocks (gcc::context *ctxt)
9268   return new pass_diagnose_omp_blocks (ctxt);
9272 #include "gt-omp-low.h"