gcc/omp-low.c
/* Lowering pass for OMP directives.  Converts OMP directives into explicit
   calls to the runtime library (libgomp), data marshalling to implement data
   sharing and copying clauses, offloading to accelerators, and more.

   Contributed by Diego Novillo <dnovillo@redhat.com>

   Copyright (C) 2005-2017 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "ssa.h"
#include "cgraph.h"
#include "pretty-print.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "tree-iterator.h"
#include "tree-inline.h"
#include "langhooks.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "splay-tree.h"
#include "omp-general.h"
#include "omp-low.h"
#include "omp-grid.h"
#include "gimple-low.h"
#include "symbol-summary.h"
#include "tree-nested.h"
#include "context.h"
#include "gomp-constants.h"
#include "gimple-pretty-print.h"
#include "hsa-common.h"
/* Lowering of OMP parallel and workshare constructs proceeds in two
   phases.  The first phase scans the function looking for OMP statements
   and then for variables that must be replaced to satisfy data sharing
   clauses.  The second phase expands code for the constructs, as well as
   re-gimplifying things when variables have been replaced with complex
   expressions.

   Final code generation is done by pass_expand_omp.  The flowgraph is
   scanned for regions which are then moved to a new
   function, to be invoked by the thread library, or offloaded.  */
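
/* As a deliberately simplified illustration (not generated output): given

	#pragma omp parallel shared (n)
	  n++;

   the scanning phase below records N in a communication record named
   .omp_data_s, and expansion eventually produces a child function along
   the lines of

	void foo._omp_fn.0 (struct .omp_data_s *.omp_data_i)
	{
	  .omp_data_i->n++;
	}

   which libgomp invokes on each thread of the team.  */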
/* Context structure.  Used to store information about each parallel
   directive in the code.  */

struct omp_context
{
  /* This field must be at the beginning, as we do "inheritance":  Some
     callback functions for tree-inline.c (e.g., omp_copy_decl)
     receive a copy_body_data pointer that is up-casted to an
     omp_context pointer.  */
  copy_body_data cb;

  /* The tree of contexts corresponding to the encountered constructs.  */
  struct omp_context *outer;
  gimple *stmt;

  /* Map variables to fields in a structure that allows communication
     between sending and receiving threads.  */
  splay_tree field_map;
  tree record_type;
  tree sender_decl;
  tree receiver_decl;

  /* These are used just by task contexts, if task firstprivate fn is
     needed.  srecord_type is used to communicate from the thread
     that encountered the task construct to task firstprivate fn,
     record_type is allocated by GOMP_task, initialized by task firstprivate
     fn and passed to the task body fn.  */
  splay_tree sfield_map;
  tree srecord_type;

  /* A chain of variables to add to the top-level block surrounding the
     construct.  In the case of a parallel, this is in the child function.  */
  tree block_vars;

  /* Label to which GOMP_cancel{,lation_point} and explicit and implicit
     barriers should jump during the omplower pass.  */
  tree cancel_label;

  /* What to do with variables with implicitly determined sharing
     attributes.  */
  enum omp_clause_default_kind default_kind;

  /* Nesting depth of this context.  Used to beautify error messages
     about invalid gotos.  The outermost ctx is depth 1, with depth 0 being
     reserved for the main body of the function.  */
  int depth;

  /* True if this parallel directive is nested within another.  */
  bool is_nested;

  /* True if this construct can be cancelled.  */
  bool cancellable;
};
static splay_tree all_contexts;
static int taskreg_nesting_level;
static int target_nesting_level;
static bitmap task_shared_vars;
static vec<omp_context *> taskreg_contexts;

static void scan_omp (gimple_seq *, omp_context *);
static tree scan_omp_1_op (tree *, int *, void *);

#define WALK_SUBSTMTS  \
    case GIMPLE_BIND: \
    case GIMPLE_TRY: \
    case GIMPLE_CATCH: \
    case GIMPLE_EH_FILTER: \
    case GIMPLE_TRANSACTION: \
      /* The sub-statements for these should be walked.  */ \
      *handled_ops_p = false; \
      break;
/* Return true if CTX corresponds to an oacc parallel region.  */

static bool
is_oacc_parallel (omp_context *ctx)
{
  enum gimple_code outer_type = gimple_code (ctx->stmt);
  return ((outer_type == GIMPLE_OMP_TARGET)
	  && (gimple_omp_target_kind (ctx->stmt)
	      == GF_OMP_TARGET_KIND_OACC_PARALLEL));
}

/* Return true if CTX corresponds to an oacc kernels region.  */

static bool
is_oacc_kernels (omp_context *ctx)
{
  enum gimple_code outer_type = gimple_code (ctx->stmt);
  return ((outer_type == GIMPLE_OMP_TARGET)
	  && (gimple_omp_target_kind (ctx->stmt)
	      == GF_OMP_TARGET_KIND_OACC_KERNELS));
}
/* If DECL is the artificial dummy VAR_DECL created for non-static
   data member privatization, return the underlying "this" parameter,
   otherwise return NULL.  */

tree
omp_member_access_dummy_var (tree decl)
{
  if (!VAR_P (decl)
      || !DECL_ARTIFICIAL (decl)
      || !DECL_IGNORED_P (decl)
      || !DECL_HAS_VALUE_EXPR_P (decl)
      || !lang_hooks.decls.omp_disregard_value_expr (decl, false))
    return NULL_TREE;

  tree v = DECL_VALUE_EXPR (decl);
  if (TREE_CODE (v) != COMPONENT_REF)
    return NULL_TREE;

  while (1)
    switch (TREE_CODE (v))
      {
      case COMPONENT_REF:
      case MEM_REF:
      case INDIRECT_REF:
      CASE_CONVERT:
      case POINTER_PLUS_EXPR:
	v = TREE_OPERAND (v, 0);
	continue;
      case PARM_DECL:
	if (DECL_CONTEXT (v) == current_function_decl
	    && DECL_ARTIFICIAL (v)
	    && TREE_CODE (TREE_TYPE (v)) == POINTER_TYPE)
	  return v;
	return NULL_TREE;
      default:
	return NULL_TREE;
      }
}
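
/* For example (illustrative only): inside a C++ member function,
   privatizing a non-static data member X gives rise to an artificial
   VAR_DECL whose DECL_VALUE_EXPR is essentially this->x; the loop
   above peels off the COMPONENT_REF and indirection layers to recover
   the artificial "this" PARM_DECL.  */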
/* Helper for unshare_and_remap, called through walk_tree.  */

static tree
unshare_and_remap_1 (tree *tp, int *walk_subtrees, void *data)
{
  tree *pair = (tree *) data;
  if (*tp == pair[0])
    {
      *tp = unshare_expr (pair[1]);
      *walk_subtrees = 0;
    }
  else if (IS_TYPE_OR_DECL_P (*tp))
    *walk_subtrees = 0;
  return NULL_TREE;
}

/* Return unshare_expr (X) with all occurrences of FROM
   replaced with TO.  */

static tree
unshare_and_remap (tree x, tree from, tree to)
{
  tree pair[2] = { from, to };
  x = unshare_expr (x);
  walk_tree (&x, unshare_and_remap_1, pair, NULL);
  return x;
}

/* Convenience function for calling scan_omp_1_op on tree operands.  */

static inline tree
scan_omp_op (tree *tp, omp_context *ctx)
{
  struct walk_stmt_info wi;

  memset (&wi, 0, sizeof (wi));
  wi.info = ctx;
  wi.want_locations = true;

  return walk_tree (tp, scan_omp_1_op, &wi, NULL);
}

static void lower_omp (gimple_seq *, omp_context *);
static tree lookup_decl_in_outer_ctx (tree, omp_context *);
static tree maybe_lookup_decl_in_outer_ctx (tree, omp_context *);
/* Return true if CTX is for an omp parallel.  */

static inline bool
is_parallel_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL;
}

/* Return true if CTX is for an omp task.  */

static inline bool
is_task_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_TASK;
}

/* Return true if CTX is for an omp taskloop.  */

static inline bool
is_taskloop_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
	 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP;
}

/* Return true if CTX is for an omp parallel or omp task.  */

static inline bool
is_taskreg_ctx (omp_context *ctx)
{
  return is_parallel_ctx (ctx) || is_task_ctx (ctx);
}

/* Return true if EXPR is variable sized.  */

static inline bool
is_variable_sized (const_tree expr)
{
  return !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
}
/* Lookup variables.  The "maybe" form allows the variable to not have
   been entered; otherwise we assert that it must have been.  */

static inline tree
lookup_decl (tree var, omp_context *ctx)
{
  tree *n = ctx->cb.decl_map->get (var);
  return *n;
}

static inline tree
maybe_lookup_decl (const_tree var, omp_context *ctx)
{
  tree *n = ctx->cb.decl_map->get (const_cast<tree> (var));
  return n ? *n : NULL_TREE;
}

static inline tree
lookup_field (tree var, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->field_map, (splay_tree_key) var);
  return (tree) n->value;
}

static inline tree
lookup_sfield (splay_tree_key key, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->sfield_map
			 ? ctx->sfield_map : ctx->field_map, key);
  return (tree) n->value;
}

static inline tree
lookup_sfield (tree var, omp_context *ctx)
{
  return lookup_sfield ((splay_tree_key) var, ctx);
}

static inline tree
maybe_lookup_field (splay_tree_key key, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->field_map, key);
  return n ? (tree) n->value : NULL_TREE;
}

static inline tree
maybe_lookup_field (tree var, omp_context *ctx)
{
  return maybe_lookup_field ((splay_tree_key) var, ctx);
}
/* Return true if DECL should be copied by pointer.  SHARED_CTX is
   the parallel context if DECL is to be shared.  */

static bool
use_pointer_for_field (tree decl, omp_context *shared_ctx)
{
  if (AGGREGATE_TYPE_P (TREE_TYPE (decl))
      || TYPE_ATOMIC (TREE_TYPE (decl)))
    return true;

  /* We can only use copy-in/copy-out semantics for shared variables
     when we know the value is not accessible from an outer scope.  */
  if (shared_ctx)
    {
      gcc_assert (!is_gimple_omp_oacc (shared_ctx->stmt));

      /* ??? Trivially accessible from anywhere.  But why would we even
	 be passing an address in this case?  Should we simply assert
	 this to be false, or should we have a cleanup pass that removes
	 these from the list of mappings?  */
      if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
	return true;

      /* For variables with DECL_HAS_VALUE_EXPR_P set, we cannot tell
	 without analyzing the expression whether or not its location
	 is accessible to anyone else.  In the case of nested parallel
	 regions it certainly may be.  */
      if (TREE_CODE (decl) != RESULT_DECL && DECL_HAS_VALUE_EXPR_P (decl))
	return true;

      /* Do not use copy-in/copy-out for variables that have their
	 address taken.  */
      if (TREE_ADDRESSABLE (decl))
	return true;

      /* lower_send_shared_vars only uses copy-in, but not copy-out
	 for these.  */
      if (TREE_READONLY (decl)
	  || ((TREE_CODE (decl) == RESULT_DECL
	       || TREE_CODE (decl) == PARM_DECL)
	      && DECL_BY_REFERENCE (decl)))
	return false;

      /* Disallow copy-in/out in nested parallel if
	 decl is shared in outer parallel, otherwise
	 each thread could store the shared variable
	 in its own copy-in location, making the
	 variable no longer really shared.  */
      if (shared_ctx->is_nested)
	{
	  omp_context *up;

	  for (up = shared_ctx->outer; up; up = up->outer)
	    if (is_taskreg_ctx (up) && maybe_lookup_decl (decl, up))
	      break;

	  if (up)
	    {
	      tree c;

	      for (c = gimple_omp_taskreg_clauses (up->stmt);
		   c; c = OMP_CLAUSE_CHAIN (c))
		if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
		    && OMP_CLAUSE_DECL (c) == decl)
		  break;

	      if (c)
		goto maybe_mark_addressable_and_ret;
	    }
	}

      /* For tasks avoid using copy-in/out.  As tasks can be
	 deferred or executed in a different thread, when GOMP_task
	 returns, the task hasn't necessarily terminated.  */
      if (is_task_ctx (shared_ctx))
	{
	  tree outer;
	maybe_mark_addressable_and_ret:
	  outer = maybe_lookup_decl_in_outer_ctx (decl, shared_ctx);
	  if (is_gimple_reg (outer) && !omp_member_access_dummy_var (outer))
	    {
	      /* Taking address of OUTER in lower_send_shared_vars
		 might need regimplification of everything that uses the
		 variable.  */
	      if (!task_shared_vars)
		task_shared_vars = BITMAP_ALLOC (NULL);
	      bitmap_set_bit (task_shared_vars, DECL_UID (outer));
	      TREE_ADDRESSABLE (outer) = 1;
	    }
	  return true;
	}
    }

  return false;
}
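
/* In other words (illustrative summary, not an original comment):
   returning false selects copy-in/copy-out, where the field in
   .omp_data_s holds the value of DECL itself and is copied back after
   the region; returning true makes the field hold DECL's address, so
   every access in the outlined region goes through the shared
   location.  */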
/* Construct a new automatic decl similar to VAR.  */

static tree
omp_copy_decl_2 (tree var, tree name, tree type, omp_context *ctx)
{
  tree copy = copy_var_decl (var, name, type);

  DECL_CONTEXT (copy) = current_function_decl;
  DECL_CHAIN (copy) = ctx->block_vars;
  /* If VAR is listed in task_shared_vars, it means it wasn't
     originally addressable and was made addressable only because
     a task needed to take its address.  But we don't need to take
     the address of privatized copies of it.  */
  if (TREE_ADDRESSABLE (var)
      && task_shared_vars
      && bitmap_bit_p (task_shared_vars, DECL_UID (var)))
    TREE_ADDRESSABLE (copy) = 0;
  ctx->block_vars = copy;

  return copy;
}

static tree
omp_copy_decl_1 (tree var, omp_context *ctx)
{
  return omp_copy_decl_2 (var, DECL_NAME (var), TREE_TYPE (var), ctx);
}

/* Build COMPONENT_REF and set TREE_THIS_VOLATILE and TREE_READONLY on it
   as appropriate.  */

static tree
omp_build_component_ref (tree obj, tree field)
{
  tree ret = build3 (COMPONENT_REF, TREE_TYPE (field), obj, field, NULL);
  if (TREE_THIS_VOLATILE (field))
    TREE_THIS_VOLATILE (ret) |= 1;
  if (TREE_READONLY (field))
    TREE_READONLY (ret) |= 1;
  return ret;
}
/* Build tree nodes to access the field for VAR on the receiver side.  */

static tree
build_receiver_ref (tree var, bool by_ref, omp_context *ctx)
{
  tree x, field = lookup_field (var, ctx);

  /* If the receiver record type was remapped in the child function,
     remap the field into the new record type.  */
  x = maybe_lookup_field (field, ctx);
  if (x != NULL)
    field = x;

  x = build_simple_mem_ref (ctx->receiver_decl);
  TREE_THIS_NOTRAP (x) = 1;
  x = omp_build_component_ref (x, field);
  if (by_ref)
    {
      x = build_simple_mem_ref (x);
      TREE_THIS_NOTRAP (x) = 1;
    }

  return x;
}
/* Build tree nodes to access VAR in the scope outer to CTX.  In the case
   of a parallel, this is a component reference; for workshare constructs
   this is some variable.  */

static tree
build_outer_var_ref (tree var, omp_context *ctx,
		     enum omp_clause_code code = OMP_CLAUSE_ERROR)
{
  tree x;

  if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
    x = var;
  else if (is_variable_sized (var))
    {
      x = TREE_OPERAND (DECL_VALUE_EXPR (var), 0);
      x = build_outer_var_ref (x, ctx, code);
      x = build_simple_mem_ref (x);
    }
  else if (is_taskreg_ctx (ctx))
    {
      bool by_ref = use_pointer_for_field (var, NULL);
      x = build_receiver_ref (var, by_ref, ctx);
    }
  else if ((gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
	    && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
	   || (code == OMP_CLAUSE_PRIVATE
	       && (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
		   || gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS
		   || gimple_code (ctx->stmt) == GIMPLE_OMP_SINGLE)))
    {
      /* #pragma omp simd isn't a worksharing construct, and can reference
	 even private vars in its linear etc. clauses.
	 Similarly for OMP_CLAUSE_PRIVATE with outer ref, that can refer
	 to private vars in all worksharing constructs.  */
      x = NULL_TREE;
      if (ctx->outer && is_taskreg_ctx (ctx))
	x = lookup_decl (var, ctx->outer);
      else if (ctx->outer)
	x = maybe_lookup_decl_in_outer_ctx (var, ctx);
      if (x == NULL_TREE)
	x = var;
    }
  else if (code == OMP_CLAUSE_LASTPRIVATE && is_taskloop_ctx (ctx))
    {
      gcc_assert (ctx->outer);
      splay_tree_node n
	= splay_tree_lookup (ctx->outer->field_map,
			     (splay_tree_key) &DECL_UID (var));
      if (n == NULL)
	{
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx->outer)))
	    x = var;
	  else
	    x = lookup_decl (var, ctx->outer);
	}
      else
	{
	  tree field = (tree) n->value;
	  /* If the receiver record type was remapped in the child function,
	     remap the field into the new record type.  */
	  x = maybe_lookup_field (field, ctx->outer);
	  if (x != NULL)
	    field = x;

	  x = build_simple_mem_ref (ctx->outer->receiver_decl);
	  x = omp_build_component_ref (x, field);
	  if (use_pointer_for_field (var, ctx->outer))
	    x = build_simple_mem_ref (x);
	}
    }
  else if (ctx->outer)
    {
      omp_context *outer = ctx->outer;
      if (gimple_code (outer->stmt) == GIMPLE_OMP_GRID_BODY)
	{
	  outer = outer->outer;
	  gcc_assert (outer
		      && gimple_code (outer->stmt) != GIMPLE_OMP_GRID_BODY);
	}
      x = lookup_decl (var, outer);
    }
  else if (omp_is_reference (var))
    /* This can happen with orphaned constructs.  If var is reference, it is
       possible it is shared and as such valid.  */
    x = var;
  else if (omp_member_access_dummy_var (var))
    x = var;
  else
    gcc_unreachable ();

  if (x == var)
    {
      tree t = omp_member_access_dummy_var (var);
      if (t)
	{
	  x = DECL_VALUE_EXPR (var);
	  tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
	  if (o != t)
	    x = unshare_and_remap (x, t, o);
	  else
	    x = unshare_expr (x);
	}
    }

  if (omp_is_reference (var))
    x = build_simple_mem_ref (x);

  return x;
}
/* Build tree nodes to access the field for VAR on the sender side.  */

static tree
build_sender_ref (splay_tree_key key, omp_context *ctx)
{
  tree field = lookup_sfield (key, ctx);
  return omp_build_component_ref (ctx->sender_decl, field);
}

static tree
build_sender_ref (tree var, omp_context *ctx)
{
  return build_sender_ref ((splay_tree_key) var, ctx);
}

/* Add a new field for VAR inside the structure CTX->SENDER_DECL.  If
   BASE_POINTERS_RESTRICT, declare the field with restrict.  */
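
/* MASK semantics, as suggested by the assertions and callers below:
   bit 1 installs the field into FIELD_MAP/RECORD_TYPE, bit 2 into
   SFIELD_MAP/SRECORD_TYPE, bit 4 turns an array into a pointer to
   pointer, and bit 8 keys the maps on &DECL_UID (VAR) rather than on
   VAR itself.  */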
static void
install_var_field (tree var, bool by_ref, int mask, omp_context *ctx,
		   bool base_pointers_restrict = false)
{
  tree field, type, sfield = NULL_TREE;
  splay_tree_key key = (splay_tree_key) var;

  if ((mask & 8) != 0)
    {
      key = (splay_tree_key) &DECL_UID (var);
      gcc_checking_assert (key != (splay_tree_key) var);
    }
  gcc_assert ((mask & 1) == 0
	      || !splay_tree_lookup (ctx->field_map, key));
  gcc_assert ((mask & 2) == 0 || !ctx->sfield_map
	      || !splay_tree_lookup (ctx->sfield_map, key));
  gcc_assert ((mask & 3) == 3
	      || !is_gimple_omp_oacc (ctx->stmt));

  type = TREE_TYPE (var);
  /* Prevent redeclaring the var in the split-off function with a restrict
     pointer type.  Note that we only clear type itself, restrict qualifiers in
     the pointed-to type will be ignored by points-to analysis.  */
  if (POINTER_TYPE_P (type)
      && TYPE_RESTRICT (type))
    type = build_qualified_type (type, TYPE_QUALS (type) & ~TYPE_QUAL_RESTRICT);

  if (mask & 4)
    {
      gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
      type = build_pointer_type (build_pointer_type (type));
    }
  else if (by_ref)
    {
      type = build_pointer_type (type);
      if (base_pointers_restrict)
	type = build_qualified_type (type, TYPE_QUAL_RESTRICT);
    }
  else if ((mask & 3) == 1 && omp_is_reference (var))
    type = TREE_TYPE (type);

  field = build_decl (DECL_SOURCE_LOCATION (var),
		      FIELD_DECL, DECL_NAME (var), type);

  /* Remember what variable this field was created for.  This does have a
     side effect of making dwarf2out ignore this member, so for helpful
     debugging we clear it later in delete_omp_context.  */
  DECL_ABSTRACT_ORIGIN (field) = var;
  if (type == TREE_TYPE (var))
    {
      SET_DECL_ALIGN (field, DECL_ALIGN (var));
      DECL_USER_ALIGN (field) = DECL_USER_ALIGN (var);
      TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (var);
    }
  else
    SET_DECL_ALIGN (field, TYPE_ALIGN (type));

  if ((mask & 3) == 3)
    {
      insert_field_into_struct (ctx->record_type, field);
      if (ctx->srecord_type)
	{
	  sfield = build_decl (DECL_SOURCE_LOCATION (var),
			       FIELD_DECL, DECL_NAME (var), type);
	  DECL_ABSTRACT_ORIGIN (sfield) = var;
	  SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
	  DECL_USER_ALIGN (sfield) = DECL_USER_ALIGN (field);
	  TREE_THIS_VOLATILE (sfield) = TREE_THIS_VOLATILE (field);
	  insert_field_into_struct (ctx->srecord_type, sfield);
	}
    }
  else
    {
      if (ctx->srecord_type == NULL_TREE)
	{
	  tree t;

	  ctx->srecord_type = lang_hooks.types.make_type (RECORD_TYPE);
	  ctx->sfield_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
	  for (t = TYPE_FIELDS (ctx->record_type); t ; t = TREE_CHAIN (t))
	    {
	      sfield = build_decl (DECL_SOURCE_LOCATION (t),
				   FIELD_DECL, DECL_NAME (t), TREE_TYPE (t));
	      DECL_ABSTRACT_ORIGIN (sfield) = DECL_ABSTRACT_ORIGIN (t);
	      insert_field_into_struct (ctx->srecord_type, sfield);
	      splay_tree_insert (ctx->sfield_map,
				 (splay_tree_key) DECL_ABSTRACT_ORIGIN (t),
				 (splay_tree_value) sfield);
	    }
	}
      sfield = field;
      insert_field_into_struct ((mask & 1) ? ctx->record_type
				: ctx->srecord_type, field);
    }

  if (mask & 1)
    splay_tree_insert (ctx->field_map, key, (splay_tree_value) field);
  if ((mask & 2) && ctx->sfield_map)
    splay_tree_insert (ctx->sfield_map, key, (splay_tree_value) sfield);
}
static tree
install_var_local (tree var, omp_context *ctx)
{
  tree new_var = omp_copy_decl_1 (var, ctx);
  insert_decl_map (&ctx->cb, var, new_var);
  return new_var;
}

/* Adjust the replacement for DECL in CTX for the new context.  This means
   copying the DECL_VALUE_EXPR, and fixing up the type.  */

static void
fixup_remapped_decl (tree decl, omp_context *ctx, bool private_debug)
{
  tree new_decl, size;

  new_decl = lookup_decl (decl, ctx);

  TREE_TYPE (new_decl) = remap_type (TREE_TYPE (decl), &ctx->cb);

  if ((!TREE_CONSTANT (DECL_SIZE (new_decl)) || private_debug)
      && DECL_HAS_VALUE_EXPR_P (decl))
    {
      tree ve = DECL_VALUE_EXPR (decl);
      walk_tree (&ve, copy_tree_body_r, &ctx->cb, NULL);
      SET_DECL_VALUE_EXPR (new_decl, ve);
      DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
    }

  if (!TREE_CONSTANT (DECL_SIZE (new_decl)))
    {
      size = remap_decl (DECL_SIZE (decl), &ctx->cb);
      if (size == error_mark_node)
	size = TYPE_SIZE (TREE_TYPE (new_decl));
      DECL_SIZE (new_decl) = size;

      size = remap_decl (DECL_SIZE_UNIT (decl), &ctx->cb);
      if (size == error_mark_node)
	size = TYPE_SIZE_UNIT (TREE_TYPE (new_decl));
      DECL_SIZE_UNIT (new_decl) = size;
    }
}
/* The callback for remap_decl.  Search all containing contexts for a
   mapping of the variable; this avoids having to duplicate the splay
   tree ahead of time.  We know a mapping doesn't already exist in the
   given context.  Create new mappings to implement default semantics.  */

static tree
omp_copy_decl (tree var, copy_body_data *cb)
{
  omp_context *ctx = (omp_context *) cb;
  tree new_var;

  if (TREE_CODE (var) == LABEL_DECL)
    {
      new_var = create_artificial_label (DECL_SOURCE_LOCATION (var));
      DECL_CONTEXT (new_var) = current_function_decl;
      insert_decl_map (&ctx->cb, var, new_var);
      return new_var;
    }

  while (!is_taskreg_ctx (ctx))
    {
      ctx = ctx->outer;
      if (ctx == NULL)
	return var;
      new_var = maybe_lookup_decl (var, ctx);
      if (new_var)
	return new_var;
    }

  if (is_global_var (var) || decl_function_context (var) != ctx->cb.src_fn)
    return var;

  return error_mark_node;
}
/* Create a new context, with OUTER_CTX being the surrounding context.  */

static omp_context *
new_omp_context (gimple *stmt, omp_context *outer_ctx)
{
  omp_context *ctx = XCNEW (omp_context);

  splay_tree_insert (all_contexts, (splay_tree_key) stmt,
		     (splay_tree_value) ctx);
  ctx->stmt = stmt;

  if (outer_ctx)
    {
      ctx->outer = outer_ctx;
      ctx->cb = outer_ctx->cb;
      ctx->cb.block = NULL;
      ctx->depth = outer_ctx->depth + 1;
    }
  else
    {
      ctx->cb.src_fn = current_function_decl;
      ctx->cb.dst_fn = current_function_decl;
      ctx->cb.src_node = cgraph_node::get (current_function_decl);
      gcc_checking_assert (ctx->cb.src_node);
      ctx->cb.dst_node = ctx->cb.src_node;
      ctx->cb.src_cfun = cfun;
      ctx->cb.copy_decl = omp_copy_decl;
      ctx->cb.eh_lp_nr = 0;
      ctx->cb.transform_call_graph_edges = CB_CGE_MOVE;
      ctx->depth = 1;
    }

  ctx->cb.decl_map = new hash_map<tree, tree>;

  return ctx;
}
static gimple_seq maybe_catch_exception (gimple_seq);

/* Finalize task copyfn.  */

static void
finalize_task_copyfn (gomp_task *task_stmt)
{
  struct function *child_cfun;
  tree child_fn;
  gimple_seq seq = NULL, new_seq;
  gbind *bind;

  child_fn = gimple_omp_task_copy_fn (task_stmt);
  if (child_fn == NULL_TREE)
    return;

  child_cfun = DECL_STRUCT_FUNCTION (child_fn);
  DECL_STRUCT_FUNCTION (child_fn)->curr_properties = cfun->curr_properties;

  push_cfun (child_cfun);
  bind = gimplify_body (child_fn, false);
  gimple_seq_add_stmt (&seq, bind);
  new_seq = maybe_catch_exception (seq);
  if (new_seq != seq)
    {
      bind = gimple_build_bind (NULL, new_seq, NULL);
      seq = NULL;
      gimple_seq_add_stmt (&seq, bind);
    }
  gimple_set_body (child_fn, seq);
  pop_cfun ();

  /* Inform the callgraph about the new function.  */
  cgraph_node *node = cgraph_node::get_create (child_fn);
  node->parallelized_function = 1;
  cgraph_node::add_new_function (child_fn, false);
}
/* Destroy an omp_context data structure.  Called through the splay tree
   value delete callback.  */

static void
delete_omp_context (splay_tree_value value)
{
  omp_context *ctx = (omp_context *) value;

  delete ctx->cb.decl_map;

  if (ctx->field_map)
    splay_tree_delete (ctx->field_map);
  if (ctx->sfield_map)
    splay_tree_delete (ctx->sfield_map);

  /* We hijacked DECL_ABSTRACT_ORIGIN earlier.  We need to clear it before
     it produces corrupt debug information.  */
  if (ctx->record_type)
    {
      tree t;
      for (t = TYPE_FIELDS (ctx->record_type); t ; t = DECL_CHAIN (t))
	DECL_ABSTRACT_ORIGIN (t) = NULL;
    }
  if (ctx->srecord_type)
    {
      tree t;
      for (t = TYPE_FIELDS (ctx->srecord_type); t ; t = DECL_CHAIN (t))
	DECL_ABSTRACT_ORIGIN (t) = NULL;
    }

  if (is_task_ctx (ctx))
    finalize_task_copyfn (as_a <gomp_task *> (ctx->stmt));

  XDELETE (ctx);
}
/* Fix up RECEIVER_DECL with a type that has been remapped to the child
   context.  */

static void
fixup_child_record_type (omp_context *ctx)
{
  tree f, type = ctx->record_type;

  if (!ctx->receiver_decl)
    return;
  /* ??? It isn't sufficient to just call remap_type here, because
     variably_modified_type_p doesn't work the way we expect for
     record types.  Testing each field for whether it needs remapping
     and creating a new record by hand works, however.  */
  for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
    if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
      break;
  if (f)
    {
      tree name, new_fields = NULL;

      type = lang_hooks.types.make_type (RECORD_TYPE);
      name = DECL_NAME (TYPE_NAME (ctx->record_type));
      name = build_decl (DECL_SOURCE_LOCATION (ctx->receiver_decl),
			 TYPE_DECL, name, type);
      TYPE_NAME (type) = name;

      for (f = TYPE_FIELDS (ctx->record_type); f ; f = DECL_CHAIN (f))
	{
	  tree new_f = copy_node (f);
	  DECL_CONTEXT (new_f) = type;
	  TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &ctx->cb);
	  DECL_CHAIN (new_f) = new_fields;
	  walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &ctx->cb, NULL);
	  walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r,
		     &ctx->cb, NULL);
	  walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
		     &ctx->cb, NULL);
	  new_fields = new_f;

	  /* Arrange to be able to look up the receiver field
	     given the sender field.  */
	  splay_tree_insert (ctx->field_map, (splay_tree_key) f,
			     (splay_tree_value) new_f);
	}
      TYPE_FIELDS (type) = nreverse (new_fields);
      layout_type (type);
    }

  /* In a target region we never modify any of the pointers in *.omp_data_i,
     so attempt to help the optimizers.  */
  if (is_gimple_omp_offloaded (ctx->stmt))
    type = build_qualified_type (type, TYPE_QUAL_CONST);

  TREE_TYPE (ctx->receiver_decl)
    = build_qualified_type (build_reference_type (type), TYPE_QUAL_RESTRICT);
}
/* Instantiate decls as necessary in CTX to satisfy the data sharing
   specified by CLAUSES.  If BASE_POINTERS_RESTRICT, install var field with
   restrict.  */

static void
scan_sharing_clauses (tree clauses, omp_context *ctx,
		      bool base_pointers_restrict = false)
{
  tree c, decl;
  bool scan_array_reductions = false;

  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      bool by_ref;

      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_PRIVATE:
	  decl = OMP_CLAUSE_DECL (c);
	  if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
	    goto do_private;
	  else if (!is_variable_sized (decl))
	    install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_SHARED:
	  decl = OMP_CLAUSE_DECL (c);
	  /* Ignore shared directives in teams construct.  */
	  if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
	    {
	      /* Global variables don't need to be copied,
		 the receiver side will use them directly.  */
	      tree odecl = maybe_lookup_decl_in_outer_ctx (decl, ctx);
	      if (is_global_var (odecl))
		break;
	      insert_decl_map (&ctx->cb, decl, odecl);
	      break;
	    }
	  gcc_assert (is_taskreg_ctx (ctx));
	  gcc_assert (!COMPLETE_TYPE_P (TREE_TYPE (decl))
		      || !is_variable_sized (decl));
	  /* Global variables don't need to be copied,
	     the receiver side will use them directly.  */
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
	    break;
	  if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	    {
	      use_pointer_for_field (decl, ctx);
	      break;
	    }
	  by_ref = use_pointer_for_field (decl, NULL);
	  if ((! TREE_READONLY (decl) && !OMP_CLAUSE_SHARED_READONLY (c))
	      || TREE_ADDRESSABLE (decl)
	      || by_ref
	      || omp_is_reference (decl))
	    {
	      by_ref = use_pointer_for_field (decl, ctx);
	      install_var_field (decl, by_ref, 3, ctx);
	      install_var_local (decl, ctx);
	      break;
	    }
	  /* We don't need to copy const scalar vars back.  */
	  OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_FIRSTPRIVATE);
	  goto do_private;

	case OMP_CLAUSE_REDUCTION:
	  decl = OMP_CLAUSE_DECL (c);
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	      && TREE_CODE (decl) == MEM_REF)
	    {
	      tree t = TREE_OPERAND (decl, 0);
	      if (TREE_CODE (t) == POINTER_PLUS_EXPR)
		t = TREE_OPERAND (t, 0);
	      if (TREE_CODE (t) == INDIRECT_REF
		  || TREE_CODE (t) == ADDR_EXPR)
		t = TREE_OPERAND (t, 0);
	      install_var_local (t, ctx);
	      if (is_taskreg_ctx (ctx)
		  && !is_global_var (maybe_lookup_decl_in_outer_ctx (t, ctx))
		  && !is_variable_sized (t))
		{
		  by_ref = use_pointer_for_field (t, ctx);
		  install_var_field (t, by_ref, 3, ctx);
		}
	      break;
	    }
	  goto do_private;

	case OMP_CLAUSE_LASTPRIVATE:
	  /* Let the corresponding firstprivate clause create
	     the variable.  */
	  if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
	    break;
	  /* FALLTHRU */

	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_LINEAR:
	  decl = OMP_CLAUSE_DECL (c);
	do_private:
	  if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
	       || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
	      && is_gimple_omp_offloaded (ctx->stmt))
	    {
	      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
		install_var_field (decl, !omp_is_reference (decl), 3, ctx);
	      else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
		install_var_field (decl, true, 3, ctx);
	      else
		install_var_field (decl, false, 3, ctx);
	    }
	  if (is_variable_sized (decl))
	    {
	      if (is_task_ctx (ctx))
		install_var_field (decl, false, 1, ctx);
	      break;
	    }
	  else if (is_taskreg_ctx (ctx))
	    {
	      bool global
		= is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx));
	      by_ref = use_pointer_for_field (decl, NULL);

	      if (is_task_ctx (ctx)
		  && (global || by_ref || omp_is_reference (decl)))
		{
		  install_var_field (decl, false, 1, ctx);
		  if (!global)
		    install_var_field (decl, by_ref, 2, ctx);
		}
	      else if (!global)
		install_var_field (decl, by_ref, 3, ctx);
	    }
	  install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_USE_DEVICE_PTR:
	  decl = OMP_CLAUSE_DECL (c);
	  if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	    install_var_field (decl, true, 3, ctx);
	  else
	    install_var_field (decl, false, 3, ctx);
	  if (DECL_SIZE (decl)
	      && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
	    {
	      tree decl2 = DECL_VALUE_EXPR (decl);
	      gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
	      decl2 = TREE_OPERAND (decl2, 0);
	      gcc_assert (DECL_P (decl2));
	      install_var_local (decl2, ctx);
	    }
	  install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_IS_DEVICE_PTR:
	  decl = OMP_CLAUSE_DECL (c);
	  goto do_private;

	case OMP_CLAUSE__LOOPTEMP_:
	  gcc_assert (is_taskreg_ctx (ctx));
	  decl = OMP_CLAUSE_DECL (c);
	  install_var_field (decl, false, 3, ctx);
	  install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_COPYIN:
	  decl = OMP_CLAUSE_DECL (c);
	  by_ref = use_pointer_for_field (decl, NULL);
	  install_var_field (decl, by_ref, 3, ctx);
	  break;

	case OMP_CLAUSE_DEFAULT:
	  ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
	  break;

	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_DIST_SCHEDULE:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE_PRIORITY:
	case OMP_CLAUSE_GRAINSIZE:
	case OMP_CLAUSE_NUM_TASKS:
	case OMP_CLAUSE__CILK_FOR_COUNT_:
	case OMP_CLAUSE_NUM_GANGS:
	case OMP_CLAUSE_NUM_WORKERS:
	case OMP_CLAUSE_VECTOR_LENGTH:
	  if (ctx->outer)
	    scan_omp_op (&OMP_CLAUSE_OPERAND (c, 0), ctx->outer);
	  break;

	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	case OMP_CLAUSE_MAP:
	  if (ctx->outer)
	    scan_omp_op (&OMP_CLAUSE_SIZE (c), ctx->outer);
	  decl = OMP_CLAUSE_DECL (c);
	  /* Global variables with "omp declare target" attribute
	     don't need to be copied, the receiver side will use them
	     directly.  However, global variables with "omp declare target link"
	     attribute need to be copied.  */
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && DECL_P (decl)
	      && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
		   && (OMP_CLAUSE_MAP_KIND (c)
		       != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
		  || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	      && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
	      && varpool_node::get_create (decl)->offloadable
	      && !lookup_attribute ("omp declare target link",
				    DECL_ATTRIBUTES (decl)))
	    break;
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER)
	    {
	      /* Ignore GOMP_MAP_POINTER kind for arrays in regions that are
		 not offloaded; there is nothing to map for those.  */
	      if (!is_gimple_omp_offloaded (ctx->stmt)
		  && !POINTER_TYPE_P (TREE_TYPE (decl))
		  && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
		break;
	    }
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
		  || (OMP_CLAUSE_MAP_KIND (c)
		      == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
	    {
	      if (TREE_CODE (decl) == COMPONENT_REF
		  || (TREE_CODE (decl) == INDIRECT_REF
		      && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
		      && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
			  == REFERENCE_TYPE)))
		break;
	      if (DECL_SIZE (decl)
		  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  install_var_local (decl2, ctx);
		}
	      install_var_local (decl, ctx);
	      break;
	    }
	  if (DECL_P (decl))
	    {
	      if (DECL_SIZE (decl)
		  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  install_var_field (decl2, true, 3, ctx);
		  install_var_local (decl2, ctx);
		  install_var_local (decl, ctx);
		}
	      else
		{
		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
		      && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
		      && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
		      && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
		    install_var_field (decl, true, 7, ctx);
		  else
		    install_var_field (decl, true, 3, ctx,
				       base_pointers_restrict);
		  if (is_gimple_omp_offloaded (ctx->stmt)
		      && !OMP_CLAUSE_MAP_IN_REDUCTION (c))
		    install_var_local (decl, ctx);
		}
	    }
	  else
	    {
	      tree base = get_base_address (decl);
	      tree nc = OMP_CLAUSE_CHAIN (c);
	      if (DECL_P (base)
		  && nc != NULL_TREE
		  && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
		  && OMP_CLAUSE_DECL (nc) == base
		  && OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_POINTER
		  && integer_zerop (OMP_CLAUSE_SIZE (nc)))
		{
		  OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c) = 1;
		  OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (nc) = 1;
		}
	      else
		{
		  if (ctx->outer)
		    {
		      scan_omp_op (&OMP_CLAUSE_DECL (c), ctx->outer);
		      decl = OMP_CLAUSE_DECL (c);
		    }
		  gcc_assert (!splay_tree_lookup (ctx->field_map,
						  (splay_tree_key) decl));
		  tree field
		    = build_decl (OMP_CLAUSE_LOCATION (c),
				  FIELD_DECL, NULL_TREE, ptr_type_node);
		  SET_DECL_ALIGN (field, TYPE_ALIGN (ptr_type_node));
		  insert_field_into_struct (ctx->record_type, field);
		  splay_tree_insert (ctx->field_map, (splay_tree_key) decl,
				     (splay_tree_value) field);
		}
	    }
	  break;

	case OMP_CLAUSE__GRIDDIM_:
	  if (ctx->outer)
	    {
	      scan_omp_op (&OMP_CLAUSE__GRIDDIM__SIZE (c), ctx->outer);
	      scan_omp_op (&OMP_CLAUSE__GRIDDIM__GROUP (c), ctx->outer);
	    }
	  break;

	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE_SIMDLEN:
	case OMP_CLAUSE_THREADS:
	case OMP_CLAUSE_SIMD:
	case OMP_CLAUSE_NOGROUP:
	case OMP_CLAUSE_DEFAULTMAP:
	case OMP_CLAUSE_ASYNC:
	case OMP_CLAUSE_WAIT:
	case OMP_CLAUSE_GANG:
	case OMP_CLAUSE_WORKER:
	case OMP_CLAUSE_VECTOR:
	case OMP_CLAUSE_INDEPENDENT:
	case OMP_CLAUSE_AUTO:
	case OMP_CLAUSE_SEQ:
	case OMP_CLAUSE__SIMT_:
	  break;

	case OMP_CLAUSE_ALIGNED:
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_global_var (decl)
	      && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	    install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_TILE:
	case OMP_CLAUSE__CACHE_:
	default:
	  gcc_unreachable ();
	}
    }
  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_LASTPRIVATE:
	  /* Let the corresponding firstprivate clause create
	     the variable.  */
	  if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
	    scan_array_reductions = true;
	  if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
	    break;
	  /* FALLTHRU */

	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_LINEAR:
	case OMP_CLAUSE_IS_DEVICE_PTR:
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_variable_sized (decl))
	    {
	      if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
		   || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
		  && is_gimple_omp_offloaded (ctx->stmt))
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  install_var_local (decl2, ctx);
		  fixup_remapped_decl (decl2, ctx, false);
		}
	      install_var_local (decl, ctx);
	    }
	  fixup_remapped_decl (decl, ctx,
			       OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
			       && OMP_CLAUSE_PRIVATE_DEBUG (c));
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
	      && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
	    scan_array_reductions = true;
	  break;

	case OMP_CLAUSE_REDUCTION:
	  decl = OMP_CLAUSE_DECL (c);
	  if (TREE_CODE (decl) != MEM_REF)
	    {
	      if (is_variable_sized (decl))
		install_var_local (decl, ctx);
	      fixup_remapped_decl (decl, ctx, false);
	    }
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	    scan_array_reductions = true;
	  break;

	case OMP_CLAUSE_SHARED:
	  /* Ignore shared directives in teams construct.  */
	  if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
	    break;
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
	    break;
	  if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	    {
	      if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl,
								 ctx->outer)))
		break;
	      bool by_ref = use_pointer_for_field (decl, ctx);
	      install_var_field (decl, by_ref, 11, ctx);
	      break;
	    }
	  fixup_remapped_decl (decl, ctx, false);
	  break;

	case OMP_CLAUSE_MAP:
	  if (!is_gimple_omp_offloaded (ctx->stmt))
	    break;
	  decl = OMP_CLAUSE_DECL (c);
	  if (DECL_P (decl)
	      && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
		   && (OMP_CLAUSE_MAP_KIND (c)
		       != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
		  || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	      && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
	      && varpool_node::get_create (decl)->offloadable)
	    break;
	  if (DECL_P (decl))
	    {
	      if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
		   || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
		  && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
		  && !COMPLETE_TYPE_P (TREE_TYPE (decl)))
		{
		  tree new_decl = lookup_decl (decl, ctx);
		  TREE_TYPE (new_decl)
		    = remap_type (TREE_TYPE (decl), &ctx->cb);
		}
	      else if (DECL_SIZE (decl)
		       && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  fixup_remapped_decl (decl2, ctx, false);
		  fixup_remapped_decl (decl, ctx, true);
		}
	      else
		fixup_remapped_decl (decl, ctx, false);
	    }
	  break;

	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_DIST_SCHEDULE:
	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE_SIMDLEN:
	case OMP_CLAUSE_ALIGNED:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE__LOOPTEMP_:
	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	case OMP_CLAUSE_PRIORITY:
	case OMP_CLAUSE_GRAINSIZE:
	case OMP_CLAUSE_NUM_TASKS:
	case OMP_CLAUSE_THREADS:
	case OMP_CLAUSE_SIMD:
	case OMP_CLAUSE_NOGROUP:
	case OMP_CLAUSE_DEFAULTMAP:
	case OMP_CLAUSE_USE_DEVICE_PTR:
	case OMP_CLAUSE__CILK_FOR_COUNT_:
	case OMP_CLAUSE_ASYNC:
	case OMP_CLAUSE_WAIT:
	case OMP_CLAUSE_NUM_GANGS:
	case OMP_CLAUSE_NUM_WORKERS:
	case OMP_CLAUSE_VECTOR_LENGTH:
	case OMP_CLAUSE_GANG:
	case OMP_CLAUSE_WORKER:
	case OMP_CLAUSE_VECTOR:
	case OMP_CLAUSE_INDEPENDENT:
	case OMP_CLAUSE_AUTO:
	case OMP_CLAUSE_SEQ:
	case OMP_CLAUSE__GRIDDIM_:
	case OMP_CLAUSE__SIMT_:
	  break;

	case OMP_CLAUSE_TILE:
	case OMP_CLAUSE__CACHE_:
	default:
	  gcc_unreachable ();
	}
    }

  gcc_checking_assert (!scan_array_reductions
		       || !is_gimple_omp_oacc (ctx->stmt));
  if (scan_array_reductions)
    {
      for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	    && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	  {
	    scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c), ctx);
	    scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
	  }
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
		 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
	  scan_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
		 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
	  scan_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
    }
}
/* Create a new name for omp child function.  Returns an identifier.  If
   IS_CILK_FOR is true then the suffix for the child function is
   "_cilk_for_fn".  */

static tree
create_omp_child_function_name (bool task_copy, bool is_cilk_for)
{
  if (is_cilk_for)
    return clone_function_name (current_function_decl, "_cilk_for_fn");
  return clone_function_name (current_function_decl,
			      task_copy ? "_omp_cpyfn" : "_omp_fn");
}

/* Returns the type of the induction variable for the child function for
   _Cilk_for and the types for _high and _low variables based on TYPE.  */

static tree
cilk_for_check_loop_diff_type (tree type)
{
  if (TYPE_PRECISION (type) <= TYPE_PRECISION (uint32_type_node))
    {
      if (TYPE_UNSIGNED (type))
	return uint32_type_node;
      else
	return integer_type_node;
    }
  else
    {
      if (TYPE_UNSIGNED (type))
	return uint64_type_node;
      else
	return long_long_integer_type_node;
    }
}
/* Return true if CTX may belong to offloaded code: either if current function
   is offloaded, or any enclosing context corresponds to a target region.  */

static bool
omp_maybe_offloaded_ctx (omp_context *ctx)
{
  if (cgraph_node::get (current_function_decl)->offloadable)
    return true;
  for (; ctx; ctx = ctx->outer)
    if (is_gimple_omp_offloaded (ctx->stmt))
      return true;
  return false;
}
/* Build a decl for the omp child function.  It'll not contain a body
   yet, just the bare decl.  */

static void
create_omp_child_function (omp_context *ctx, bool task_copy)
{
  tree decl, type, name, t;

  tree cilk_for_count
    = (flag_cilkplus && gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL)
      ? omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
			 OMP_CLAUSE__CILK_FOR_COUNT_) : NULL_TREE;
  tree cilk_var_type = NULL_TREE;

  name = create_omp_child_function_name (task_copy,
					 cilk_for_count != NULL_TREE);
  if (task_copy)
    type = build_function_type_list (void_type_node, ptr_type_node,
				     ptr_type_node, NULL_TREE);
  else if (cilk_for_count)
    {
      type = TREE_TYPE (OMP_CLAUSE_OPERAND (cilk_for_count, 0));
      cilk_var_type = cilk_for_check_loop_diff_type (type);
      type = build_function_type_list (void_type_node, ptr_type_node,
				       cilk_var_type, cilk_var_type, NULL_TREE);
    }
  else
    type = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);

  decl = build_decl (gimple_location (ctx->stmt), FUNCTION_DECL, name, type);

  gcc_checking_assert (!is_gimple_omp_oacc (ctx->stmt)
		       || !task_copy);
  if (!task_copy)
    ctx->cb.dst_fn = decl;
  else
    gimple_omp_task_set_copy_fn (ctx->stmt, decl);

  TREE_STATIC (decl) = 1;
  TREE_USED (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  DECL_IGNORED_P (decl) = 0;
  TREE_PUBLIC (decl) = 0;
  DECL_UNINLINABLE (decl) = 1;
  DECL_EXTERNAL (decl) = 0;
  DECL_CONTEXT (decl) = NULL_TREE;
  DECL_INITIAL (decl) = make_node (BLOCK);
  BLOCK_SUPERCONTEXT (DECL_INITIAL (decl)) = decl;
  if (omp_maybe_offloaded_ctx (ctx))
    {
      cgraph_node::get_create (decl)->offloadable = 1;
      if (ENABLE_OFFLOADING)
	g->have_offload = true;
    }

  if (cgraph_node::get_create (decl)->offloadable
      && !lookup_attribute ("omp declare target",
			    DECL_ATTRIBUTES (current_function_decl)))
    {
      const char *target_attr = (is_gimple_omp_offloaded (ctx->stmt)
				 ? "omp target entrypoint"
				 : "omp declare target");
      DECL_ATTRIBUTES (decl)
	= tree_cons (get_identifier (target_attr),
		     NULL_TREE, DECL_ATTRIBUTES (decl));
    }

  t = build_decl (DECL_SOURCE_LOCATION (decl),
		  RESULT_DECL, NULL_TREE, void_type_node);
  DECL_ARTIFICIAL (t) = 1;
  DECL_IGNORED_P (t) = 1;
  DECL_CONTEXT (t) = decl;
  DECL_RESULT (decl) = t;

  /* _Cilk_for's child function requires two extra parameters called
     __low and __high that are set by the Cilk runtime when it calls this
     function.  */
  if (cilk_for_count)
    {
      t = build_decl (DECL_SOURCE_LOCATION (decl),
		      PARM_DECL, get_identifier ("__high"), cilk_var_type);
      DECL_ARTIFICIAL (t) = 1;
      DECL_NAMELESS (t) = 1;
      DECL_ARG_TYPE (t) = ptr_type_node;
      DECL_CONTEXT (t) = current_function_decl;
      TREE_USED (t) = 1;
      DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
      DECL_ARGUMENTS (decl) = t;

      t = build_decl (DECL_SOURCE_LOCATION (decl),
		      PARM_DECL, get_identifier ("__low"), cilk_var_type);
      DECL_ARTIFICIAL (t) = 1;
      DECL_NAMELESS (t) = 1;
      DECL_ARG_TYPE (t) = ptr_type_node;
      DECL_CONTEXT (t) = current_function_decl;
      TREE_USED (t) = 1;
      DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
      DECL_ARGUMENTS (decl) = t;
    }

  tree data_name = get_identifier (".omp_data_i");
  t = build_decl (DECL_SOURCE_LOCATION (decl), PARM_DECL, data_name,
		  ptr_type_node);
  DECL_ARTIFICIAL (t) = 1;
  DECL_NAMELESS (t) = 1;
  DECL_ARG_TYPE (t) = ptr_type_node;
  DECL_CONTEXT (t) = current_function_decl;
  TREE_USED (t) = 1;
  TREE_READONLY (t) = 1;
  if (cilk_for_count)
    DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
  DECL_ARGUMENTS (decl) = t;
  if (!task_copy)
    ctx->receiver_decl = t;
  else
    {
      t = build_decl (DECL_SOURCE_LOCATION (decl),
		      PARM_DECL, get_identifier (".omp_data_o"),
		      ptr_type_node);
      DECL_ARTIFICIAL (t) = 1;
      DECL_NAMELESS (t) = 1;
      DECL_ARG_TYPE (t) = ptr_type_node;
      DECL_CONTEXT (t) = current_function_decl;
      TREE_USED (t) = 1;
      TREE_ADDRESSABLE (t) = 1;
      DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
      DECL_ARGUMENTS (decl) = t;
    }

  /* Allocate memory for the function structure.  The call to
     allocate_struct_function clobbers CFUN, so we need to restore
     it afterward.  */
  push_struct_function (decl);
  cfun->function_end_locus = gimple_location (ctx->stmt);
  init_tree_ssa (cfun);
  pop_cfun ();
}
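
/* For reference (a sketch of the resulting signatures, matching the
   function types built above): a parallel or task child function is
   created as

	void foo._omp_fn.N (void *.omp_data_i);

   while a task copy function additionally receives the destination
   record as its first argument:

	void foo._omp_cpyfn.N (void *.omp_data_o, void *.omp_data_i);

   (with _Cilk_for, the __low/__high bounds parameters are appended
   instead).  */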
/* Callback for walk_gimple_seq.  Check if combined parallel
   contains gimple_omp_for_combined_into_p OMP_FOR.  */

tree
omp_find_combined_for (gimple_stmt_iterator *gsi_p,
		       bool *handled_ops_p,
		       struct walk_stmt_info *wi)
{
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;
  switch (gimple_code (stmt))
    {
    WALK_SUBSTMTS;

    case GIMPLE_OMP_FOR:
      if (gimple_omp_for_combined_into_p (stmt)
	  && gimple_omp_for_kind (stmt)
	     == *(const enum gf_mask *) (wi->info))
	{
	  wi->info = stmt;
	  return integer_zero_node;
	}
      break;
    default:
      break;
    }
  return NULL;
}
/* Add _LOOPTEMP_ clauses on OpenMP parallel or task.  */

static void
add_taskreg_looptemp_clauses (enum gf_mask msk, gimple *stmt,
			      omp_context *outer_ctx)
{
  struct walk_stmt_info wi;

  memset (&wi, 0, sizeof (wi));
  wi.val_only = true;
  wi.info = (void *) &msk;
  walk_gimple_seq (gimple_omp_body (stmt), omp_find_combined_for, NULL, &wi);
  if (wi.info != (void *) &msk)
    {
      gomp_for *for_stmt = as_a <gomp_for *> ((gimple *) wi.info);
      struct omp_for_data fd;
      omp_extract_for_data (for_stmt, &fd, NULL);
      /* We need two temporaries with fd.loop.v type (istart/iend)
	 and then (fd.collapse - 1) temporaries with the same
	 type for count2 ... countN-1 vars if not constant.  */
      size_t count = 2, i;
      tree type = fd.iter_type;
      if (fd.collapse > 1
	  && TREE_CODE (fd.loop.n2) != INTEGER_CST)
	{
	  count += fd.collapse - 1;
	  /* If there are lastprivate clauses on the inner
	     GIMPLE_OMP_FOR, add one more temporary for the total number
	     of iterations (product of count1 ... countN-1).  */
	  if (omp_find_clause (gimple_omp_for_clauses (for_stmt),
			       OMP_CLAUSE_LASTPRIVATE))
	    count++;
	  else if (msk == GF_OMP_FOR_KIND_FOR
		   && omp_find_clause (gimple_omp_parallel_clauses (stmt),
				       OMP_CLAUSE_LASTPRIVATE))
	    count++;
	}
      for (i = 0; i < count; i++)
	{
	  tree temp = create_tmp_var (type);
	  tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
	  insert_decl_map (&outer_ctx->cb, temp, temp);
	  OMP_CLAUSE_DECL (c) = temp;
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
	  gimple_omp_taskreg_set_clauses (stmt, c);
	}
    }
}
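
/* Illustrative note: for a combined construct such as

	#pragma omp parallel for

   the _LOOPTEMP_ clauses added above give the enclosing parallel or
   task a place to pass the istart/iend bounds (and, with collapsed
   non-constant loops, the intermediate iteration counts) down to the
   inner GIMPLE_OMP_FOR.  */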
/* Scan an OpenMP parallel directive.  */

static void
scan_omp_parallel (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
{
  omp_context *ctx;
  tree name;
  gomp_parallel *stmt = as_a <gomp_parallel *> (gsi_stmt (*gsi));

  /* Ignore parallel directives with empty bodies, unless there
     are copyin clauses.  */
  if (optimize > 0
      && empty_body_p (gimple_omp_body (stmt))
      && omp_find_clause (gimple_omp_parallel_clauses (stmt),
			  OMP_CLAUSE_COPYIN) == NULL)
    {
      gsi_replace (gsi, gimple_build_nop (), false);
      return;
    }

  if (gimple_omp_parallel_combined_p (stmt))
    add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_FOR, stmt, outer_ctx);

  ctx = new_omp_context (stmt, outer_ctx);
  taskreg_contexts.safe_push (ctx);
  if (taskreg_nesting_level > 1)
    ctx->is_nested = true;
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  ctx->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  name = create_tmp_var_name (".omp_data_s");
  name = build_decl (gimple_location (stmt),
		     TYPE_DECL, name, ctx->record_type);
  DECL_ARTIFICIAL (name) = 1;
  DECL_NAMELESS (name) = 1;
  TYPE_NAME (ctx->record_type) = name;
  TYPE_ARTIFICIAL (ctx->record_type) = 1;
  if (!gimple_omp_parallel_grid_phony (stmt))
    {
      create_omp_child_function (ctx, false);
      gimple_omp_parallel_set_child_fn (stmt, ctx->cb.dst_fn);
    }

  scan_sharing_clauses (gimple_omp_parallel_clauses (stmt), ctx);
  scan_omp (gimple_omp_body_ptr (stmt), ctx);

  if (TYPE_FIELDS (ctx->record_type) == NULL)
    ctx->record_type = ctx->receiver_decl = NULL;
}
/* Scan an OpenMP task directive.  */

static void
scan_omp_task (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
{
  omp_context *ctx;
  tree name, t;
  gomp_task *stmt = as_a <gomp_task *> (gsi_stmt (*gsi));

  /* Ignore task directives with empty bodies.  */
  if (optimize > 0
      && empty_body_p (gimple_omp_body (stmt)))
    {
      gsi_replace (gsi, gimple_build_nop (), false);
      return;
    }

  if (gimple_omp_task_taskloop_p (stmt))
    add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_TASKLOOP, stmt, outer_ctx);

  ctx = new_omp_context (stmt, outer_ctx);
  taskreg_contexts.safe_push (ctx);
  if (taskreg_nesting_level > 1)
    ctx->is_nested = true;
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  ctx->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  name = create_tmp_var_name (".omp_data_s");
  name = build_decl (gimple_location (stmt),
		     TYPE_DECL, name, ctx->record_type);
  DECL_ARTIFICIAL (name) = 1;
  DECL_NAMELESS (name) = 1;
  TYPE_NAME (ctx->record_type) = name;
  TYPE_ARTIFICIAL (ctx->record_type) = 1;
  create_omp_child_function (ctx, false);
  gimple_omp_task_set_child_fn (stmt, ctx->cb.dst_fn);

  scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);

  if (ctx->srecord_type)
    {
      name = create_tmp_var_name (".omp_data_a");
      name = build_decl (gimple_location (stmt),
			 TYPE_DECL, name, ctx->srecord_type);
      DECL_ARTIFICIAL (name) = 1;
      DECL_NAMELESS (name) = 1;
      TYPE_NAME (ctx->srecord_type) = name;
      TYPE_ARTIFICIAL (ctx->srecord_type) = 1;
      create_omp_child_function (ctx, true);
    }

  scan_omp (gimple_omp_body_ptr (stmt), ctx);

  if (TYPE_FIELDS (ctx->record_type) == NULL)
    {
      ctx->record_type = ctx->receiver_decl = NULL;
      t = build_int_cst (long_integer_type_node, 0);
      gimple_omp_task_set_arg_size (stmt, t);
      t = build_int_cst (long_integer_type_node, 1);
      gimple_omp_task_set_arg_align (stmt, t);
    }
}
1911 /* If any decls have been made addressable during scan_omp,
1912 adjust their fields if needed, and lay out the record types
1913 of parallel/task constructs. */
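/* An illustrative sketch of the resulting layout for a taskloop whose
   record has two _looptemp_ fields T1/T2 and a variable-sized field
   VLA (the names are made up for the example):

       .omp_data_s = { T1, T2, <other fixed-size fields>, VLA }

   i.e. the _looptemp_ fields are placed first and variable-sized
   fields are moved to the end, as the code below implements.  */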
1915 static void
1916 finish_taskreg_scan (omp_context *ctx)
1918 if (ctx->record_type == NULL_TREE)
1919 return;
1921 /* If any task_shared_vars were needed, re-check for the
1922 OMP_CLAUSE_SHARED clauses on GIMPLE_OMP_{PARALLEL,TASK}
1923 statements whether use_pointer_for_field has changed
1924 because of that; if it did, update the field types now. */
1925 if (task_shared_vars)
1927 tree c;
1929 for (c = gimple_omp_taskreg_clauses (ctx->stmt);
1930 c; c = OMP_CLAUSE_CHAIN (c))
1931 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
1932 && !OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
1934 tree decl = OMP_CLAUSE_DECL (c);
1936 /* Global variables don't need to be copied,
1937 the receiver side will use them directly. */
1938 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1939 continue;
1940 if (!bitmap_bit_p (task_shared_vars, DECL_UID (decl))
1941 || !use_pointer_for_field (decl, ctx))
1942 continue;
1943 tree field = lookup_field (decl, ctx);
1944 if (TREE_CODE (TREE_TYPE (field)) == POINTER_TYPE
1945 && TREE_TYPE (TREE_TYPE (field)) == TREE_TYPE (decl))
1946 continue;
1947 TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
1948 TREE_THIS_VOLATILE (field) = 0;
1949 DECL_USER_ALIGN (field) = 0;
1950 SET_DECL_ALIGN (field, TYPE_ALIGN (TREE_TYPE (field)));
1951 if (TYPE_ALIGN (ctx->record_type) < DECL_ALIGN (field))
1952 SET_TYPE_ALIGN (ctx->record_type, DECL_ALIGN (field));
1953 if (ctx->srecord_type)
1955 tree sfield = lookup_sfield (decl, ctx);
1956 TREE_TYPE (sfield) = TREE_TYPE (field);
1957 TREE_THIS_VOLATILE (sfield) = 0;
1958 DECL_USER_ALIGN (sfield) = 0;
1959 SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
1960 if (TYPE_ALIGN (ctx->srecord_type) < DECL_ALIGN (sfield))
1961 SET_TYPE_ALIGN (ctx->srecord_type, DECL_ALIGN (sfield));
1966 if (gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL)
1968 layout_type (ctx->record_type);
1969 fixup_child_record_type (ctx);
1971 else
1973 location_t loc = gimple_location (ctx->stmt);
1974 tree *p, vla_fields = NULL_TREE, *q = &vla_fields;
1975 /* Move VLA fields to the end. */
1976 p = &TYPE_FIELDS (ctx->record_type);
1977 while (*p)
1978 if (!TYPE_SIZE_UNIT (TREE_TYPE (*p))
1979 || ! TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (*p))))
1981 *q = *p;
1982 *p = TREE_CHAIN (*p);
1983 TREE_CHAIN (*q) = NULL_TREE;
1984 q = &TREE_CHAIN (*q);
1986 else
1987 p = &DECL_CHAIN (*p);
1988 *p = vla_fields;
1989 if (gimple_omp_task_taskloop_p (ctx->stmt))
1991 /* Move the fields corresponding to the first and second _looptemp_
1992 clauses to the front. These are filled in by GOMP_taskloop
1993 and thus need to be at specific positions. */
1994 tree c1 = gimple_omp_task_clauses (ctx->stmt);
1995 c1 = omp_find_clause (c1, OMP_CLAUSE__LOOPTEMP_);
1996 tree c2 = omp_find_clause (OMP_CLAUSE_CHAIN (c1),
1997 OMP_CLAUSE__LOOPTEMP_);
1998 tree f1 = lookup_field (OMP_CLAUSE_DECL (c1), ctx);
1999 tree f2 = lookup_field (OMP_CLAUSE_DECL (c2), ctx);
2000 p = &TYPE_FIELDS (ctx->record_type);
2001 while (*p)
2002 if (*p == f1 || *p == f2)
2003 *p = DECL_CHAIN (*p);
2004 else
2005 p = &DECL_CHAIN (*p);
2006 DECL_CHAIN (f1) = f2;
2007 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->record_type);
2008 TYPE_FIELDS (ctx->record_type) = f1;
2009 if (ctx->srecord_type)
2011 f1 = lookup_sfield (OMP_CLAUSE_DECL (c1), ctx);
2012 f2 = lookup_sfield (OMP_CLAUSE_DECL (c2), ctx);
2013 p = &TYPE_FIELDS (ctx->srecord_type);
2014 while (*p)
2015 if (*p == f1 || *p == f2)
2016 *p = DECL_CHAIN (*p);
2017 else
2018 p = &DECL_CHAIN (*p);
2019 DECL_CHAIN (f1) = f2;
2020 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
2021 TYPE_FIELDS (ctx->srecord_type) = f1;
2024 layout_type (ctx->record_type);
2025 fixup_child_record_type (ctx);
2026 if (ctx->srecord_type)
2027 layout_type (ctx->srecord_type);
2028 tree t = fold_convert_loc (loc, long_integer_type_node,
2029 TYPE_SIZE_UNIT (ctx->record_type));
2030 gimple_omp_task_set_arg_size (ctx->stmt, t);
2031 t = build_int_cst (long_integer_type_node,
2032 TYPE_ALIGN_UNIT (ctx->record_type));
2033 gimple_omp_task_set_arg_align (ctx->stmt, t);
2037 /* Find the enclosing offload context. */
2039 static omp_context *
2040 enclosing_target_ctx (omp_context *ctx)
2042 for (; ctx; ctx = ctx->outer)
2043 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET)
2044 break;
2046 return ctx;
2049 /* Return true if ctx is part of an oacc kernels region. */
2051 static bool
2052 ctx_in_oacc_kernels_region (omp_context *ctx)
2054 for (; ctx != NULL; ctx = ctx->outer)
2056 gimple *stmt = ctx->stmt;
2057 if (gimple_code (stmt) == GIMPLE_OMP_TARGET
2058 && gimple_omp_target_kind (stmt) == GF_OMP_TARGET_KIND_OACC_KERNELS)
2059 return true;
2062 return false;
2065 /* Check the parallelism clauses inside a kernels region.
2066 Until kernels handling moves to use the same loop indirection
2067 scheme as parallel, we need to do this checking early. */
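/* For instance (an illustrative source-level example, not an exact
   testcase), the inner loop below would be diagnosed with "inner loop
   uses same OpenACC parallelism as containing loop":

       #pragma acc kernels
       #pragma acc loop gang
       for (i = 0; i < n; i++)
         #pragma acc loop gang
         for (j = 0; j < m; j++)
           ...  */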
2069 static unsigned
2070 check_oacc_kernel_gwv (gomp_for *stmt, omp_context *ctx)
2072 bool checking = true;
2073 unsigned outer_mask = 0;
2074 unsigned this_mask = 0;
2075 bool has_seq = false, has_auto = false;
2077 if (ctx->outer)
2078 outer_mask = check_oacc_kernel_gwv (NULL, ctx->outer);
2079 if (!stmt)
2081 checking = false;
2082 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR)
2083 return outer_mask;
2084 stmt = as_a <gomp_for *> (ctx->stmt);
2087 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2089 switch (OMP_CLAUSE_CODE (c))
2091 case OMP_CLAUSE_GANG:
2092 this_mask |= GOMP_DIM_MASK (GOMP_DIM_GANG);
2093 break;
2094 case OMP_CLAUSE_WORKER:
2095 this_mask |= GOMP_DIM_MASK (GOMP_DIM_WORKER);
2096 break;
2097 case OMP_CLAUSE_VECTOR:
2098 this_mask |= GOMP_DIM_MASK (GOMP_DIM_VECTOR);
2099 break;
2100 case OMP_CLAUSE_SEQ:
2101 has_seq = true;
2102 break;
2103 case OMP_CLAUSE_AUTO:
2104 has_auto = true;
2105 break;
2106 default:
2107 break;
2111 if (checking)
2113 if (has_seq && (this_mask || has_auto))
2114 error_at (gimple_location (stmt), "%<seq%> overrides other"
2115 " OpenACC loop specifiers");
2116 else if (has_auto && this_mask)
2117 error_at (gimple_location (stmt), "%<auto%> conflicts with other"
2118 " OpenACC loop specifiers");
2120 if (this_mask & outer_mask)
2121 error_at (gimple_location (stmt), "inner loop uses same"
2122 " OpenACC parallelism as containing loop");
2125 return outer_mask | this_mask;
2128 /* Scan a GIMPLE_OMP_FOR. */
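/* One of the OpenACC checks done below, as a hedged example: inside an
   OpenACC parallel region, gang/worker/vector clauses on a loop must
   not carry arguments, so e.g.

       #pragma acc parallel
       #pragma acc loop gang (num: 8)
       for (i = 0; i < n; i++)
         ...

   is rejected with "argument not permitted on %qs clause ...".  */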
2130 static void
2131 scan_omp_for (gomp_for *stmt, omp_context *outer_ctx)
2133 omp_context *ctx;
2134 size_t i;
2135 tree clauses = gimple_omp_for_clauses (stmt);
2137 ctx = new_omp_context (stmt, outer_ctx);
2139 if (is_gimple_omp_oacc (stmt))
2141 omp_context *tgt = enclosing_target_ctx (outer_ctx);
2143 if (!tgt || is_oacc_parallel (tgt))
2144 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
2146 char const *check = NULL;
2148 switch (OMP_CLAUSE_CODE (c))
2150 case OMP_CLAUSE_GANG:
2151 check = "gang";
2152 break;
2154 case OMP_CLAUSE_WORKER:
2155 check = "worker";
2156 break;
2158 case OMP_CLAUSE_VECTOR:
2159 check = "vector";
2160 break;
2162 default:
2163 break;
2166 if (check && OMP_CLAUSE_OPERAND (c, 0))
2167 error_at (gimple_location (stmt),
2168 "argument not permitted on %qs clause in"
2169 " OpenACC %<parallel%>", check);
2172 if (tgt && is_oacc_kernels (tgt))
2174 /* Strip out reductions, as they are not handled yet. */
2175 tree *prev_ptr = &clauses;
2177 while (tree probe = *prev_ptr)
2179 tree *next_ptr = &OMP_CLAUSE_CHAIN (probe);
2181 if (OMP_CLAUSE_CODE (probe) == OMP_CLAUSE_REDUCTION)
2182 *prev_ptr = *next_ptr;
2183 else
2184 prev_ptr = next_ptr;
2187 gimple_omp_for_set_clauses (stmt, clauses);
2188 check_oacc_kernel_gwv (stmt, ctx);
2192 scan_sharing_clauses (clauses, ctx);
2194 scan_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
2195 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
2197 scan_omp_op (gimple_omp_for_index_ptr (stmt, i), ctx);
2198 scan_omp_op (gimple_omp_for_initial_ptr (stmt, i), ctx);
2199 scan_omp_op (gimple_omp_for_final_ptr (stmt, i), ctx);
2200 scan_omp_op (gimple_omp_for_incr_ptr (stmt, i), ctx);
2202 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2205 /* Duplicate #pragma omp simd, one for SIMT, another one for SIMD. */
2207 static void
2208 scan_omp_simd (gimple_stmt_iterator *gsi, gomp_for *stmt,
2209 omp_context *outer_ctx)
2211 gbind *bind = gimple_build_bind (NULL, NULL, NULL);
2212 gsi_replace (gsi, bind, false);
2213 gimple_seq seq = NULL;
2214 gimple *g = gimple_build_call_internal (IFN_GOMP_USE_SIMT, 0);
2215 tree cond = create_tmp_var_raw (integer_type_node);
2216 DECL_CONTEXT (cond) = current_function_decl;
2217 DECL_SEEN_IN_BIND_EXPR_P (cond) = 1;
2218 gimple_bind_set_vars (bind, cond);
2219 gimple_call_set_lhs (g, cond);
2220 gimple_seq_add_stmt (&seq, g);
2221 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
2222 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
2223 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
2224 g = gimple_build_cond (NE_EXPR, cond, integer_zero_node, lab1, lab2);
2225 gimple_seq_add_stmt (&seq, g);
2226 g = gimple_build_label (lab1);
2227 gimple_seq_add_stmt (&seq, g);
2228 gimple_seq new_seq = copy_gimple_seq_and_replace_locals (stmt);
2229 gomp_for *new_stmt = as_a <gomp_for *> (new_seq);
2230 tree clause = build_omp_clause (gimple_location (stmt), OMP_CLAUSE__SIMT_);
2231 OMP_CLAUSE_CHAIN (clause) = gimple_omp_for_clauses (new_stmt);
2232 gimple_omp_for_set_clauses (new_stmt, clause);
2233 gimple_seq_add_stmt (&seq, new_stmt);
2234 g = gimple_build_goto (lab3);
2235 gimple_seq_add_stmt (&seq, g);
2236 g = gimple_build_label (lab2);
2237 gimple_seq_add_stmt (&seq, g);
2238 gimple_seq_add_stmt (&seq, stmt);
2239 g = gimple_build_label (lab3);
2240 gimple_seq_add_stmt (&seq, g);
2241 gimple_bind_set_body (bind, seq);
2242 update_stmt (bind);
2243 scan_omp_for (new_stmt, outer_ctx);
2244 scan_omp_for (stmt, outer_ctx);
2247 /* Scan an OpenMP sections directive. */
2249 static void
2250 scan_omp_sections (gomp_sections *stmt, omp_context *outer_ctx)
2252 omp_context *ctx;
2254 ctx = new_omp_context (stmt, outer_ctx);
2255 scan_sharing_clauses (gimple_omp_sections_clauses (stmt), ctx);
2256 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2259 /* Scan an OpenMP single directive. */
2261 static void
2262 scan_omp_single (gomp_single *stmt, omp_context *outer_ctx)
2264 omp_context *ctx;
2265 tree name;
2267 ctx = new_omp_context (stmt, outer_ctx);
2268 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2269 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2270 name = create_tmp_var_name (".omp_copy_s");
2271 name = build_decl (gimple_location (stmt),
2272 TYPE_DECL, name, ctx->record_type);
2273 TYPE_NAME (ctx->record_type) = name;
2275 scan_sharing_clauses (gimple_omp_single_clauses (stmt), ctx);
2276 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2278 if (TYPE_FIELDS (ctx->record_type) == NULL)
2279 ctx->record_type = NULL;
2280 else
2281 layout_type (ctx->record_type);
2284 /* Return true if the CLAUSES of an omp target guarantee that the base pointers
2285 used in the corresponding offloaded function are restrict. */
2287 static bool
2288 omp_target_base_pointers_restrict_p (tree clauses)
2290 /* The analysis relies on the GOMP_MAP_FORCE_* mapping kinds, which are only
2291 used by OpenACC. */
2292 if (flag_openacc == 0)
2293 return false;
2295 /* I. Basic example:
2297 void foo (void)
2299 unsigned int a[2], b[2];
2301 #pragma acc kernels \
2302 copyout (a) \
2303 copyout (b)
2305 a[0] = 0;
2306 b[0] = 1;
2310 After gimplification, we have:
2312 #pragma omp target oacc_kernels \
2313 map(force_from:a [len: 8]) \
2314 map(force_from:b [len: 8])
2316 a[0] = 0;
2317 b[0] = 1;
2320 Because both mappings have the force prefix, we know that they will be
2321 allocated when calling the corresponding offloaded function, which means we
2322 can mark the base pointers for a and b in the offloaded function as
2323 restrict. */
2325 tree c;
2326 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
2328 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP)
2329 return false;
2331 switch (OMP_CLAUSE_MAP_KIND (c))
2333 case GOMP_MAP_FORCE_ALLOC:
2334 case GOMP_MAP_FORCE_TO:
2335 case GOMP_MAP_FORCE_FROM:
2336 case GOMP_MAP_FORCE_TOFROM:
2337 break;
2338 default:
2339 return false;
2343 return true;
2346 /* Scan a GIMPLE_OMP_TARGET. */
2348 static void
2349 scan_omp_target (gomp_target *stmt, omp_context *outer_ctx)
2351 omp_context *ctx;
2352 tree name;
2353 bool offloaded = is_gimple_omp_offloaded (stmt);
2354 tree clauses = gimple_omp_target_clauses (stmt);
2356 ctx = new_omp_context (stmt, outer_ctx);
2357 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2358 ctx->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
2359 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2360 name = create_tmp_var_name (".omp_data_t");
2361 name = build_decl (gimple_location (stmt),
2362 TYPE_DECL, name, ctx->record_type);
2363 DECL_ARTIFICIAL (name) = 1;
2364 DECL_NAMELESS (name) = 1;
2365 TYPE_NAME (ctx->record_type) = name;
2366 TYPE_ARTIFICIAL (ctx->record_type) = 1;
2368 bool base_pointers_restrict = false;
2369 if (offloaded)
2371 create_omp_child_function (ctx, false);
2372 gimple_omp_target_set_child_fn (stmt, ctx->cb.dst_fn);
2374 base_pointers_restrict = omp_target_base_pointers_restrict_p (clauses);
2375 if (base_pointers_restrict
2376 && dump_file && (dump_flags & TDF_DETAILS))
2377 fprintf (dump_file,
2378 "Base pointers in offloaded function are restrict\n");
2381 scan_sharing_clauses (clauses, ctx, base_pointers_restrict);
2382 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2384 if (TYPE_FIELDS (ctx->record_type) == NULL)
2385 ctx->record_type = ctx->receiver_decl = NULL;
2386 else
2388 TYPE_FIELDS (ctx->record_type)
2389 = nreverse (TYPE_FIELDS (ctx->record_type));
2390 if (flag_checking)
2392 unsigned int align = DECL_ALIGN (TYPE_FIELDS (ctx->record_type));
2393 for (tree field = TYPE_FIELDS (ctx->record_type);
2394 field;
2395 field = DECL_CHAIN (field))
2396 gcc_assert (DECL_ALIGN (field) == align);
2398 layout_type (ctx->record_type);
2399 if (offloaded)
2400 fixup_child_record_type (ctx);
2404 /* Scan an OpenMP teams directive. */
2406 static void
2407 scan_omp_teams (gomp_teams *stmt, omp_context *outer_ctx)
2409 omp_context *ctx = new_omp_context (stmt, outer_ctx);
2410 scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
2411 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2414 /* Check nesting restrictions. */
2415 static bool
2416 check_omp_nesting_restrictions (gimple *stmt, omp_context *ctx)
2418 tree c;
2420 if (ctx && gimple_code (ctx->stmt) == GIMPLE_OMP_GRID_BODY)
2421 /* GRID_BODY is an artificial construct; nesting rules will be checked in
2422 the original copy of its contents. */
2423 return true;
2425 /* No nesting of non-OpenACC STMT (that is, an OpenMP one, or a GOMP builtin)
2426 inside an OpenACC CTX. */
2427 if (!(is_gimple_omp (stmt)
2428 && is_gimple_omp_oacc (stmt))
2429 /* Except for atomic codes that we share with OpenMP. */
2430 && !(gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
2431 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE))
2433 if (oacc_get_fn_attrib (cfun->decl) != NULL)
2435 error_at (gimple_location (stmt),
2436 "non-OpenACC construct inside of OpenACC routine");
2437 return false;
2439 else
2440 for (omp_context *octx = ctx; octx != NULL; octx = octx->outer)
2441 if (is_gimple_omp (octx->stmt)
2442 && is_gimple_omp_oacc (octx->stmt))
2444 error_at (gimple_location (stmt),
2445 "non-OpenACC construct inside of OpenACC region");
2446 return false;
2450 if (ctx != NULL)
2452 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
2453 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
2455 c = NULL_TREE;
2456 if (gimple_code (stmt) == GIMPLE_OMP_ORDERED)
2458 c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
2459 if (omp_find_clause (c, OMP_CLAUSE_SIMD))
2461 if (omp_find_clause (c, OMP_CLAUSE_THREADS)
2462 && (ctx->outer == NULL
2463 || !gimple_omp_for_combined_into_p (ctx->stmt)
2464 || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR
2465 || (gimple_omp_for_kind (ctx->outer->stmt)
2466 != GF_OMP_FOR_KIND_FOR)
2467 || !gimple_omp_for_combined_p (ctx->outer->stmt)))
2469 error_at (gimple_location (stmt),
2470 "%<ordered simd threads%> must be closely "
2471 "nested inside of %<for simd%> region");
2472 return false;
2474 return true;
2477 error_at (gimple_location (stmt),
2478 "OpenMP constructs other than %<#pragma omp ordered simd%>"
2479 " may not be nested inside %<simd%> region");
2480 return false;
2482 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
2484 if ((gimple_code (stmt) != GIMPLE_OMP_FOR
2485 || ((gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_DISTRIBUTE)
2486 && (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_GRID_LOOP)))
2487 && gimple_code (stmt) != GIMPLE_OMP_PARALLEL)
2489 error_at (gimple_location (stmt),
2490 "only %<distribute%> or %<parallel%> regions are "
2491 "allowed to be strictly nested inside %<teams%> "
2492 "region");
2493 return false;
2497 switch (gimple_code (stmt))
2499 case GIMPLE_OMP_FOR:
2500 if (gimple_omp_for_kind (stmt) & GF_OMP_FOR_SIMD)
2501 return true;
2502 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_DISTRIBUTE)
2504 if (ctx != NULL && gimple_code (ctx->stmt) != GIMPLE_OMP_TEAMS)
2506 error_at (gimple_location (stmt),
2507 "%<distribute%> region must be strictly nested "
2508 "inside %<teams%> construct");
2509 return false;
2511 return true;
2513 /* We split a taskloop into a task with a nested taskloop inside it. */
2514 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP)
2515 return true;
2516 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_OACC_LOOP)
2518 bool ok = false;
2520 if (ctx)
2521 switch (gimple_code (ctx->stmt))
2523 case GIMPLE_OMP_FOR:
2524 ok = (gimple_omp_for_kind (ctx->stmt)
2525 == GF_OMP_FOR_KIND_OACC_LOOP);
2526 break;
2528 case GIMPLE_OMP_TARGET:
2529 switch (gimple_omp_target_kind (ctx->stmt))
2531 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
2532 case GF_OMP_TARGET_KIND_OACC_KERNELS:
2533 ok = true;
2534 break;
2536 default:
2537 break;
2540 default:
2541 break;
2543 else if (oacc_get_fn_attrib (current_function_decl))
2544 ok = true;
2545 if (!ok)
2547 error_at (gimple_location (stmt),
2548 "OpenACC loop directive must be associated with"
2549 " an OpenACC compute region");
2550 return false;
2553 /* FALLTHRU */
2554 case GIMPLE_CALL:
2555 if (is_gimple_call (stmt)
2556 && (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2557 == BUILT_IN_GOMP_CANCEL
2558 || DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2559 == BUILT_IN_GOMP_CANCELLATION_POINT))
2561 const char *bad = NULL;
2562 const char *kind = NULL;
2563 const char *construct
2564 = (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2565 == BUILT_IN_GOMP_CANCEL)
2566 ? "#pragma omp cancel"
2567 : "#pragma omp cancellation point";
2568 if (ctx == NULL)
2570 error_at (gimple_location (stmt), "orphaned %qs construct",
2571 construct);
2572 return false;
2574 switch (tree_fits_shwi_p (gimple_call_arg (stmt, 0))
2575 ? tree_to_shwi (gimple_call_arg (stmt, 0))
2576 : 0)
2578 case 1:
2579 if (gimple_code (ctx->stmt) != GIMPLE_OMP_PARALLEL)
2580 bad = "#pragma omp parallel";
2581 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2582 == BUILT_IN_GOMP_CANCEL
2583 && !integer_zerop (gimple_call_arg (stmt, 1)))
2584 ctx->cancellable = true;
2585 kind = "parallel";
2586 break;
2587 case 2:
2588 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
2589 || gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR)
2590 bad = "#pragma omp for";
2591 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2592 == BUILT_IN_GOMP_CANCEL
2593 && !integer_zerop (gimple_call_arg (stmt, 1)))
2595 ctx->cancellable = true;
2596 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2597 OMP_CLAUSE_NOWAIT))
2598 warning_at (gimple_location (stmt), 0,
2599 "%<#pragma omp cancel for%> inside "
2600 "%<nowait%> for construct");
2601 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2602 OMP_CLAUSE_ORDERED))
2603 warning_at (gimple_location (stmt), 0,
2604 "%<#pragma omp cancel for%> inside "
2605 "%<ordered%> for construct");
2607 kind = "for";
2608 break;
2609 case 4:
2610 if (gimple_code (ctx->stmt) != GIMPLE_OMP_SECTIONS
2611 && gimple_code (ctx->stmt) != GIMPLE_OMP_SECTION)
2612 bad = "#pragma omp sections";
2613 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2614 == BUILT_IN_GOMP_CANCEL
2615 && !integer_zerop (gimple_call_arg (stmt, 1)))
2617 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
2619 ctx->cancellable = true;
2620 if (omp_find_clause (gimple_omp_sections_clauses
2621 (ctx->stmt),
2622 OMP_CLAUSE_NOWAIT))
2623 warning_at (gimple_location (stmt), 0,
2624 "%<#pragma omp cancel sections%> inside "
2625 "%<nowait%> sections construct");
2627 else
2629 gcc_assert (ctx->outer
2630 && gimple_code (ctx->outer->stmt)
2631 == GIMPLE_OMP_SECTIONS);
2632 ctx->outer->cancellable = true;
2633 if (omp_find_clause (gimple_omp_sections_clauses
2634 (ctx->outer->stmt),
2635 OMP_CLAUSE_NOWAIT))
2636 warning_at (gimple_location (stmt), 0,
2637 "%<#pragma omp cancel sections%> inside "
2638 "%<nowait%> sections construct");
2641 kind = "sections";
2642 break;
2643 case 8:
2644 if (gimple_code (ctx->stmt) != GIMPLE_OMP_TASK)
2645 bad = "#pragma omp task";
2646 else
2648 for (omp_context *octx = ctx->outer;
2649 octx; octx = octx->outer)
2651 switch (gimple_code (octx->stmt))
2653 case GIMPLE_OMP_TASKGROUP:
2654 break;
2655 case GIMPLE_OMP_TARGET:
2656 if (gimple_omp_target_kind (octx->stmt)
2657 != GF_OMP_TARGET_KIND_REGION)
2658 continue;
2659 /* FALLTHRU */
2660 case GIMPLE_OMP_PARALLEL:
2661 case GIMPLE_OMP_TEAMS:
2662 error_at (gimple_location (stmt),
2663 "%<%s taskgroup%> construct not closely "
2664 "nested inside of %<taskgroup%> region",
2665 construct);
2666 return false;
2667 default:
2668 continue;
2670 break;
2672 ctx->cancellable = true;
2674 kind = "taskgroup";
2675 break;
2676 default:
2677 error_at (gimple_location (stmt), "invalid arguments");
2678 return false;
2680 if (bad)
2682 error_at (gimple_location (stmt),
2683 "%<%s %s%> construct not closely nested inside of %qs",
2684 construct, kind, bad);
2685 return false;
2688 /* FALLTHRU */
2689 case GIMPLE_OMP_SECTIONS:
2690 case GIMPLE_OMP_SINGLE:
2691 for (; ctx != NULL; ctx = ctx->outer)
2692 switch (gimple_code (ctx->stmt))
2694 case GIMPLE_OMP_FOR:
2695 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
2696 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
2697 break;
2698 /* FALLTHRU */
2699 case GIMPLE_OMP_SECTIONS:
2700 case GIMPLE_OMP_SINGLE:
2701 case GIMPLE_OMP_ORDERED:
2702 case GIMPLE_OMP_MASTER:
2703 case GIMPLE_OMP_TASK:
2704 case GIMPLE_OMP_CRITICAL:
2705 if (is_gimple_call (stmt))
2707 if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2708 != BUILT_IN_GOMP_BARRIER)
2709 return true;
2710 error_at (gimple_location (stmt),
2711 "barrier region may not be closely nested inside "
2712 "of work-sharing, %<critical%>, %<ordered%>, "
2713 "%<master%>, explicit %<task%> or %<taskloop%> "
2714 "region");
2715 return false;
2717 error_at (gimple_location (stmt),
2718 "work-sharing region may not be closely nested inside "
2719 "of work-sharing, %<critical%>, %<ordered%>, "
2720 "%<master%>, explicit %<task%> or %<taskloop%> region");
2721 return false;
2722 case GIMPLE_OMP_PARALLEL:
2723 case GIMPLE_OMP_TEAMS:
2724 return true;
2725 case GIMPLE_OMP_TARGET:
2726 if (gimple_omp_target_kind (ctx->stmt)
2727 == GF_OMP_TARGET_KIND_REGION)
2728 return true;
2729 break;
2730 default:
2731 break;
2733 break;
2734 case GIMPLE_OMP_MASTER:
2735 for (; ctx != NULL; ctx = ctx->outer)
2736 switch (gimple_code (ctx->stmt))
2738 case GIMPLE_OMP_FOR:
2739 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
2740 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
2741 break;
2742 /* FALLTHRU */
2743 case GIMPLE_OMP_SECTIONS:
2744 case GIMPLE_OMP_SINGLE:
2745 case GIMPLE_OMP_TASK:
2746 error_at (gimple_location (stmt),
2747 "%<master%> region may not be closely nested inside "
2748 "of work-sharing, explicit %<task%> or %<taskloop%> "
2749 "region");
2750 return false;
2751 case GIMPLE_OMP_PARALLEL:
2752 case GIMPLE_OMP_TEAMS:
2753 return true;
2754 case GIMPLE_OMP_TARGET:
2755 if (gimple_omp_target_kind (ctx->stmt)
2756 == GF_OMP_TARGET_KIND_REGION)
2757 return true;
2758 break;
2759 default:
2760 break;
2762 break;
2763 case GIMPLE_OMP_TASK:
2764 for (c = gimple_omp_task_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2765 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
2766 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
2767 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
2769 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
2770 error_at (OMP_CLAUSE_LOCATION (c),
2771 "%<depend(%s)%> is only allowed in %<omp ordered%>",
2772 kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
2773 return false;
2775 break;
2776 case GIMPLE_OMP_ORDERED:
2777 for (c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
2778 c; c = OMP_CLAUSE_CHAIN (c))
2780 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
2782 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREADS
2783 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SIMD);
2784 continue;
2786 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
2787 if (kind == OMP_CLAUSE_DEPEND_SOURCE
2788 || kind == OMP_CLAUSE_DEPEND_SINK)
2790 tree oclause;
2791 /* Look for containing ordered(N) loop. */
2792 if (ctx == NULL
2793 || gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
2794 || (oclause
2795 = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2796 OMP_CLAUSE_ORDERED)) == NULL_TREE)
2798 error_at (OMP_CLAUSE_LOCATION (c),
2799 "%<ordered%> construct with %<depend%> clause "
2800 "must be closely nested inside an %<ordered%> "
2801 "loop");
2802 return false;
2804 else if (OMP_CLAUSE_ORDERED_EXPR (oclause) == NULL_TREE)
2806 error_at (OMP_CLAUSE_LOCATION (c),
2807 "%<ordered%> construct with %<depend%> clause "
2808 "must be closely nested inside a loop with "
2809 "%<ordered%> clause with a parameter");
2810 return false;
2813 else
2815 error_at (OMP_CLAUSE_LOCATION (c),
2816 "invalid depend kind in omp %<ordered%> %<depend%>");
2817 return false;
2820 c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
2821 if (omp_find_clause (c, OMP_CLAUSE_SIMD))
2823 /* ordered simd must be closely nested inside of a simd region,
2824 and a simd region must not encounter constructs other than
2825 ordered simd; therefore ordered simd is either orphaned,
2826 or ctx->stmt must be simd. The latter case has already been
2827 handled earlier. */
2828 if (ctx != NULL)
2830 error_at (gimple_location (stmt),
2831 "%<ordered%> %<simd%> must be closely nested inside "
2832 "%<simd%> region");
2833 return false;
2836 for (; ctx != NULL; ctx = ctx->outer)
2837 switch (gimple_code (ctx->stmt))
2839 case GIMPLE_OMP_CRITICAL:
2840 case GIMPLE_OMP_TASK:
2841 case GIMPLE_OMP_ORDERED:
2842 ordered_in_taskloop:
2843 error_at (gimple_location (stmt),
2844 "%<ordered%> region may not be closely nested inside "
2845 "of %<critical%>, %<ordered%>, explicit %<task%> or "
2846 "%<taskloop%> region");
2847 return false;
2848 case GIMPLE_OMP_FOR:
2849 if (gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP)
2850 goto ordered_in_taskloop;
2851 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2852 OMP_CLAUSE_ORDERED) == NULL)
2854 error_at (gimple_location (stmt),
2855 "%<ordered%> region must be closely nested inside "
2856 "a loop region with an %<ordered%> clause");
2857 return false;
2859 return true;
2860 case GIMPLE_OMP_TARGET:
2861 if (gimple_omp_target_kind (ctx->stmt)
2862 != GF_OMP_TARGET_KIND_REGION)
2863 break;
2864 /* FALLTHRU */
2865 case GIMPLE_OMP_PARALLEL:
2866 case GIMPLE_OMP_TEAMS:
2867 error_at (gimple_location (stmt),
2868 "%<ordered%> region must be closely nested inside "
2869 "a loop region with an %<ordered%> clause");
2870 return false;
2871 default:
2872 break;
2874 break;
2875 case GIMPLE_OMP_CRITICAL:
2877 tree this_stmt_name
2878 = gimple_omp_critical_name (as_a <gomp_critical *> (stmt));
2879 for (; ctx != NULL; ctx = ctx->outer)
2880 if (gomp_critical *other_crit
2881 = dyn_cast <gomp_critical *> (ctx->stmt))
2882 if (this_stmt_name == gimple_omp_critical_name (other_crit))
2884 error_at (gimple_location (stmt),
2885 "%<critical%> region may not be nested inside "
2886 "a %<critical%> region with the same name");
2887 return false;
2890 break;
2891 case GIMPLE_OMP_TEAMS:
2892 if (ctx == NULL
2893 || gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET
2894 || gimple_omp_target_kind (ctx->stmt) != GF_OMP_TARGET_KIND_REGION)
2896 error_at (gimple_location (stmt),
2897 "%<teams%> construct not closely nested inside of "
2898 "%<target%> construct");
2899 return false;
2901 break;
2902 case GIMPLE_OMP_TARGET:
2903 for (c = gimple_omp_target_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2904 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
2905 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
2906 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
2908 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
2909 error_at (OMP_CLAUSE_LOCATION (c),
2910 "%<depend(%s)%> is only allowed in %<omp ordered%>",
2911 kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
2912 return false;
2914 if (is_gimple_omp_offloaded (stmt)
2915 && oacc_get_fn_attrib (cfun->decl) != NULL)
2917 error_at (gimple_location (stmt),
2918 "OpenACC region inside of OpenACC routine, nested "
2919 "parallelism not supported yet");
2920 return false;
2922 for (; ctx != NULL; ctx = ctx->outer)
2924 if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET)
2926 if (is_gimple_omp (stmt)
2927 && is_gimple_omp_oacc (stmt)
2928 && is_gimple_omp (ctx->stmt))
2930 error_at (gimple_location (stmt),
2931 "OpenACC construct inside of non-OpenACC region");
2932 return false;
2934 continue;
2937 const char *stmt_name, *ctx_stmt_name;
2938 switch (gimple_omp_target_kind (stmt))
2940 case GF_OMP_TARGET_KIND_REGION: stmt_name = "target"; break;
2941 case GF_OMP_TARGET_KIND_DATA: stmt_name = "target data"; break;
2942 case GF_OMP_TARGET_KIND_UPDATE: stmt_name = "target update"; break;
2943 case GF_OMP_TARGET_KIND_ENTER_DATA:
2944 stmt_name = "target enter data"; break;
2945 case GF_OMP_TARGET_KIND_EXIT_DATA:
2946 stmt_name = "target exit data"; break;
2947 case GF_OMP_TARGET_KIND_OACC_PARALLEL: stmt_name = "parallel"; break;
2948 case GF_OMP_TARGET_KIND_OACC_KERNELS: stmt_name = "kernels"; break;
2949 case GF_OMP_TARGET_KIND_OACC_DATA: stmt_name = "data"; break;
2950 case GF_OMP_TARGET_KIND_OACC_UPDATE: stmt_name = "update"; break;
2951 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
2952 stmt_name = "enter/exit data"; break;
2953 case GF_OMP_TARGET_KIND_OACC_HOST_DATA: stmt_name = "host_data";
2954 break;
2955 default: gcc_unreachable ();
2957 switch (gimple_omp_target_kind (ctx->stmt))
2959 case GF_OMP_TARGET_KIND_REGION: ctx_stmt_name = "target"; break;
2960 case GF_OMP_TARGET_KIND_DATA: ctx_stmt_name = "target data"; break;
2961 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
2962 ctx_stmt_name = "parallel"; break;
2963 case GF_OMP_TARGET_KIND_OACC_KERNELS:
2964 ctx_stmt_name = "kernels"; break;
2965 case GF_OMP_TARGET_KIND_OACC_DATA: ctx_stmt_name = "data"; break;
2966 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
2967 ctx_stmt_name = "host_data"; break;
2968 default: gcc_unreachable ();
2971 /* OpenACC/OpenMP mismatch? */
2972 if (is_gimple_omp_oacc (stmt)
2973 != is_gimple_omp_oacc (ctx->stmt))
2975 error_at (gimple_location (stmt),
2976 "%s %qs construct inside of %s %qs region",
2977 (is_gimple_omp_oacc (stmt)
2978 ? "OpenACC" : "OpenMP"), stmt_name,
2979 (is_gimple_omp_oacc (ctx->stmt)
2980 ? "OpenACC" : "OpenMP"), ctx_stmt_name);
2981 return false;
2983 if (is_gimple_omp_offloaded (ctx->stmt))
2985 /* No GIMPLE_OMP_TARGET inside offloaded OpenACC CTX. */
2986 if (is_gimple_omp_oacc (ctx->stmt))
2988 error_at (gimple_location (stmt),
2989 "%qs construct inside of %qs region",
2990 stmt_name, ctx_stmt_name);
2991 return false;
2993 else
2995 warning_at (gimple_location (stmt), 0,
2996 "%qs construct inside of %qs region",
2997 stmt_name, ctx_stmt_name);
3001 break;
3002 default:
3003 break;
3005 return true;
3009 /* Helper function for scan_omp.
3011 Callback for walk_tree or operators in walk_gimple_stmt used to
3012 scan for OMP directives in TP. */
3014 static tree
3015 scan_omp_1_op (tree *tp, int *walk_subtrees, void *data)
3017 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
3018 omp_context *ctx = (omp_context *) wi->info;
3019 tree t = *tp;
3021 switch (TREE_CODE (t))
3023 case VAR_DECL:
3024 case PARM_DECL:
3025 case LABEL_DECL:
3026 case RESULT_DECL:
3027 if (ctx)
3029 tree repl = remap_decl (t, &ctx->cb);
3030 gcc_checking_assert (TREE_CODE (repl) != ERROR_MARK);
3031 *tp = repl;
3033 break;
3035 default:
3036 if (ctx && TYPE_P (t))
3037 *tp = remap_type (t, &ctx->cb);
3038 else if (!DECL_P (t))
3040 *walk_subtrees = 1;
3041 if (ctx)
3043 tree tem = remap_type (TREE_TYPE (t), &ctx->cb);
3044 if (tem != TREE_TYPE (t))
3046 if (TREE_CODE (t) == INTEGER_CST)
3047 *tp = wide_int_to_tree (tem, t);
3048 else
3049 TREE_TYPE (t) = tem;
3053 break;
3056 return NULL_TREE;
3059 /* Return true if FNDECL is a setjmp or a longjmp. */
3061 static bool
3062 setjmp_or_longjmp_p (const_tree fndecl)
3064 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
3065 && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_SETJMP
3066 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_LONGJMP))
3067 return true;
3069 tree declname = DECL_NAME (fndecl);
3070 if (!declname)
3071 return false;
3072 const char *name = IDENTIFIER_POINTER (declname);
3073 return !strcmp (name, "setjmp") || !strcmp (name, "longjmp");
3077 /* Helper function for scan_omp.
3079 Callback for walk_gimple_stmt used to scan for OMP directives in
3080 the current statement in GSI. */
3082 static tree
3083 scan_omp_1_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
3084 struct walk_stmt_info *wi)
3086 gimple *stmt = gsi_stmt (*gsi);
3087 omp_context *ctx = (omp_context *) wi->info;
3089 if (gimple_has_location (stmt))
3090 input_location = gimple_location (stmt);
3092 /* Check the nesting restrictions. */
3093 bool remove = false;
3094 if (is_gimple_omp (stmt))
3095 remove = !check_omp_nesting_restrictions (stmt, ctx);
3096 else if (is_gimple_call (stmt))
3098 tree fndecl = gimple_call_fndecl (stmt);
3099 if (fndecl)
3101 if (setjmp_or_longjmp_p (fndecl)
3102 && ctx
3103 && gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
3104 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
3106 remove = true;
3107 error_at (gimple_location (stmt),
3108 "setjmp/longjmp inside simd construct");
3110 else if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3111 switch (DECL_FUNCTION_CODE (fndecl))
3113 case BUILT_IN_GOMP_BARRIER:
3114 case BUILT_IN_GOMP_CANCEL:
3115 case BUILT_IN_GOMP_CANCELLATION_POINT:
3116 case BUILT_IN_GOMP_TASKYIELD:
3117 case BUILT_IN_GOMP_TASKWAIT:
3118 case BUILT_IN_GOMP_TASKGROUP_START:
3119 case BUILT_IN_GOMP_TASKGROUP_END:
3120 remove = !check_omp_nesting_restrictions (stmt, ctx);
3121 break;
3122 default:
3123 break;
3127 if (remove)
3129 stmt = gimple_build_nop ();
3130 gsi_replace (gsi, stmt, false);
3133 *handled_ops_p = true;
3135 switch (gimple_code (stmt))
3137 case GIMPLE_OMP_PARALLEL:
3138 taskreg_nesting_level++;
3139 scan_omp_parallel (gsi, ctx);
3140 taskreg_nesting_level--;
3141 break;
3143 case GIMPLE_OMP_TASK:
3144 taskreg_nesting_level++;
3145 scan_omp_task (gsi, ctx);
3146 taskreg_nesting_level--;
3147 break;
3149 case GIMPLE_OMP_FOR:
3150 if (((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
3151 & GF_OMP_FOR_KIND_MASK) == GF_OMP_FOR_KIND_SIMD)
3152 && omp_maybe_offloaded_ctx (ctx)
3153 && omp_max_simt_vf ())
3154 scan_omp_simd (gsi, as_a <gomp_for *> (stmt), ctx);
3155 else
3156 scan_omp_for (as_a <gomp_for *> (stmt), ctx);
3157 break;
3159 case GIMPLE_OMP_SECTIONS:
3160 scan_omp_sections (as_a <gomp_sections *> (stmt), ctx);
3161 break;
3163 case GIMPLE_OMP_SINGLE:
3164 scan_omp_single (as_a <gomp_single *> (stmt), ctx);
3165 break;
3167 case GIMPLE_OMP_SECTION:
3168 case GIMPLE_OMP_MASTER:
3169 case GIMPLE_OMP_TASKGROUP:
3170 case GIMPLE_OMP_ORDERED:
3171 case GIMPLE_OMP_CRITICAL:
3172 case GIMPLE_OMP_GRID_BODY:
3173 ctx = new_omp_context (stmt, ctx);
3174 scan_omp (gimple_omp_body_ptr (stmt), ctx);
3175 break;
3177 case GIMPLE_OMP_TARGET:
3178 scan_omp_target (as_a <gomp_target *> (stmt), ctx);
3179 break;
3181 case GIMPLE_OMP_TEAMS:
3182 scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
3183 break;
3185 case GIMPLE_BIND:
3187 tree var;
3189 *handled_ops_p = false;
3190 if (ctx)
3191 for (var = gimple_bind_vars (as_a <gbind *> (stmt));
3192 var ;
3193 var = DECL_CHAIN (var))
3194 insert_decl_map (&ctx->cb, var, var);
3196 break;
3197 default:
3198 *handled_ops_p = false;
3199 break;
3202 return NULL_TREE;
3206 /* Scan all the statements starting at the current statement. CTX
3207 contains context information about the OMP directives and
3208 clauses found during the scan. */
3210 static void
3211 scan_omp (gimple_seq *body_p, omp_context *ctx)
3213 location_t saved_location;
3214 struct walk_stmt_info wi;
3216 memset (&wi, 0, sizeof (wi));
3217 wi.info = ctx;
3218 wi.want_locations = true;
3220 saved_location = input_location;
3221 walk_gimple_seq_mod (body_p, scan_omp_1_stmt, scan_omp_1_op, &wi);
3222 input_location = saved_location;
3225 /* Re-gimplification and code generation routines. */
3227 /* If a context was created for STMT when it was scanned, return it. */
3229 static omp_context *
3230 maybe_lookup_ctx (gimple *stmt)
3232 splay_tree_node n;
3233 n = splay_tree_lookup (all_contexts, (splay_tree_key) stmt);
3234 return n ? (omp_context *) n->value : NULL;
3238 /* Find the mapping for DECL in CTX or the immediately enclosing
3239 context that has a mapping for DECL.
3241 If CTX is a nested parallel directive, we may have to use the decl
3242 mappings created in CTX's parent context. Suppose that we have the
3243 following parallel nesting (variable UIDs shown for clarity):
3245 iD.1562 = 0;
3246 #omp parallel shared(iD.1562) -> outer parallel
3247 iD.1562 = iD.1562 + 1;
3249 #omp parallel shared (iD.1562) -> inner parallel
3250 iD.1562 = iD.1562 - 1;
3252 Each parallel structure will create a distinct .omp_data_s structure
3253 for copying iD.1562 in/out of the directive:
3255 outer parallel .omp_data_s.1.i -> iD.1562
3256 inner parallel .omp_data_s.2.i -> iD.1562
3258 A shared variable mapping will produce a copy-out operation before
3259 the parallel directive and a copy-in operation after it. So, in
3260 this case we would have:
3262 iD.1562 = 0;
3263 .omp_data_o.1.i = iD.1562;
3264 #omp parallel shared(iD.1562) -> outer parallel
3265 .omp_data_i.1 = &.omp_data_o.1
3266 .omp_data_i.1->i = .omp_data_i.1->i + 1;
3268 .omp_data_o.2.i = iD.1562; -> **
3269 #omp parallel shared(iD.1562) -> inner parallel
3270 .omp_data_i.2 = &.omp_data_o.2
3271 .omp_data_i.2->i = .omp_data_i.2->i - 1;
3274 ** This is a problem. The symbol iD.1562 cannot be referenced
3275 inside the body of the outer parallel region. But since we are
3276 emitting this copy operation while expanding the inner parallel
3277 directive, we need to access the CTX structure of the outer
3278 parallel directive to get the correct mapping:
3280 .omp_data_o.2.i = .omp_data_i.1->i
3282 Since there may be other workshare or parallel directives enclosing
3283 the parallel directive, it may be necessary to walk up the context
3284 parent chain. This is not a problem in general because nested
3285 parallelism happens only rarely. */
3287 static tree
3288 lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
3290 tree t;
3291 omp_context *up;
3293 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
3294 t = maybe_lookup_decl (decl, up);
3296 gcc_assert (!ctx->is_nested || t || is_global_var (decl));
3298 return t ? t : decl;
3302 /* Similar to lookup_decl_in_outer_ctx, but return DECL if not found
3303 in outer contexts. */
3305 static tree
3306 maybe_lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
3308 tree t = NULL;
3309 omp_context *up;
3311 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
3312 t = maybe_lookup_decl (decl, up);
3314 return t ? t : decl;
3318 /* Construct the initialization value for reduction operation OP. */
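/* Summarized identity values, as a quick reference only (the switch
   below is authoritative; NE and EQ follow the 0 and 1 rows
   respectively):

       +  -  |  ^  ||   ->  0
       *  &&            ->  1
       &                ->  ~0 (all bits set)
       max              ->  smallest value of the type
       min              ->  largest value of the type  */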
3320 tree
3321 omp_reduction_init_op (location_t loc, enum tree_code op, tree type)
3323 switch (op)
3325 case PLUS_EXPR:
3326 case MINUS_EXPR:
3327 case BIT_IOR_EXPR:
3328 case BIT_XOR_EXPR:
3329 case TRUTH_OR_EXPR:
3330 case TRUTH_ORIF_EXPR:
3331 case TRUTH_XOR_EXPR:
3332 case NE_EXPR:
3333 return build_zero_cst (type);
3335 case MULT_EXPR:
3336 case TRUTH_AND_EXPR:
3337 case TRUTH_ANDIF_EXPR:
3338 case EQ_EXPR:
3339 return fold_convert_loc (loc, type, integer_one_node);
3341 case BIT_AND_EXPR:
3342 return fold_convert_loc (loc, type, integer_minus_one_node);
3344 case MAX_EXPR:
3345 if (SCALAR_FLOAT_TYPE_P (type))
3347 REAL_VALUE_TYPE max, min;
3348 if (HONOR_INFINITIES (type))
3350 real_inf (&max);
3351 real_arithmetic (&min, NEGATE_EXPR, &max, NULL);
3353 else
3354 real_maxval (&min, 1, TYPE_MODE (type));
3355 return build_real (type, min);
3357 else if (POINTER_TYPE_P (type))
3359 wide_int min
3360 = wi::min_value (TYPE_PRECISION (type), TYPE_SIGN (type));
3361 return wide_int_to_tree (type, min);
3363 else
3365 gcc_assert (INTEGRAL_TYPE_P (type));
3366 return TYPE_MIN_VALUE (type);
3369 case MIN_EXPR:
3370 if (SCALAR_FLOAT_TYPE_P (type))
3372 REAL_VALUE_TYPE max;
3373 if (HONOR_INFINITIES (type))
3374 real_inf (&max);
3375 else
3376 real_maxval (&max, 0, TYPE_MODE (type));
3377 return build_real (type, max);
3379 else if (POINTER_TYPE_P (type))
3381 wide_int max
3382 = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
3383 return wide_int_to_tree (type, max);
3385 else
3387 gcc_assert (INTEGRAL_TYPE_P (type));
3388 return TYPE_MAX_VALUE (type);
3391 default:
3392 gcc_unreachable ();
3396 /* Construct the initialization value for reduction CLAUSE. */
3398 tree
3399 omp_reduction_init (tree clause, tree type)
3401 return omp_reduction_init_op (OMP_CLAUSE_LOCATION (clause),
3402 OMP_CLAUSE_REDUCTION_CODE (clause), type);
3405 /* Return alignment to be assumed for var in CLAUSE, which should be
3406 OMP_CLAUSE_ALIGNED. */
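/* E.g. for "#pragma omp simd aligned (p : 32)" the explicit 32 is
   returned; if no alignment expression was given, a value derived
   from the target's preferred SIMD modes is computed below instead.
   This is only a hedged summary of the code that follows.  */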
3408 static tree
3409 omp_clause_aligned_alignment (tree clause)
3411 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
3412 return OMP_CLAUSE_ALIGNED_ALIGNMENT (clause);
3414 /* Otherwise return the implementation-defined alignment. */
3415 unsigned int al = 1;
3416 machine_mode mode, vmode;
3417 int vs = targetm.vectorize.autovectorize_vector_sizes ();
3418 if (vs)
3419 vs = 1 << floor_log2 (vs);
3420 static enum mode_class classes[]
3421 = { MODE_INT, MODE_VECTOR_INT, MODE_FLOAT, MODE_VECTOR_FLOAT };
3422 for (int i = 0; i < 4; i += 2)
3423 for (mode = GET_CLASS_NARROWEST_MODE (classes[i]);
3424 mode != VOIDmode;
3425 mode = GET_MODE_WIDER_MODE (mode))
3427 vmode = targetm.vectorize.preferred_simd_mode (mode);
3428 if (GET_MODE_CLASS (vmode) != classes[i + 1])
3429 continue;
3430 while (vs
3431 && GET_MODE_SIZE (vmode) < vs
3432 && GET_MODE_2XWIDER_MODE (vmode) != VOIDmode)
3433 vmode = GET_MODE_2XWIDER_MODE (vmode);
3435 tree type = lang_hooks.types.type_for_mode (mode, 1);
3436 if (type == NULL_TREE || TYPE_MODE (type) != mode)
3437 continue;
3438 type = build_vector_type (type, GET_MODE_SIZE (vmode)
3439 / GET_MODE_SIZE (mode));
3440 if (TYPE_MODE (type) != vmode)
3441 continue;
3442 if (TYPE_ALIGN_UNIT (type) > al)
3443 al = TYPE_ALIGN_UNIT (type);
3445 return build_int_cst (integer_type_node, al);
3449 /* This structure is part of the interface between lower_rec_simd_input_clauses
3450 and lower_rec_input_clauses. */
3452 struct omplow_simd_context {
3453 tree idx;
3454 tree lane;
3455 int max_vf;
3456 bool is_simt;
3459 /* Helper function of lower_rec_input_clauses, used for #pragma omp simd
3460 privatization. */
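/* Roughly (the array and index names here are illustrative), a
   privatized scalar X in a simd loop gets backed by an "omp simd
   array"

       T D.simdarr[max_vf];

   and uses of X are redirected through it (via a DECL_VALUE_EXPR of
   the form D.simdarr[lane]), so each SIMD lane works on its own
   copy.  */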
3462 static bool
3463 lower_rec_simd_input_clauses (tree new_var, omp_context *ctx,
3464 omplow_simd_context *sctx, tree &ivar, tree &lvar)
3466 if (sctx->max_vf == 0)
3468 sctx->max_vf = sctx->is_simt ? omp_max_simt_vf () : omp_max_vf ();
3469 if (sctx->max_vf > 1)
3471 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3472 OMP_CLAUSE_SAFELEN);
3473 if (c
3474 && (TREE_CODE (OMP_CLAUSE_SAFELEN_EXPR (c)) != INTEGER_CST
3475 || tree_int_cst_sgn (OMP_CLAUSE_SAFELEN_EXPR (c)) != 1))
3476 sctx->max_vf = 1;
3477 else if (c && compare_tree_int (OMP_CLAUSE_SAFELEN_EXPR (c),
3478 sctx->max_vf) == -1)
3479 sctx->max_vf = tree_to_shwi (OMP_CLAUSE_SAFELEN_EXPR (c));
3481 if (sctx->max_vf > 1)
3483 sctx->idx = create_tmp_var (unsigned_type_node);
3484 sctx->lane = create_tmp_var (unsigned_type_node);
3487 if (sctx->max_vf == 1)
3488 return false;
3490 tree atype = build_array_type_nelts (TREE_TYPE (new_var), sctx->max_vf);
3491 tree avar = create_tmp_var_raw (atype);
3492 if (TREE_ADDRESSABLE (new_var))
3493 TREE_ADDRESSABLE (avar) = 1;
3494 DECL_ATTRIBUTES (avar)
3495 = tree_cons (get_identifier ("omp simd array"), NULL,
3496 DECL_ATTRIBUTES (avar));
3497 gimple_add_tmp_var (avar);
3498 ivar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, sctx->idx,
3499 NULL_TREE, NULL_TREE);
3500 lvar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, sctx->lane,
3501 NULL_TREE, NULL_TREE);
3502 if (DECL_P (new_var))
3504 SET_DECL_VALUE_EXPR (new_var, lvar);
3505 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
3507 return true;
3510 /* Helper function of lower_rec_input_clauses. For a reference
3511 in simd reduction, add an underlying variable it will reference. */
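/* I.e., roughly (the temporary's name is illustrative), for a
   reference NEW_VARD whose pointed-to type has constant size:

       T D.tmp;
       new_vard = &D.tmp;

   so that later code can safely dereference the reference.  */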
3513 static void
3514 handle_simd_reference (location_t loc, tree new_vard, gimple_seq *ilist)
3516 tree z = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_vard)));
3517 if (TREE_CONSTANT (z))
3519 z = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_vard)),
3520 get_name (new_vard));
3521 gimple_add_tmp_var (z);
3522 TREE_ADDRESSABLE (z) = 1;
3523 z = build_fold_addr_expr_loc (loc, z);
3524 gimplify_assign (new_vard, z, ilist);
3528 /* Generate code to implement the input clauses, FIRSTPRIVATE and COPYIN,
3529 from the receiver (aka child) side and initializers for REFERENCE_TYPE
3530 private variables. Initialization statements go in ILIST, while calls
3531 to destructors go in DLIST. */
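/* A hedged illustration of what ends up in the two sequences: for
   "firstprivate (x)" the child side receives an initialization
   roughly like

       x = .omp_data_i->x;        (appended to ILIST)

   while for privatized objects with cleanup semantics the matching
   destructor calls are appended to DLIST, to run at region exit.  */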
3533 static void
3534 lower_rec_input_clauses (tree clauses, gimple_seq *ilist, gimple_seq *dlist,
3535 omp_context *ctx, struct omp_for_data *fd)
3537 tree c, dtor, copyin_seq, x, ptr;
3538 bool copyin_by_ref = false;
3539 bool lastprivate_firstprivate = false;
3540 bool reduction_omp_orig_ref = false;
3541 int pass;
3542 bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
3543 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD);
3544 omplow_simd_context sctx = omplow_simd_context ();
3545 tree simt_lane = NULL_TREE;
3546 tree ivar = NULL_TREE, lvar = NULL_TREE;
3547 gimple_seq llist[3] = { };
3549 copyin_seq = NULL;
3550 sctx.is_simt = is_simd && omp_find_clause (clauses, OMP_CLAUSE__SIMT_);
3552 /* Set max_vf=1 (which will later enforce safelen=1) in simd loops
3553 with data-sharing clauses referencing variable-sized vars. That
3554 is unnecessarily hard to support and very unlikely to result in
3555 vectorized code anyway. */
3556 if (is_simd)
3557 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
3558 switch (OMP_CLAUSE_CODE (c))
3560 case OMP_CLAUSE_LINEAR:
3561 if (OMP_CLAUSE_LINEAR_ARRAY (c))
3562 sctx.max_vf = 1;
3563 /* FALLTHRU */
3564 case OMP_CLAUSE_PRIVATE:
3565 case OMP_CLAUSE_FIRSTPRIVATE:
3566 case OMP_CLAUSE_LASTPRIVATE:
3567 if (is_variable_sized (OMP_CLAUSE_DECL (c)))
3568 sctx.max_vf = 1;
3569 break;
3570 case OMP_CLAUSE_REDUCTION:
3571 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
3572 || is_variable_sized (OMP_CLAUSE_DECL (c)))
3573 sctx.max_vf = 1;
3574 break;
3575 default:
3576 continue;
3579 /* Do all the fixed-sized types in the first pass, and the variable-sized
3580 types in the second pass. This makes sure that the scalar arguments to
3581 the variable-sized types are processed before we use them in the
3582 variable-sized operations. */
3583 for (pass = 0; pass < 2; ++pass)
3585 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
3587 enum omp_clause_code c_kind = OMP_CLAUSE_CODE (c);
3588 tree var, new_var;
3589 bool by_ref;
3590 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
3592 switch (c_kind)
3594 case OMP_CLAUSE_PRIVATE:
3595 if (OMP_CLAUSE_PRIVATE_DEBUG (c))
3596 continue;
3597 break;
3598 case OMP_CLAUSE_SHARED:
3599 /* Ignore shared directives in teams construct. */
3600 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
3601 continue;
3602 if (maybe_lookup_decl (OMP_CLAUSE_DECL (c), ctx) == NULL)
3604 gcc_assert (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c)
3605 || is_global_var (OMP_CLAUSE_DECL (c)));
3606 continue;
3608 case OMP_CLAUSE_FIRSTPRIVATE:
3609 case OMP_CLAUSE_COPYIN:
3610 break;
3611 case OMP_CLAUSE_LINEAR:
3612 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
3613 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
3614 lastprivate_firstprivate = true;
3615 break;
3616 case OMP_CLAUSE_REDUCTION:
3617 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
3618 reduction_omp_orig_ref = true;
3619 break;
3620 case OMP_CLAUSE__LOOPTEMP_:
3621 /* Handle _looptemp_ clauses only on parallel/task. */
3622 if (fd)
3623 continue;
3624 break;
3625 case OMP_CLAUSE_LASTPRIVATE:
3626 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
3628 lastprivate_firstprivate = true;
3629 if (pass != 0 || is_taskloop_ctx (ctx))
3630 continue;
3632 /* Even without a corresponding firstprivate, if the
3633 decl is a Fortran allocatable, it needs an outer var
3634 reference. */
3635 else if (pass == 0
3636 && lang_hooks.decls.omp_private_outer_ref
3637 (OMP_CLAUSE_DECL (c)))
3638 lastprivate_firstprivate = true;
3639 break;
3640 case OMP_CLAUSE_ALIGNED:
3641 if (pass == 0)
3642 continue;
3643 var = OMP_CLAUSE_DECL (c);
3644 if (TREE_CODE (TREE_TYPE (var)) == POINTER_TYPE
3645 && !is_global_var (var))
3647 new_var = maybe_lookup_decl (var, ctx);
3648 if (new_var == NULL_TREE)
3649 new_var = maybe_lookup_decl_in_outer_ctx (var, ctx);
3650 x = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
3651 tree alarg = omp_clause_aligned_alignment (c);
3652 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
3653 x = build_call_expr_loc (clause_loc, x, 2, new_var, alarg);
3654 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
3655 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
3656 gimplify_and_add (x, ilist);
3658 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
3659 && is_global_var (var))
3661 tree ptype = build_pointer_type (TREE_TYPE (var)), t, t2;
3662 new_var = lookup_decl (var, ctx);
3663 t = maybe_lookup_decl_in_outer_ctx (var, ctx);
3664 t = build_fold_addr_expr_loc (clause_loc, t);
3665 t2 = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
3666 tree alarg = omp_clause_aligned_alignment (c);
3667 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
3668 t = build_call_expr_loc (clause_loc, t2, 2, t, alarg);
3669 t = fold_convert_loc (clause_loc, ptype, t);
3670 x = create_tmp_var (ptype);
3671 t = build2 (MODIFY_EXPR, ptype, x, t);
3672 gimplify_and_add (t, ilist);
3673 t = build_simple_mem_ref_loc (clause_loc, x);
3674 SET_DECL_VALUE_EXPR (new_var, t);
3675 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
3677 continue;
3678 default:
3679 continue;
3682 new_var = var = OMP_CLAUSE_DECL (c);
3683 if (c_kind == OMP_CLAUSE_REDUCTION && TREE_CODE (var) == MEM_REF)
3685 var = TREE_OPERAND (var, 0);
3686 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
3687 var = TREE_OPERAND (var, 0);
3688 if (TREE_CODE (var) == INDIRECT_REF
3689 || TREE_CODE (var) == ADDR_EXPR)
3690 var = TREE_OPERAND (var, 0);
3691 if (is_variable_sized (var))
3693 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
3694 var = DECL_VALUE_EXPR (var);
3695 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
3696 var = TREE_OPERAND (var, 0);
3697 gcc_assert (DECL_P (var));
3699 new_var = var;
3701 if (c_kind != OMP_CLAUSE_COPYIN)
3702 new_var = lookup_decl (var, ctx);
3704 if (c_kind == OMP_CLAUSE_SHARED || c_kind == OMP_CLAUSE_COPYIN)
3706 if (pass != 0)
3707 continue;
3709 /* C/C++ array section reductions. */
3710 else if (c_kind == OMP_CLAUSE_REDUCTION
3711 && var != OMP_CLAUSE_DECL (c))
3713 if (pass == 0)
3714 continue;
3716 tree bias = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
3717 tree orig_var = TREE_OPERAND (OMP_CLAUSE_DECL (c), 0);
3718 if (TREE_CODE (orig_var) == POINTER_PLUS_EXPR)
3720 tree b = TREE_OPERAND (orig_var, 1);
3721 b = maybe_lookup_decl (b, ctx);
3722 if (b == NULL)
3724 b = TREE_OPERAND (orig_var, 1);
3725 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
3727 if (integer_zerop (bias))
3728 bias = b;
3729 else
3731 bias = fold_convert_loc (clause_loc,
3732 TREE_TYPE (b), bias);
3733 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
3734 TREE_TYPE (b), b, bias);
3736 orig_var = TREE_OPERAND (orig_var, 0);
3738 if (TREE_CODE (orig_var) == INDIRECT_REF
3739 || TREE_CODE (orig_var) == ADDR_EXPR)
3740 orig_var = TREE_OPERAND (orig_var, 0);
3741 tree d = OMP_CLAUSE_DECL (c);
3742 tree type = TREE_TYPE (d);
3743 gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
3744 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
3745 const char *name = get_name (orig_var);
3746 if (TREE_CONSTANT (v))
3748 x = create_tmp_var_raw (type, name);
3749 gimple_add_tmp_var (x);
3750 TREE_ADDRESSABLE (x) = 1;
3751 x = build_fold_addr_expr_loc (clause_loc, x);
3753 else
3755 tree atmp
3756 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
3757 tree t = maybe_lookup_decl (v, ctx);
3758 if (t)
3759 v = t;
3760 else
3761 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
3762 gimplify_expr (&v, ilist, NULL, is_gimple_val, fb_rvalue);
3763 t = fold_build2_loc (clause_loc, PLUS_EXPR,
3764 TREE_TYPE (v), v,
3765 build_int_cst (TREE_TYPE (v), 1));
3766 t = fold_build2_loc (clause_loc, MULT_EXPR,
3767 TREE_TYPE (v), t,
3768 TYPE_SIZE_UNIT (TREE_TYPE (type)));
3769 tree al = size_int (TYPE_ALIGN (TREE_TYPE (type)));
3770 x = build_call_expr_loc (clause_loc, atmp, 2, t, al);
3773 tree ptype = build_pointer_type (TREE_TYPE (type));
3774 x = fold_convert_loc (clause_loc, ptype, x);
3775 tree y = create_tmp_var (ptype, name);
3776 gimplify_assign (y, x, ilist);
3777 x = y;
3778 tree yb = y;
3780 if (!integer_zerop (bias))
3782 bias = fold_convert_loc (clause_loc, pointer_sized_int_node,
3783 bias);
3784 yb = fold_convert_loc (clause_loc, pointer_sized_int_node, x);
3786 yb = fold_build2_loc (clause_loc, MINUS_EXPR,
3787 pointer_sized_int_node, yb, bias);
3788 x = fold_convert_loc (clause_loc, TREE_TYPE (x), yb);
3789 yb = create_tmp_var (ptype, name);
3790 gimplify_assign (yb, x, ilist);
3791 x = yb;
3794 d = TREE_OPERAND (d, 0);
3795 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
3796 d = TREE_OPERAND (d, 0);
3797 if (TREE_CODE (d) == ADDR_EXPR)
3799 if (orig_var != var)
3801 gcc_assert (is_variable_sized (orig_var));
3802 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
3804 gimplify_assign (new_var, x, ilist);
3805 tree new_orig_var = lookup_decl (orig_var, ctx);
3806 tree t = build_fold_indirect_ref (new_var);
3807 DECL_IGNORED_P (new_var) = 0;
3808 TREE_THIS_NOTRAP (t) = 1;
3809 SET_DECL_VALUE_EXPR (new_orig_var, t);
3810 DECL_HAS_VALUE_EXPR_P (new_orig_var) = 1;
3812 else
3814 x = build2 (MEM_REF, TREE_TYPE (new_var), x,
3815 build_int_cst (ptype, 0));
3816 SET_DECL_VALUE_EXPR (new_var, x);
3817 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
3820 else
3822 gcc_assert (orig_var == var);
3823 if (TREE_CODE (d) == INDIRECT_REF)
3825 x = create_tmp_var (ptype, name);
3826 TREE_ADDRESSABLE (x) = 1;
3827 gimplify_assign (x, yb, ilist);
3828 x = build_fold_addr_expr_loc (clause_loc, x);
3830 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
3831 gimplify_assign (new_var, x, ilist);
3833 tree y1 = create_tmp_var (ptype, NULL);
3834 gimplify_assign (y1, y, ilist);
3835 tree i2 = NULL_TREE, y2 = NULL_TREE;
3836 tree body2 = NULL_TREE, end2 = NULL_TREE;
3837 tree y3 = NULL_TREE, y4 = NULL_TREE;
3838 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) || is_simd)
3840 y2 = create_tmp_var (ptype, NULL);
3841 gimplify_assign (y2, y, ilist);
3842 tree ref = build_outer_var_ref (var, ctx);
3843 /* For a reference, build_outer_var_ref already performs this. */
3844 if (TREE_CODE (d) == INDIRECT_REF)
3845 gcc_assert (omp_is_reference (var));
3846 else if (TREE_CODE (d) == ADDR_EXPR)
3847 ref = build_fold_addr_expr (ref);
3848 else if (omp_is_reference (var))
3849 ref = build_fold_addr_expr (ref);
3850 ref = fold_convert_loc (clause_loc, ptype, ref);
3851 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
3852 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
3854 y3 = create_tmp_var (ptype, NULL);
3855 gimplify_assign (y3, unshare_expr (ref), ilist);
3857 if (is_simd)
3859 y4 = create_tmp_var (ptype, NULL);
3860 gimplify_assign (y4, ref, dlist);
3863 tree i = create_tmp_var (TREE_TYPE (v), NULL);
3864 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), ilist);
3865 tree body = create_artificial_label (UNKNOWN_LOCATION);
3866 tree end = create_artificial_label (UNKNOWN_LOCATION);
3867 gimple_seq_add_stmt (ilist, gimple_build_label (body));
3868 if (y2)
3870 i2 = create_tmp_var (TREE_TYPE (v), NULL);
3871 gimplify_assign (i2, build_int_cst (TREE_TYPE (v), 0), dlist);
3872 body2 = create_artificial_label (UNKNOWN_LOCATION);
3873 end2 = create_artificial_label (UNKNOWN_LOCATION);
3874 gimple_seq_add_stmt (dlist, gimple_build_label (body2));
3876 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
3878 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
3879 tree decl_placeholder
3880 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
3881 SET_DECL_VALUE_EXPR (decl_placeholder,
3882 build_simple_mem_ref (y1));
3883 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
3884 SET_DECL_VALUE_EXPR (placeholder,
3885 y3 ? build_simple_mem_ref (y3)
3886 : error_mark_node);
3887 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
3888 x = lang_hooks.decls.omp_clause_default_ctor
3889 (c, build_simple_mem_ref (y1),
3890 y3 ? build_simple_mem_ref (y3) : NULL_TREE);
3891 if (x)
3892 gimplify_and_add (x, ilist);
3893 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
3895 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
3896 lower_omp (&tseq, ctx);
3897 gimple_seq_add_seq (ilist, tseq);
3899 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
3900 if (is_simd)
3902 SET_DECL_VALUE_EXPR (decl_placeholder,
3903 build_simple_mem_ref (y2));
3904 SET_DECL_VALUE_EXPR (placeholder,
3905 build_simple_mem_ref (y4));
3906 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
3907 lower_omp (&tseq, ctx);
3908 gimple_seq_add_seq (dlist, tseq);
3909 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
3911 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
3912 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 0;
3913 x = lang_hooks.decls.omp_clause_dtor
3914 (c, build_simple_mem_ref (y2));
3915 if (x)
3917 gimple_seq tseq = NULL;
3918 dtor = x;
3919 gimplify_stmt (&dtor, &tseq);
3920 gimple_seq_add_seq (dlist, tseq);
3923 else
3925 x = omp_reduction_init (c, TREE_TYPE (type));
3926 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
3928 /* reduction(-:var) sums up the partial results, so it
3929 acts identically to reduction(+:var). */
3930 if (code == MINUS_EXPR)
3931 code = PLUS_EXPR;
3933 gimplify_assign (build_simple_mem_ref (y1), x, ilist);
3934 if (is_simd)
3936 x = build2 (code, TREE_TYPE (type),
3937 build_simple_mem_ref (y4),
3938 build_simple_mem_ref (y2));
3939 gimplify_assign (build_simple_mem_ref (y4), x, dlist);
3942 gimple *g
3943 = gimple_build_assign (y1, POINTER_PLUS_EXPR, y1,
3944 TYPE_SIZE_UNIT (TREE_TYPE (type)));
3945 gimple_seq_add_stmt (ilist, g);
3946 if (y3)
3948 g = gimple_build_assign (y3, POINTER_PLUS_EXPR, y3,
3949 TYPE_SIZE_UNIT (TREE_TYPE (type)));
3950 gimple_seq_add_stmt (ilist, g);
3952 g = gimple_build_assign (i, PLUS_EXPR, i,
3953 build_int_cst (TREE_TYPE (i), 1));
3954 gimple_seq_add_stmt (ilist, g);
3955 g = gimple_build_cond (LE_EXPR, i, v, body, end);
3956 gimple_seq_add_stmt (ilist, g);
3957 gimple_seq_add_stmt (ilist, gimple_build_label (end));
3958 if (y2)
3960 g = gimple_build_assign (y2, POINTER_PLUS_EXPR, y2,
3961 TYPE_SIZE_UNIT (TREE_TYPE (type)));
3962 gimple_seq_add_stmt (dlist, g);
3963 if (y4)
3965 g = gimple_build_assign
3966 (y4, POINTER_PLUS_EXPR, y4,
3967 TYPE_SIZE_UNIT (TREE_TYPE (type)));
3968 gimple_seq_add_stmt (dlist, g);
3970 g = gimple_build_assign (i2, PLUS_EXPR, i2,
3971 build_int_cst (TREE_TYPE (i2), 1));
3972 gimple_seq_add_stmt (dlist, g);
3973 g = gimple_build_cond (LE_EXPR, i2, v, body2, end2);
3974 gimple_seq_add_stmt (dlist, g);
3975 gimple_seq_add_stmt (dlist, gimple_build_label (end2));
3977 continue;
3979 else if (is_variable_sized (var))
3981 /* For variable sized types, we need to allocate the
3982 actual storage here. Call alloca and store the
3983 result in the pointer decl that we created elsewhere. */
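/* A sketch (not the exact GIMPLE) of what the statements built below
   amount to, where PTR is the decl behind new_var's DECL_VALUE_EXPR:

     tmp = __builtin_alloca_with_align (sizeof (*new_var), DECL_ALIGN (var));
     ptr = (typeof (ptr)) tmp;  */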
3984 if (pass == 0)
3985 continue;
3987 if (c_kind != OMP_CLAUSE_FIRSTPRIVATE || !is_task_ctx (ctx))
3989 gcall *stmt;
3990 tree tmp, atmp;
3992 ptr = DECL_VALUE_EXPR (new_var);
3993 gcc_assert (TREE_CODE (ptr) == INDIRECT_REF);
3994 ptr = TREE_OPERAND (ptr, 0);
3995 gcc_assert (DECL_P (ptr));
3996 x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
3998 /* void *tmp = __builtin_alloca */
3999 atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
4000 stmt = gimple_build_call (atmp, 2, x,
4001 size_int (DECL_ALIGN (var)));
4002 tmp = create_tmp_var_raw (ptr_type_node);
4003 gimple_add_tmp_var (tmp);
4004 gimple_call_set_lhs (stmt, tmp);
4006 gimple_seq_add_stmt (ilist, stmt);
4008 x = fold_convert_loc (clause_loc, TREE_TYPE (ptr), tmp);
4009 gimplify_assign (ptr, x, ilist);
4012 else if (omp_is_reference (var))
4014 /* For references that are being privatized for Fortran,
4015 allocate new backing storage for the new pointer
4016 variable. This allows us to avoid changing all the
4017 code that expects a pointer to something that expects
4018 a direct variable. */
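/* A sketch, for a privatized reference VAR with pointee type T:

     new_var = (T *) <fresh stack or alloca storage for a T>;

   new_var is then dereferenced below, so code written against the
   reference keeps working without rewriting every use.  */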
4019 if (pass == 0)
4020 continue;
4022 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
4023 if (c_kind == OMP_CLAUSE_FIRSTPRIVATE && is_task_ctx (ctx))
4025 x = build_receiver_ref (var, false, ctx);
4026 x = build_fold_addr_expr_loc (clause_loc, x);
4028 else if (TREE_CONSTANT (x))
4030 /* For reduction in SIMD loop, defer adding the
4031 initialization of the reference, because if we decide
4032 to use SIMD array for it, the initialization could cause
4033 expansion ICE. */
4034 if (c_kind == OMP_CLAUSE_REDUCTION && is_simd)
4035 x = NULL_TREE;
4036 else
4038 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
4039 get_name (var));
4040 gimple_add_tmp_var (x);
4041 TREE_ADDRESSABLE (x) = 1;
4042 x = build_fold_addr_expr_loc (clause_loc, x);
4045 else
4047 tree atmp
4048 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
4049 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
4050 tree al = size_int (TYPE_ALIGN (rtype));
4051 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
4054 if (x)
4056 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
4057 gimplify_assign (new_var, x, ilist);
4060 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
4062 else if (c_kind == OMP_CLAUSE_REDUCTION
4063 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4065 if (pass == 0)
4066 continue;
4068 else if (pass != 0)
4069 continue;
4071 switch (OMP_CLAUSE_CODE (c))
4073 case OMP_CLAUSE_SHARED:
4074 /* Ignore shared directives in teams construct. */
4075 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
4076 continue;
4077 /* Shared global vars are just accessed directly. */
4078 if (is_global_var (new_var))
4079 break;
4080 /* For taskloop firstprivate/lastprivate, represented
4081 as firstprivate and shared clause on the task, new_var
4082 is the firstprivate var. */
4083 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
4084 break;
4085 /* Set up the DECL_VALUE_EXPR for shared variables now. This
4086 needs to be delayed until after fixup_child_record_type so
4087 that we get the correct type during the dereference. */
4088 by_ref = use_pointer_for_field (var, ctx);
4089 x = build_receiver_ref (var, by_ref, ctx);
4090 SET_DECL_VALUE_EXPR (new_var, x);
4091 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4093 /* ??? If VAR is not passed by reference, and the variable
4094 hasn't been initialized yet, then we'll get a warning for
4095 the store into the omp_data_s structure. Ideally, we'd be
4096 able to notice this and not store anything at all, but
4097 we're generating code too early. Suppress the warning. */
4098 if (!by_ref)
4099 TREE_NO_WARNING (var) = 1;
4100 break;
4102 case OMP_CLAUSE_LASTPRIVATE:
4103 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
4104 break;
4105 /* FALLTHRU */
4107 case OMP_CLAUSE_PRIVATE:
4108 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE)
4109 x = build_outer_var_ref (var, ctx);
4110 else if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
4112 if (is_task_ctx (ctx))
4113 x = build_receiver_ref (var, false, ctx);
4114 else
4115 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_PRIVATE);
4117 else
4118 x = NULL;
4119 do_private:
4120 tree nx;
4121 nx = lang_hooks.decls.omp_clause_default_ctor
4122 (c, unshare_expr (new_var), x);
4123 if (is_simd)
4125 tree y = lang_hooks.decls.omp_clause_dtor (c, new_var);
4126 if ((TREE_ADDRESSABLE (new_var) || nx || y
4127 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
4128 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4129 ivar, lvar))
4131 if (nx)
4132 x = lang_hooks.decls.omp_clause_default_ctor
4133 (c, unshare_expr (ivar), x);
4134 if (nx && x)
4135 gimplify_and_add (x, &llist[0]);
4136 if (y)
4138 y = lang_hooks.decls.omp_clause_dtor (c, ivar);
4139 if (y)
4141 gimple_seq tseq = NULL;
4143 dtor = y;
4144 gimplify_stmt (&dtor, &tseq);
4145 gimple_seq_add_seq (&llist[1], tseq);
4148 break;
4151 if (nx)
4152 gimplify_and_add (nx, ilist);
4153 /* FALLTHRU */
4155 do_dtor:
4156 x = lang_hooks.decls.omp_clause_dtor (c, new_var);
4157 if (x)
4159 gimple_seq tseq = NULL;
4161 dtor = x;
4162 gimplify_stmt (&dtor, &tseq);
4163 gimple_seq_add_seq (dlist, tseq);
4165 break;
4167 case OMP_CLAUSE_LINEAR:
4168 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
4169 goto do_firstprivate;
4170 if (OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
4171 x = NULL;
4172 else
4173 x = build_outer_var_ref (var, ctx);
4174 goto do_private;
4176 case OMP_CLAUSE_FIRSTPRIVATE:
4177 if (is_task_ctx (ctx))
4179 if (omp_is_reference (var) || is_variable_sized (var))
4180 goto do_dtor;
4181 else if (is_global_var (maybe_lookup_decl_in_outer_ctx (var,
4182 ctx))
4183 || use_pointer_for_field (var, NULL))
4185 x = build_receiver_ref (var, false, ctx);
4186 SET_DECL_VALUE_EXPR (new_var, x);
4187 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4188 goto do_dtor;
4191 do_firstprivate:
4192 x = build_outer_var_ref (var, ctx);
4193 if (is_simd)
4195 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
4196 && gimple_omp_for_combined_into_p (ctx->stmt))
4198 tree t = OMP_CLAUSE_LINEAR_STEP (c);
4199 tree stept = TREE_TYPE (t);
4200 tree ct = omp_find_clause (clauses,
4201 OMP_CLAUSE__LOOPTEMP_);
4202 gcc_assert (ct);
4203 tree l = OMP_CLAUSE_DECL (ct);
4204 tree n1 = fd->loop.n1;
4205 tree step = fd->loop.step;
4206 tree itype = TREE_TYPE (l);
4207 if (POINTER_TYPE_P (itype))
4208 itype = signed_type_for (itype);
4209 l = fold_build2 (MINUS_EXPR, itype, l, n1);
4210 if (TYPE_UNSIGNED (itype)
4211 && fd->loop.cond_code == GT_EXPR)
4212 l = fold_build2 (TRUNC_DIV_EXPR, itype,
4213 fold_build1 (NEGATE_EXPR, itype, l),
4214 fold_build1 (NEGATE_EXPR,
4215 itype, step));
4216 else
4217 l = fold_build2 (TRUNC_DIV_EXPR, itype, l, step);
4218 t = fold_build2 (MULT_EXPR, stept,
4219 fold_convert (stept, l), t);
4221 if (OMP_CLAUSE_LINEAR_ARRAY (c))
4223 x = lang_hooks.decls.omp_clause_linear_ctor
4224 (c, new_var, x, t);
4225 gimplify_and_add (x, ilist);
4226 goto do_dtor;
4229 if (POINTER_TYPE_P (TREE_TYPE (x)))
4230 x = fold_build2 (POINTER_PLUS_EXPR,
4231 TREE_TYPE (x), x, t);
4232 else
4233 x = fold_build2 (PLUS_EXPR, TREE_TYPE (x), x, t);
4236 if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LINEAR
4237 || TREE_ADDRESSABLE (new_var))
4238 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4239 ivar, lvar))
4241 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR)
4243 tree iv = create_tmp_var (TREE_TYPE (new_var));
4244 x = lang_hooks.decls.omp_clause_copy_ctor (c, iv, x);
4245 gimplify_and_add (x, ilist);
4246 gimple_stmt_iterator gsi
4247 = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
4248 gassign *g
4249 = gimple_build_assign (unshare_expr (lvar), iv);
4250 gsi_insert_before_without_update (&gsi, g,
4251 GSI_SAME_STMT);
4252 tree t = OMP_CLAUSE_LINEAR_STEP (c);
4253 enum tree_code code = PLUS_EXPR;
4254 if (POINTER_TYPE_P (TREE_TYPE (new_var)))
4255 code = POINTER_PLUS_EXPR;
4256 g = gimple_build_assign (iv, code, iv, t);
4257 gsi_insert_before_without_update (&gsi, g,
4258 GSI_SAME_STMT);
4259 break;
4261 x = lang_hooks.decls.omp_clause_copy_ctor
4262 (c, unshare_expr (ivar), x);
4263 gimplify_and_add (x, &llist[0]);
4264 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
4265 if (x)
4267 gimple_seq tseq = NULL;
4269 dtor = x;
4270 gimplify_stmt (&dtor, &tseq);
4271 gimple_seq_add_seq (&llist[1], tseq);
4273 break;
4276 x = lang_hooks.decls.omp_clause_copy_ctor
4277 (c, unshare_expr (new_var), x);
4278 gimplify_and_add (x, ilist);
4279 goto do_dtor;
4281 case OMP_CLAUSE__LOOPTEMP_:
4282 gcc_assert (is_taskreg_ctx (ctx));
4283 x = build_outer_var_ref (var, ctx);
4284 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
4285 gimplify_and_add (x, ilist);
4286 break;
4288 case OMP_CLAUSE_COPYIN:
4289 by_ref = use_pointer_for_field (var, NULL);
4290 x = build_receiver_ref (var, by_ref, ctx);
4291 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, x);
4292 append_to_statement_list (x, &copyin_seq);
4293 copyin_by_ref |= by_ref;
4294 break;
4296 case OMP_CLAUSE_REDUCTION:
4297 /* OpenACC reductions are initialized using the
4298 GOACC_REDUCTION internal function. */
4299 if (is_gimple_omp_oacc (ctx->stmt))
4300 break;
4301 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4303 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
4304 gimple *tseq;
4305 x = build_outer_var_ref (var, ctx);
4307 if (omp_is_reference (var)
4308 && !useless_type_conversion_p (TREE_TYPE (placeholder),
4309 TREE_TYPE (x)))
4310 x = build_fold_addr_expr_loc (clause_loc, x);
4311 SET_DECL_VALUE_EXPR (placeholder, x);
4312 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
4313 tree new_vard = new_var;
4314 if (omp_is_reference (var))
4316 gcc_assert (TREE_CODE (new_var) == MEM_REF);
4317 new_vard = TREE_OPERAND (new_var, 0);
4318 gcc_assert (DECL_P (new_vard));
4320 if (is_simd
4321 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4322 ivar, lvar))
4324 if (new_vard == new_var)
4326 gcc_assert (DECL_VALUE_EXPR (new_var) == lvar);
4327 SET_DECL_VALUE_EXPR (new_var, ivar);
4329 else
4331 SET_DECL_VALUE_EXPR (new_vard,
4332 build_fold_addr_expr (ivar));
4333 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
4335 x = lang_hooks.decls.omp_clause_default_ctor
4336 (c, unshare_expr (ivar),
4337 build_outer_var_ref (var, ctx));
4338 if (x)
4339 gimplify_and_add (x, &llist[0]);
4340 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
4342 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
4343 lower_omp (&tseq, ctx);
4344 gimple_seq_add_seq (&llist[0], tseq);
4346 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
4347 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
4348 lower_omp (&tseq, ctx);
4349 gimple_seq_add_seq (&llist[1], tseq);
4350 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
4351 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
4352 if (new_vard == new_var)
4353 SET_DECL_VALUE_EXPR (new_var, lvar);
4354 else
4355 SET_DECL_VALUE_EXPR (new_vard,
4356 build_fold_addr_expr (lvar));
4357 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
4358 if (x)
4360 tseq = NULL;
4361 dtor = x;
4362 gimplify_stmt (&dtor, &tseq);
4363 gimple_seq_add_seq (&llist[1], tseq);
4365 break;
4367 /* If this is a reference to a constant-size reduction var
4368 with a placeholder, we haven't emitted the initializer
4369 for it because it is undesirable if SIMD arrays are used.
4370 But if they aren't used, we need to emit the deferred
4371 initialization now. */
4372 else if (omp_is_reference (var) && is_simd)
4373 handle_simd_reference (clause_loc, new_vard, ilist);
4374 x = lang_hooks.decls.omp_clause_default_ctor
4375 (c, unshare_expr (new_var),
4376 build_outer_var_ref (var, ctx));
4377 if (x)
4378 gimplify_and_add (x, ilist);
4379 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
4381 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
4382 lower_omp (&tseq, ctx);
4383 gimple_seq_add_seq (ilist, tseq);
4385 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
4386 if (is_simd)
4388 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
4389 lower_omp (&tseq, ctx);
4390 gimple_seq_add_seq (dlist, tseq);
4391 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
4393 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
4394 goto do_dtor;
4396 else
4398 x = omp_reduction_init (c, TREE_TYPE (new_var));
4399 gcc_assert (TREE_CODE (TREE_TYPE (new_var)) != ARRAY_TYPE);
4400 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
4402 /* reduction(-:var) sums up the partial results, so it
4403 acts identically to reduction(+:var). */
4404 if (code == MINUS_EXPR)
4405 code = PLUS_EXPR;
4407 tree new_vard = new_var;
4408 if (is_simd && omp_is_reference (var))
4410 gcc_assert (TREE_CODE (new_var) == MEM_REF);
4411 new_vard = TREE_OPERAND (new_var, 0);
4412 gcc_assert (DECL_P (new_vard));
4414 if (is_simd
4415 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4416 ivar, lvar))
4418 tree ref = build_outer_var_ref (var, ctx);
4420 gimplify_assign (unshare_expr (ivar), x, &llist[0]);
4422 if (sctx.is_simt)
4424 if (!simt_lane)
4425 simt_lane = create_tmp_var (unsigned_type_node);
4426 x = build_call_expr_internal_loc
4427 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_BFLY,
4428 TREE_TYPE (ivar), 2, ivar, simt_lane);
4429 x = build2 (code, TREE_TYPE (ivar), ivar, x);
4430 gimplify_assign (ivar, x, &llist[2]);
4432 x = build2 (code, TREE_TYPE (ref), ref, ivar);
4433 ref = build_outer_var_ref (var, ctx);
4434 gimplify_assign (ref, x, &llist[1]);
4436 if (new_vard != new_var)
4438 SET_DECL_VALUE_EXPR (new_vard,
4439 build_fold_addr_expr (lvar));
4440 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
4443 else
4445 if (omp_is_reference (var) && is_simd)
4446 handle_simd_reference (clause_loc, new_vard, ilist);
4447 gimplify_assign (new_var, x, ilist);
4448 if (is_simd)
4450 tree ref = build_outer_var_ref (var, ctx);
4452 x = build2 (code, TREE_TYPE (ref), ref, new_var);
4453 ref = build_outer_var_ref (var, ctx);
4454 gimplify_assign (ref, x, dlist);
4458 break;
4460 default:
4461 gcc_unreachable ();
4466 if (sctx.lane)
4468 tree uid = create_tmp_var (ptr_type_node, "simduid");
4469 /* We don't want uninitialized warnings on simduid; it is always
4470 uninitialized, since we use it only for its DECL_UID, not its value. */
4471 TREE_NO_WARNING (uid) = 1;
4472 gimple *g
4473 = gimple_build_call_internal (IFN_GOMP_SIMD_LANE, 1, uid);
4474 gimple_call_set_lhs (g, sctx.lane);
4475 gimple_stmt_iterator gsi = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
4476 gsi_insert_before_without_update (&gsi, g, GSI_SAME_STMT);
4477 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SIMDUID_);
4478 OMP_CLAUSE__SIMDUID__DECL (c) = uid;
4479 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
4480 gimple_omp_for_set_clauses (ctx->stmt, c);
4481 g = gimple_build_assign (sctx.lane, INTEGER_CST,
4482 build_int_cst (unsigned_type_node, 0));
4483 gimple_seq_add_stmt (ilist, g);
4484 /* Emit reductions across SIMT lanes in log_2(simt_vf) steps. */
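/* A sketch of the butterfly-reduction loop built below; llist[2]
   holds, per reduction, roughly
   "ivar = ivar OP .GOMP_SIMT_XCHG_BFLY (ivar, simt_lane)":

     simt_lane = 1;
     while (simt_lane < .GOMP_SIMT_VF ())
       {
         <llist[2]>
         simt_lane <<= 1;
       }  */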
4485 if (llist[2])
4487 tree simt_vf = create_tmp_var (unsigned_type_node);
4488 g = gimple_build_call_internal (IFN_GOMP_SIMT_VF, 0);
4489 gimple_call_set_lhs (g, simt_vf);
4490 gimple_seq_add_stmt (dlist, g);
4492 tree t = build_int_cst (unsigned_type_node, 1);
4493 g = gimple_build_assign (simt_lane, INTEGER_CST, t);
4494 gimple_seq_add_stmt (dlist, g);
4496 t = build_int_cst (unsigned_type_node, 0);
4497 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
4498 gimple_seq_add_stmt (dlist, g);
4500 tree body = create_artificial_label (UNKNOWN_LOCATION);
4501 tree header = create_artificial_label (UNKNOWN_LOCATION);
4502 tree end = create_artificial_label (UNKNOWN_LOCATION);
4503 gimple_seq_add_stmt (dlist, gimple_build_goto (header));
4504 gimple_seq_add_stmt (dlist, gimple_build_label (body));
4506 gimple_seq_add_seq (dlist, llist[2]);
4508 g = gimple_build_assign (simt_lane, LSHIFT_EXPR, simt_lane, integer_one_node);
4509 gimple_seq_add_stmt (dlist, g);
4511 gimple_seq_add_stmt (dlist, gimple_build_label (header));
4512 g = gimple_build_cond (LT_EXPR, simt_lane, simt_vf, body, end);
4513 gimple_seq_add_stmt (dlist, g);
4515 gimple_seq_add_stmt (dlist, gimple_build_label (end));
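/* Likewise wrap the per-SIMD-lane init (llist[0], into ILIST) and
   fini (llist[1], into DLIST) sequences in a loop over the actual
   vectorization factor, roughly:

     for (sctx.idx = 0; sctx.idx < .GOMP_SIMD_VF (uid); sctx.idx++)
       <llist[i]>  */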
4517 for (int i = 0; i < 2; i++)
4518 if (llist[i])
4520 tree vf = create_tmp_var (unsigned_type_node);
4521 g = gimple_build_call_internal (IFN_GOMP_SIMD_VF, 1, uid);
4522 gimple_call_set_lhs (g, vf);
4523 gimple_seq *seq = i == 0 ? ilist : dlist;
4524 gimple_seq_add_stmt (seq, g);
4525 tree t = build_int_cst (unsigned_type_node, 0);
4526 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
4527 gimple_seq_add_stmt (seq, g);
4528 tree body = create_artificial_label (UNKNOWN_LOCATION);
4529 tree header = create_artificial_label (UNKNOWN_LOCATION);
4530 tree end = create_artificial_label (UNKNOWN_LOCATION);
4531 gimple_seq_add_stmt (seq, gimple_build_goto (header));
4532 gimple_seq_add_stmt (seq, gimple_build_label (body));
4533 gimple_seq_add_seq (seq, llist[i]);
4534 t = build_int_cst (unsigned_type_node, 1);
4535 g = gimple_build_assign (sctx.idx, PLUS_EXPR, sctx.idx, t);
4536 gimple_seq_add_stmt (seq, g);
4537 gimple_seq_add_stmt (seq, gimple_build_label (header));
4538 g = gimple_build_cond (LT_EXPR, sctx.idx, vf, body, end);
4539 gimple_seq_add_stmt (seq, g);
4540 gimple_seq_add_stmt (seq, gimple_build_label (end));
4544 /* The copyin sequence is not to be executed by the main thread, since
4545 that would result in self-copies. Perhaps not visible to scalars,
4546 but it certainly is to C++ operator=. */
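/* Hence guard it, roughly:

     if (omp_get_thread_num () != 0)
       <copyin_seq>  */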
4547 if (copyin_seq)
4549 x = build_call_expr (builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM), 0);
4551 x = build2 (NE_EXPR, boolean_type_node, x,
4552 build_int_cst (TREE_TYPE (x), 0));
4553 x = build3 (COND_EXPR, void_type_node, x, copyin_seq, NULL);
4554 gimplify_and_add (x, ilist);
4557 /* If any copyin variable is passed by reference, we must ensure the
4558 master thread doesn't modify it before it is copied over in all
4559 threads. Similarly for variables in both firstprivate and
4560 lastprivate clauses we need to ensure the lastprivate copying
4561 happens after firstprivate copying in all threads. And similarly
4562 for UDRs if initializer expression refers to omp_orig. */
4563 if (copyin_by_ref || lastprivate_firstprivate || reduction_omp_orig_ref)
4565 /* Don't add any barrier for #pragma omp simd or
4566 #pragma omp distribute. */
4567 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
4568 || gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_FOR)
4569 gimple_seq_add_stmt (ilist, omp_build_barrier (NULL_TREE));
4572 /* If max_vf is non-zero, then we can use only a vectorization factor
4573 up to the max_vf we chose. So stick it into the safelen clause. */
4574 if (sctx.max_vf)
4576 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
4577 OMP_CLAUSE_SAFELEN);
4578 if (c == NULL_TREE
4579 || (TREE_CODE (OMP_CLAUSE_SAFELEN_EXPR (c)) == INTEGER_CST
4580 && compare_tree_int (OMP_CLAUSE_SAFELEN_EXPR (c),
4581 sctx.max_vf) == 1))
4583 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN);
4584 OMP_CLAUSE_SAFELEN_EXPR (c) = build_int_cst (integer_type_node,
4585 sctx.max_vf);
4586 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
4587 gimple_omp_for_set_clauses (ctx->stmt, c);
4593 /* Generate code to implement the LASTPRIVATE clauses. This is used for
4594 both parallel and workshare constructs. PREDICATE may be NULL if it's
4595 always true. */
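/* Schematically, the sequence emitted into STMT_LIST is:

     if (PREDICATE) goto label_true; else goto label;
   label_true:
     <outer ref = private copy, for each lastprivate/linear var>
   label:

   with PREDICATE additionally run through .GOMP_SIMT_VOTE_ANY for
   SIMT loops so all lanes agree on the branch.  */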
4597 static void
4598 lower_lastprivate_clauses (tree clauses, tree predicate, gimple_seq *stmt_list,
4599 omp_context *ctx)
4601 tree x, c, label = NULL, orig_clauses = clauses;
4602 bool par_clauses = false;
4603 tree simduid = NULL, lastlane = NULL, simtcond = NULL, simtlast = NULL;
4605 /* Early exit if there are no lastprivate or linear clauses. */
4606 for (; clauses ; clauses = OMP_CLAUSE_CHAIN (clauses))
4607 if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LASTPRIVATE
4608 || (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LINEAR
4609 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (clauses)))
4610 break;
4611 if (clauses == NULL)
4613 /* If this was a workshare clause, see if it had been combined
4614 with its parallel. In that case, look for the clauses on the
4615 parallel statement itself. */
4616 if (is_parallel_ctx (ctx))
4617 return;
4619 ctx = ctx->outer;
4620 if (ctx == NULL || !is_parallel_ctx (ctx))
4621 return;
4623 clauses = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
4624 OMP_CLAUSE_LASTPRIVATE);
4625 if (clauses == NULL)
4626 return;
4627 par_clauses = true;
4630 bool maybe_simt = false;
4631 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
4632 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
4634 maybe_simt = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMT_);
4635 simduid = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMDUID_);
4636 if (simduid)
4637 simduid = OMP_CLAUSE__SIMDUID__DECL (simduid);
4640 if (predicate)
4642 gcond *stmt;
4643 tree label_true, arm1, arm2;
4644 enum tree_code pred_code = TREE_CODE (predicate);
4646 label = create_artificial_label (UNKNOWN_LOCATION);
4647 label_true = create_artificial_label (UNKNOWN_LOCATION);
4648 if (TREE_CODE_CLASS (pred_code) == tcc_comparison)
4650 arm1 = TREE_OPERAND (predicate, 0);
4651 arm2 = TREE_OPERAND (predicate, 1);
4652 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
4653 gimplify_expr (&arm2, stmt_list, NULL, is_gimple_val, fb_rvalue);
4655 else
4657 arm1 = predicate;
4658 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
4659 arm2 = boolean_false_node;
4660 pred_code = NE_EXPR;
4662 if (maybe_simt)
4664 c = build2 (pred_code, boolean_type_node, arm1, arm2);
4665 c = fold_convert (integer_type_node, c);
4666 simtcond = create_tmp_var (integer_type_node);
4667 gimplify_assign (simtcond, c, stmt_list);
4668 gcall *g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY,
4669 1, simtcond);
4670 c = create_tmp_var (integer_type_node);
4671 gimple_call_set_lhs (g, c);
4672 gimple_seq_add_stmt (stmt_list, g);
4673 stmt = gimple_build_cond (NE_EXPR, c, integer_zero_node,
4674 label_true, label);
4676 else
4677 stmt = gimple_build_cond (pred_code, arm1, arm2, label_true, label);
4678 gimple_seq_add_stmt (stmt_list, stmt);
4679 gimple_seq_add_stmt (stmt_list, gimple_build_label (label_true));
4682 for (c = clauses; c ;)
4684 tree var, new_var;
4685 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
4687 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4688 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
4689 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
4691 var = OMP_CLAUSE_DECL (c);
4692 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4693 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
4694 && is_taskloop_ctx (ctx))
4696 gcc_checking_assert (ctx->outer && is_task_ctx (ctx->outer));
4697 new_var = lookup_decl (var, ctx->outer);
4699 else
4701 new_var = lookup_decl (var, ctx);
4702 /* Avoid uninitialized warnings for lastprivate and
4703 for linear iterators. */
4704 if (predicate
4705 && (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4706 || OMP_CLAUSE_LINEAR_NO_COPYIN (c)))
4707 TREE_NO_WARNING (new_var) = 1;
4710 if (simduid && DECL_HAS_VALUE_EXPR_P (new_var))
4712 tree val = DECL_VALUE_EXPR (new_var);
4713 if (TREE_CODE (val) == ARRAY_REF
4714 && VAR_P (TREE_OPERAND (val, 0))
4715 && lookup_attribute ("omp simd array",
4716 DECL_ATTRIBUTES (TREE_OPERAND (val,
4717 0))))
4719 if (lastlane == NULL)
4721 lastlane = create_tmp_var (unsigned_type_node);
4722 gcall *g
4723 = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
4724 2, simduid,
4725 TREE_OPERAND (val, 1));
4726 gimple_call_set_lhs (g, lastlane);
4727 gimple_seq_add_stmt (stmt_list, g);
4729 new_var = build4 (ARRAY_REF, TREE_TYPE (val),
4730 TREE_OPERAND (val, 0), lastlane,
4731 NULL_TREE, NULL_TREE);
4732 if (maybe_simt)
4734 gcall *g;
4735 if (simtlast == NULL)
4737 simtlast = create_tmp_var (unsigned_type_node);
4738 g = gimple_build_call_internal
4739 (IFN_GOMP_SIMT_LAST_LANE, 1, simtcond);
4740 gimple_call_set_lhs (g, simtlast);
4741 gimple_seq_add_stmt (stmt_list, g);
4743 x = build_call_expr_internal_loc
4744 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_IDX,
4745 TREE_TYPE (new_var), 2, new_var, simtlast);
4746 new_var = unshare_expr (new_var);
4747 gimplify_assign (new_var, x, stmt_list);
4748 new_var = unshare_expr (new_var);
4753 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4754 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
4756 lower_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
4757 gimple_seq_add_seq (stmt_list,
4758 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
4759 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) = NULL;
4761 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
4762 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
4764 lower_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
4765 gimple_seq_add_seq (stmt_list,
4766 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
4767 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) = NULL;
4770 x = NULL_TREE;
4771 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4772 && OMP_CLAUSE_LASTPRIVATE_TASKLOOP_IV (c))
4774 gcc_checking_assert (is_taskloop_ctx (ctx));
4775 tree ovar = maybe_lookup_decl_in_outer_ctx (var,
4776 ctx->outer->outer);
4777 if (is_global_var (ovar))
4778 x = ovar;
4780 if (!x)
4781 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_LASTPRIVATE);
4782 if (omp_is_reference (var))
4783 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
4784 x = lang_hooks.decls.omp_clause_assign_op (c, x, new_var);
4785 gimplify_and_add (x, stmt_list);
4787 c = OMP_CLAUSE_CHAIN (c);
4788 if (c == NULL && !par_clauses)
4790 /* If this was a workshare clause, see if it had been combined
4791 with its parallel. In that case, continue looking for the
4792 clauses also on the parallel statement itself. */
4793 if (is_parallel_ctx (ctx))
4794 break;
4796 ctx = ctx->outer;
4797 if (ctx == NULL || !is_parallel_ctx (ctx))
4798 break;
4800 c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
4801 OMP_CLAUSE_LASTPRIVATE);
4802 par_clauses = true;
4806 if (label)
4807 gimple_seq_add_stmt (stmt_list, gimple_build_label (label));
4810 /* Lower the OpenACC reductions of CLAUSES for compute axis LEVEL
4811 (which might be a placeholder). INNER is true if this is an inner
4812 axis of a multi-axis loop. FORK and JOIN are (optional) fork and
4813 join markers. Generate the before-loop forking sequence in
4814 FORK_SEQ and the after-loop joining sequence to JOIN_SEQ. The
4815 general form of these sequences is
4817 GOACC_REDUCTION_SETUP
4818 GOACC_FORK
4819 GOACC_REDUCTION_INIT
4820 ...
4821 GOACC_REDUCTION_FINI
4822 GOACC_JOIN
4823 GOACC_REDUCTION_TEARDOWN. */
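/* Each of the GOACC_REDUCTION_* markers above becomes one internal
   call of the shape (a sketch; see the builders below):

     v = .GOACC_REDUCTION (CODE, REF_TO_RES, LOCAL, LEVEL, OP, OFFSET);

   where CODE selects SETUP/INIT/FINI/TEARDOWN and OFFSET is the
   variable's byte position in the reduction buffer, computed below.  */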
4825 static void
4826 lower_oacc_reductions (location_t loc, tree clauses, tree level, bool inner,
4827 gcall *fork, gcall *join, gimple_seq *fork_seq,
4828 gimple_seq *join_seq, omp_context *ctx)
4830 gimple_seq before_fork = NULL;
4831 gimple_seq after_fork = NULL;
4832 gimple_seq before_join = NULL;
4833 gimple_seq after_join = NULL;
4834 tree init_code = NULL_TREE, fini_code = NULL_TREE,
4835 setup_code = NULL_TREE, teardown_code = NULL_TREE;
4836 unsigned offset = 0;
4838 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
4839 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
4841 tree orig = OMP_CLAUSE_DECL (c);
4842 tree var = maybe_lookup_decl (orig, ctx);
4843 tree ref_to_res = NULL_TREE;
4844 tree incoming, outgoing, v1, v2, v3;
4845 bool is_private = false;
4847 enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
4848 if (rcode == MINUS_EXPR)
4849 rcode = PLUS_EXPR;
4850 else if (rcode == TRUTH_ANDIF_EXPR)
4851 rcode = BIT_AND_EXPR;
4852 else if (rcode == TRUTH_ORIF_EXPR)
4853 rcode = BIT_IOR_EXPR;
4854 tree op = build_int_cst (unsigned_type_node, rcode);
4856 if (!var)
4857 var = orig;
4859 incoming = outgoing = var;
4861 if (!inner)
4863 /* See if an outer construct also reduces this variable. */
4864 omp_context *outer = ctx;
4866 while (omp_context *probe = outer->outer)
4868 enum gimple_code type = gimple_code (probe->stmt);
4869 tree cls;
4871 switch (type)
4873 case GIMPLE_OMP_FOR:
4874 cls = gimple_omp_for_clauses (probe->stmt);
4875 break;
4877 case GIMPLE_OMP_TARGET:
4878 if (gimple_omp_target_kind (probe->stmt)
4879 != GF_OMP_TARGET_KIND_OACC_PARALLEL)
4880 goto do_lookup;
4882 cls = gimple_omp_target_clauses (probe->stmt);
4883 break;
4885 default:
4886 goto do_lookup;
4889 outer = probe;
4890 for (; cls; cls = OMP_CLAUSE_CHAIN (cls))
4891 if (OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_REDUCTION
4892 && orig == OMP_CLAUSE_DECL (cls))
4894 incoming = outgoing = lookup_decl (orig, probe);
4895 goto has_outer_reduction;
4897 else if ((OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_FIRSTPRIVATE
4898 || OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_PRIVATE)
4899 && orig == OMP_CLAUSE_DECL (cls))
4901 is_private = true;
4902 goto do_lookup;
4906 do_lookup:
4907 /* This is the outermost construct with this reduction,
4908 see if there's a mapping for it. */
4909 if (gimple_code (outer->stmt) == GIMPLE_OMP_TARGET
4910 && maybe_lookup_field (orig, outer) && !is_private)
4912 ref_to_res = build_receiver_ref (orig, false, outer);
4913 if (omp_is_reference (orig))
4914 ref_to_res = build_simple_mem_ref (ref_to_res);
4916 tree type = TREE_TYPE (var);
4917 if (POINTER_TYPE_P (type))
4918 type = TREE_TYPE (type);
4920 outgoing = var;
4921 incoming = omp_reduction_init_op (loc, rcode, type);
4923 else
4925 /* Try to look at enclosing contexts for reduction var,
4926 use original if no mapping found. */
4927 tree t = NULL_TREE;
4928 omp_context *c = ctx->outer;
4929 while (c && !t)
4931 t = maybe_lookup_decl (orig, c);
4932 c = c->outer;
4934 incoming = outgoing = (t ? t : orig);
4937 has_outer_reduction:;
4940 if (!ref_to_res)
4941 ref_to_res = integer_zero_node;
4943 if (omp_is_reference (orig))
4945 tree type = TREE_TYPE (var);
4946 const char *id = IDENTIFIER_POINTER (DECL_NAME (var));
4948 if (!inner)
4950 tree x = create_tmp_var (TREE_TYPE (type), id);
4951 gimplify_assign (var, build_fold_addr_expr (x), fork_seq);
4954 v1 = create_tmp_var (type, id);
4955 v2 = create_tmp_var (type, id);
4956 v3 = create_tmp_var (type, id);
4958 gimplify_assign (v1, var, fork_seq);
4959 gimplify_assign (v2, var, fork_seq);
4960 gimplify_assign (v3, var, fork_seq);
4962 var = build_simple_mem_ref (var);
4963 v1 = build_simple_mem_ref (v1);
4964 v2 = build_simple_mem_ref (v2);
4965 v3 = build_simple_mem_ref (v3);
4966 outgoing = build_simple_mem_ref (outgoing);
4968 if (!TREE_CONSTANT (incoming))
4969 incoming = build_simple_mem_ref (incoming);
4971 else
4972 v1 = v2 = v3 = var;
4974 /* Determine position in reduction buffer, which may be used
4975 by target. */
4976 enum machine_mode mode = TYPE_MODE (TREE_TYPE (var));
4977 unsigned align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
4978 offset = (offset + align - 1) & ~(align - 1);
4979 tree off = build_int_cst (sizetype, offset);
4980 offset += GET_MODE_SIZE (mode);
4982 if (!init_code)
4984 init_code = build_int_cst (integer_type_node,
4985 IFN_GOACC_REDUCTION_INIT);
4986 fini_code = build_int_cst (integer_type_node,
4987 IFN_GOACC_REDUCTION_FINI);
4988 setup_code = build_int_cst (integer_type_node,
4989 IFN_GOACC_REDUCTION_SETUP);
4990 teardown_code = build_int_cst (integer_type_node,
4991 IFN_GOACC_REDUCTION_TEARDOWN);
4994 tree setup_call
4995 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
4996 TREE_TYPE (var), 6, setup_code,
4997 unshare_expr (ref_to_res),
4998 incoming, level, op, off);
4999 tree init_call
5000 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5001 TREE_TYPE (var), 6, init_code,
5002 unshare_expr (ref_to_res),
5003 v1, level, op, off);
5004 tree fini_call
5005 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5006 TREE_TYPE (var), 6, fini_code,
5007 unshare_expr (ref_to_res),
5008 v2, level, op, off);
5009 tree teardown_call
5010 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5011 TREE_TYPE (var), 6, teardown_code,
5012 ref_to_res, v3, level, op, off);
5014 gimplify_assign (v1, setup_call, &before_fork);
5015 gimplify_assign (v2, init_call, &after_fork);
5016 gimplify_assign (v3, fini_call, &before_join);
5017 gimplify_assign (outgoing, teardown_call, &after_join);
5020 /* Now stitch things together. */
5021 gimple_seq_add_seq (fork_seq, before_fork);
5022 if (fork)
5023 gimple_seq_add_stmt (fork_seq, fork);
5024 gimple_seq_add_seq (fork_seq, after_fork);
5026 gimple_seq_add_seq (join_seq, before_join);
5027 if (join)
5028 gimple_seq_add_stmt (join_seq, join);
5029 gimple_seq_add_seq (join_seq, after_join);
5032 /* Generate code to implement the REDUCTION clauses. */
5034 static void
5035 lower_reduction_clauses (tree clauses, gimple_seq *stmt_seqp, omp_context *ctx)
5037 gimple_seq sub_seq = NULL;
5038 gimple *stmt;
5039 tree x, c;
5040 int count = 0;
5042 /* OpenACC loop reductions are handled elsewhere. */
5043 if (is_gimple_omp_oacc (ctx->stmt))
5044 return;
5046 /* SIMD reductions are handled in lower_rec_input_clauses. */
5047 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
5048 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
5049 return;
5051 /* First see if there is exactly one reduction clause. Use OMP_ATOMIC
5052 update in that case, otherwise use a lock. */
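/* E.g., a lone "reduction(+:x)" is merged with an atomic update,
   roughly

     #pragma omp atomic
     *&x_outer = *&x_outer + x_private;

   while several clauses (or array/UDR reductions) are instead merged
   between GOMP_atomic_start ()/GOMP_atomic_end () calls.  */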
5053 for (c = clauses; c && count < 2; c = OMP_CLAUSE_CHAIN (c))
5054 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
5056 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
5057 || TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
5059 /* Never use OMP_ATOMIC for array reductions or UDRs. */
5060 count = -1;
5061 break;
5063 count++;
5066 if (count == 0)
5067 return;
5069 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
5071 tree var, ref, new_var, orig_var;
5072 enum tree_code code;
5073 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
5075 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5076 continue;
5078 orig_var = var = OMP_CLAUSE_DECL (c);
5079 if (TREE_CODE (var) == MEM_REF)
5081 var = TREE_OPERAND (var, 0);
5082 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
5083 var = TREE_OPERAND (var, 0);
5084 if (TREE_CODE (var) == INDIRECT_REF
5085 || TREE_CODE (var) == ADDR_EXPR)
5086 var = TREE_OPERAND (var, 0);
5087 orig_var = var;
5088 if (is_variable_sized (var))
5090 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
5091 var = DECL_VALUE_EXPR (var);
5092 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
5093 var = TREE_OPERAND (var, 0);
5094 gcc_assert (DECL_P (var));
5097 new_var = lookup_decl (var, ctx);
5098 if (var == OMP_CLAUSE_DECL (c) && omp_is_reference (var))
5099 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
5100 ref = build_outer_var_ref (var, ctx);
5101 code = OMP_CLAUSE_REDUCTION_CODE (c);
5103 /* reduction(-:var) sums up the partial results, so it acts
5104 identically to reduction(+:var). */
5105 if (code == MINUS_EXPR)
5106 code = PLUS_EXPR;
5108 if (count == 1)
5110 tree addr = build_fold_addr_expr_loc (clause_loc, ref);
5112 addr = save_expr (addr);
5113 ref = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (addr)), addr);
5114 x = fold_build2_loc (clause_loc, code, TREE_TYPE (ref), ref, new_var);
5115 x = build2 (OMP_ATOMIC, void_type_node, addr, x);
5116 gimplify_and_add (x, stmt_seqp);
5117 return;
5119 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
5121 tree d = OMP_CLAUSE_DECL (c);
5122 tree type = TREE_TYPE (d);
5123 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
5124 tree i = create_tmp_var (TREE_TYPE (v), NULL);
5125 tree ptype = build_pointer_type (TREE_TYPE (type));
5126 tree bias = TREE_OPERAND (d, 1);
5127 d = TREE_OPERAND (d, 0);
5128 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
5130 tree b = TREE_OPERAND (d, 1);
5131 b = maybe_lookup_decl (b, ctx);
5132 if (b == NULL)
5134 b = TREE_OPERAND (d, 1);
5135 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
5137 if (integer_zerop (bias))
5138 bias = b;
5139 else
5141 bias = fold_convert_loc (clause_loc, TREE_TYPE (b), bias);
5142 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
5143 TREE_TYPE (b), b, bias);
5145 d = TREE_OPERAND (d, 0);
5147 /* For references, build_outer_var_ref already performs this, so
5148 only new_var needs a dereference. */
5149 if (TREE_CODE (d) == INDIRECT_REF)
5151 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
5152 gcc_assert (omp_is_reference (var) && var == orig_var);
5154 else if (TREE_CODE (d) == ADDR_EXPR)
5156 if (orig_var == var)
5158 new_var = build_fold_addr_expr (new_var);
5159 ref = build_fold_addr_expr (ref);
5162 else
5164 gcc_assert (orig_var == var);
5165 if (omp_is_reference (var))
5166 ref = build_fold_addr_expr (ref);
5168 if (DECL_P (v))
5170 tree t = maybe_lookup_decl (v, ctx);
5171 if (t)
5172 v = t;
5173 else
5174 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
5175 gimplify_expr (&v, stmt_seqp, NULL, is_gimple_val, fb_rvalue);
5177 if (!integer_zerop (bias))
5179 bias = fold_convert_loc (clause_loc, sizetype, bias);
5180 new_var = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
5181 TREE_TYPE (new_var), new_var,
5182 unshare_expr (bias));
5183 ref = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
5184 TREE_TYPE (ref), ref, bias);
5186 new_var = fold_convert_loc (clause_loc, ptype, new_var);
5187 ref = fold_convert_loc (clause_loc, ptype, ref);
5188 tree m = create_tmp_var (ptype, NULL);
5189 gimplify_assign (m, new_var, stmt_seqp);
5190 new_var = m;
5191 m = create_tmp_var (ptype, NULL);
5192 gimplify_assign (m, ref, stmt_seqp);
5193 ref = m;
5194 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), stmt_seqp);
5195 tree body = create_artificial_label (UNKNOWN_LOCATION);
5196 tree end = create_artificial_label (UNKNOWN_LOCATION);
5197 gimple_seq_add_stmt (&sub_seq, gimple_build_label (body));
5198 tree priv = build_simple_mem_ref_loc (clause_loc, new_var);
5199 tree out = build_simple_mem_ref_loc (clause_loc, ref);
5200 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5202 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
5203 tree decl_placeholder
5204 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
5205 SET_DECL_VALUE_EXPR (placeholder, out);
5206 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
5207 SET_DECL_VALUE_EXPR (decl_placeholder, priv);
5208 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
5209 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
5210 gimple_seq_add_seq (&sub_seq,
5211 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
5212 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5213 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
5214 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
5216 else
5218 x = build2 (code, TREE_TYPE (out), out, priv);
5219 out = unshare_expr (out);
5220 gimplify_assign (out, x, &sub_seq);
5222 gimple *g = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
5223 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5224 gimple_seq_add_stmt (&sub_seq, g);
5225 g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
5226 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5227 gimple_seq_add_stmt (&sub_seq, g);
5228 g = gimple_build_assign (i, PLUS_EXPR, i,
5229 build_int_cst (TREE_TYPE (i), 1));
5230 gimple_seq_add_stmt (&sub_seq, g);
5231 g = gimple_build_cond (LE_EXPR, i, v, body, end);
5232 gimple_seq_add_stmt (&sub_seq, g);
5233 gimple_seq_add_stmt (&sub_seq, gimple_build_label (end));
5235 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5237 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
5239 if (omp_is_reference (var)
5240 && !useless_type_conversion_p (TREE_TYPE (placeholder),
5241 TREE_TYPE (ref)))
5242 ref = build_fold_addr_expr_loc (clause_loc, ref);
5243 SET_DECL_VALUE_EXPR (placeholder, ref);
5244 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
5245 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
5246 gimple_seq_add_seq (&sub_seq, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
5247 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5248 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
5250 else
5252 x = build2 (code, TREE_TYPE (ref), ref, new_var);
5253 ref = build_outer_var_ref (var, ctx);
5254 gimplify_assign (ref, x, &sub_seq);
5258 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START), 0);
5260 gimple_seq_add_stmt (stmt_seqp, stmt);
5262 gimple_seq_add_seq (stmt_seqp, sub_seq);
5264 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END), 0);
5266 gimple_seq_add_stmt (stmt_seqp, stmt);
5270 /* Generate code to implement the COPYPRIVATE clauses. */
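/* Roughly: the one thread that executed the associated block stores
   each variable (or, if BY_REF, its address) into the sender record
   (SLIST); the other threads then read it back out of the receiver
   record and assign it to their own copies (RLIST).  */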
5272 static void
5273 lower_copyprivate_clauses (tree clauses, gimple_seq *slist, gimple_seq *rlist,
5274 omp_context *ctx)
5276 tree c;
5278 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
5280 tree var, new_var, ref, x;
5281 bool by_ref;
5282 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
5284 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYPRIVATE)
5285 continue;
5287 var = OMP_CLAUSE_DECL (c);
5288 by_ref = use_pointer_for_field (var, NULL);
5290 ref = build_sender_ref (var, ctx);
5291 x = new_var = lookup_decl_in_outer_ctx (var, ctx);
5292 if (by_ref)
5294 x = build_fold_addr_expr_loc (clause_loc, new_var);
5295 x = fold_convert_loc (clause_loc, TREE_TYPE (ref), x);
5297 gimplify_assign (ref, x, slist);
5299 ref = build_receiver_ref (var, false, ctx);
5300 if (by_ref)
5302 ref = fold_convert_loc (clause_loc,
5303 build_pointer_type (TREE_TYPE (new_var)),
5304 ref);
5305 ref = build_fold_indirect_ref_loc (clause_loc, ref);
5307 if (omp_is_reference (var))
5309 ref = fold_convert_loc (clause_loc, TREE_TYPE (new_var), ref);
5310 ref = build_simple_mem_ref_loc (clause_loc, ref);
5311 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
5313 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, ref);
5314 gimplify_and_add (x, rlist);
5319 /* Generate code to implement the clauses, FIRSTPRIVATE, COPYIN, LASTPRIVATE,
5320 and REDUCTION from the sender (aka parent) side. */
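/* Each clause handled below may produce a "send" (into ILIST, before
   the region: sender-record field = value or &value) and/or a
   "receive back" (into OLIST, after the region: value = sender-record
   field), selected via the do_in/do_out flags.  */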
5322 static void
5323 lower_send_clauses (tree clauses, gimple_seq *ilist, gimple_seq *olist,
5324 omp_context *ctx)
5326 tree c, t;
5327 int ignored_looptemp = 0;
5328 bool is_taskloop = false;
5330 /* For taskloop, ignore first two _looptemp_ clauses, those are initialized
5331 by GOMP_taskloop. */
5332 if (is_task_ctx (ctx) && gimple_omp_task_taskloop_p (ctx->stmt))
5334 ignored_looptemp = 2;
5335 is_taskloop = true;
5338 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
5340 tree val, ref, x, var;
5341 bool by_ref, do_in = false, do_out = false;
5342 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
5344 switch (OMP_CLAUSE_CODE (c))
5346 case OMP_CLAUSE_PRIVATE:
5347 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
5348 break;
5349 continue;
5350 case OMP_CLAUSE_FIRSTPRIVATE:
5351 case OMP_CLAUSE_COPYIN:
5352 case OMP_CLAUSE_LASTPRIVATE:
5353 case OMP_CLAUSE_REDUCTION:
5354 break;
5355 case OMP_CLAUSE_SHARED:
5356 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
5357 break;
5358 continue;
5359 case OMP_CLAUSE__LOOPTEMP_:
5360 if (ignored_looptemp)
5362 ignored_looptemp--;
5363 continue;
5365 break;
5366 default:
5367 continue;
5370 val = OMP_CLAUSE_DECL (c);
5371 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
5372 && TREE_CODE (val) == MEM_REF)
5374 val = TREE_OPERAND (val, 0);
5375 if (TREE_CODE (val) == POINTER_PLUS_EXPR)
5376 val = TREE_OPERAND (val, 0);
5377 if (TREE_CODE (val) == INDIRECT_REF
5378 || TREE_CODE (val) == ADDR_EXPR)
5379 val = TREE_OPERAND (val, 0);
5380 if (is_variable_sized (val))
5381 continue;
5384 /* For OMP_CLAUSE_SHARED_FIRSTPRIVATE, look beyond the
5385 outer taskloop region. */
5386 omp_context *ctx_for_o = ctx;
5387 if (is_taskloop
5388 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
5389 && OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
5390 ctx_for_o = ctx->outer;
5392 var = lookup_decl_in_outer_ctx (val, ctx_for_o);
5394 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYIN
5395 && is_global_var (var))
5396 continue;
5398 t = omp_member_access_dummy_var (var);
5399 if (t)
5401 var = DECL_VALUE_EXPR (var);
5402 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx_for_o);
5403 if (o != t)
5404 var = unshare_and_remap (var, t, o);
5405 else
5406 var = unshare_expr (var);
5409 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
5411 /* Handle taskloop firstprivate/lastprivate, where the
5412 lastprivate on GIMPLE_OMP_TASK is represented as
5413 OMP_CLAUSE_SHARED_FIRSTPRIVATE. */
5414 tree f = lookup_sfield ((splay_tree_key) &DECL_UID (val), ctx);
5415 x = omp_build_component_ref (ctx->sender_decl, f);
5416 if (use_pointer_for_field (val, ctx))
5417 var = build_fold_addr_expr (var);
5418 gimplify_assign (x, var, ilist);
5419 DECL_ABSTRACT_ORIGIN (f) = NULL;
5420 continue;
5423 if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
5424 || val == OMP_CLAUSE_DECL (c))
5425 && is_variable_sized (val))
5426 continue;
5427 by_ref = use_pointer_for_field (val, NULL);
5429 switch (OMP_CLAUSE_CODE (c))
5431 case OMP_CLAUSE_FIRSTPRIVATE:
5432 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c)
5433 && !by_ref
5434 && is_task_ctx (ctx))
5435 TREE_NO_WARNING (var) = 1;
5436 do_in = true;
5437 break;
5439 case OMP_CLAUSE_PRIVATE:
5440 case OMP_CLAUSE_COPYIN:
5441 case OMP_CLAUSE__LOOPTEMP_:
5442 do_in = true;
5443 break;
5445 case OMP_CLAUSE_LASTPRIVATE:
5446 if (by_ref || omp_is_reference (val))
5448 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
5449 continue;
5450 do_in = true;
5452 else
5454 do_out = true;
5455 if (lang_hooks.decls.omp_private_outer_ref (val))
5456 do_in = true;
5458 break;
5460 case OMP_CLAUSE_REDUCTION:
5461 do_in = true;
5462 if (val == OMP_CLAUSE_DECL (c))
5463 do_out = !(by_ref || omp_is_reference (val));
5464 else
5465 by_ref = TREE_CODE (TREE_TYPE (val)) == ARRAY_TYPE;
5466 break;
5468 default:
5469 gcc_unreachable ();
5472 if (do_in)
5474 ref = build_sender_ref (val, ctx);
5475 x = by_ref ? build_fold_addr_expr_loc (clause_loc, var) : var;
5476 gimplify_assign (ref, x, ilist);
5477 if (is_task_ctx (ctx))
5478 DECL_ABSTRACT_ORIGIN (TREE_OPERAND (ref, 1)) = NULL;
5481 if (do_out)
5483 ref = build_sender_ref (val, ctx);
5484 gimplify_assign (var, ref, olist);
5489 /* Generate code to implement SHARED from the sender (aka parent)
5490 side. This is trickier, since GIMPLE_OMP_PARALLEL_CLAUSES doesn't
5491 list things that got automatically shared. */
5493 static void
5494 lower_send_shared_vars (gimple_seq *ilist, gimple_seq *olist, omp_context *ctx)
5496 tree var, ovar, nvar, t, f, x, record_type;
5498 if (ctx->record_type == NULL)
5499 return;
5501 record_type = ctx->srecord_type ? ctx->srecord_type : ctx->record_type;
5502 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
5504 ovar = DECL_ABSTRACT_ORIGIN (f);
5505 if (!ovar || TREE_CODE (ovar) == FIELD_DECL)
5506 continue;
5508 nvar = maybe_lookup_decl (ovar, ctx);
5509 if (!nvar || !DECL_HAS_VALUE_EXPR_P (nvar))
5510 continue;
5512 /* If CTX is a nested parallel directive, find the immediately
5513 enclosing parallel or workshare construct that contains a
5514 mapping for OVAR. */
5515 var = lookup_decl_in_outer_ctx (ovar, ctx);
5517 t = omp_member_access_dummy_var (var);
5518 if (t)
5520 var = DECL_VALUE_EXPR (var);
5521 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
5522 if (o != t)
5523 var = unshare_and_remap (var, t, o);
5524 else
5525 var = unshare_expr (var);
5528 if (use_pointer_for_field (ovar, ctx))
5530 x = build_sender_ref (ovar, ctx);
5531 var = build_fold_addr_expr (var);
5532 gimplify_assign (x, var, ilist);
5534 else
5536 x = build_sender_ref (ovar, ctx);
5537 gimplify_assign (x, var, ilist);
5539 if (!TREE_READONLY (var)
5540 /* We don't need to receive a new reference to a result
5541 or parm decl. In fact we may not store to it as we will
5542 invalidate any pending RSO and generate wrong gimple
5543 during inlining. */
5544 && !((TREE_CODE (var) == RESULT_DECL
5545 || TREE_CODE (var) == PARM_DECL)
5546 && DECL_BY_REFERENCE (var)))
5548 x = build_sender_ref (ovar, ctx);
5549 gimplify_assign (var, x, olist);
5555 /* Emit an OpenACC head marker call, encapsulating the partitioning and
5556 other information that must be processed by the target compiler.
5557 Return the maximum number of dimensions the associated loop might
5558 be partitioned over. */
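/* The marker is one IFN_UNIQUE internal call, roughly

     ddvar = .UNIQUE (OACC_HEAD_MARK, ddvar, levels, tag [, gang_static]);

   left in place for later device lowering to consume.  */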
5560 static unsigned
5561 lower_oacc_head_mark (location_t loc, tree ddvar, tree clauses,
5562 gimple_seq *seq, omp_context *ctx)
5564 unsigned levels = 0;
5565 unsigned tag = 0;
5566 tree gang_static = NULL_TREE;
5567 auto_vec<tree, 5> args;
5569 args.quick_push (build_int_cst
5570 (integer_type_node, IFN_UNIQUE_OACC_HEAD_MARK));
5571 args.quick_push (ddvar);
5572 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
5574 switch (OMP_CLAUSE_CODE (c))
5576 case OMP_CLAUSE_GANG:
5577 tag |= OLF_DIM_GANG;
5578 gang_static = OMP_CLAUSE_GANG_STATIC_EXPR (c);
5579 /* static:* is represented by -1, and we can ignore it, as
5580 scheduling is always static. */
5581 if (gang_static && integer_minus_onep (gang_static))
5582 gang_static = NULL_TREE;
5583 levels++;
5584 break;
5586 case OMP_CLAUSE_WORKER:
5587 tag |= OLF_DIM_WORKER;
5588 levels++;
5589 break;
5591 case OMP_CLAUSE_VECTOR:
5592 tag |= OLF_DIM_VECTOR;
5593 levels++;
5594 break;
5596 case OMP_CLAUSE_SEQ:
5597 tag |= OLF_SEQ;
5598 break;
5600 case OMP_CLAUSE_AUTO:
5601 tag |= OLF_AUTO;
5602 break;
5604 case OMP_CLAUSE_INDEPENDENT:
5605 tag |= OLF_INDEPENDENT;
5606 break;
5608 default:
5609 continue;
5613 if (gang_static)
5615 if (DECL_P (gang_static))
5616 gang_static = build_outer_var_ref (gang_static, ctx);
5617 tag |= OLF_GANG_STATIC;
5620 /* In a parallel region, loops are implicitly INDEPENDENT. */
5621 omp_context *tgt = enclosing_target_ctx (ctx);
5622 if (!tgt || is_oacc_parallel (tgt))
5623 tag |= OLF_INDEPENDENT;
5625 /* A loop lacking SEQ, GANG, WORKER and/or VECTOR is implicitly AUTO. */
5626 if (!(tag & (((GOMP_DIM_MASK (GOMP_DIM_MAX) - 1) << OLF_DIM_BASE)
5627 | OLF_SEQ)))
5628 tag |= OLF_AUTO;
5630 /* Ensure at least one level. */
5631 if (!levels)
5632 levels++;
5634 args.quick_push (build_int_cst (integer_type_node, levels));
5635 args.quick_push (build_int_cst (integer_type_node, tag));
5636 if (gang_static)
5637 args.quick_push (gang_static);
5639 gcall *call = gimple_build_call_internal_vec (IFN_UNIQUE, args);
5640 gimple_set_location (call, loc);
5641 gimple_set_lhs (call, ddvar);
5642 gimple_seq_add_stmt (seq, call);
5644 return levels;
5647 /* Emit an OpenACC loop head or tail marker to SEQ. LEVEL is the
5648 partitioning level of the enclosed region. */
5650 static void
5651 lower_oacc_loop_marker (location_t loc, tree ddvar, bool head,
5652 tree tofollow, gimple_seq *seq)
5654 int marker_kind = (head ? IFN_UNIQUE_OACC_HEAD_MARK
5655 : IFN_UNIQUE_OACC_TAIL_MARK);
5656 tree marker = build_int_cst (integer_type_node, marker_kind);
5657 int nargs = 2 + (tofollow != NULL_TREE);
5658 gcall *call = gimple_build_call_internal (IFN_UNIQUE, nargs,
5659 marker, ddvar, tofollow);
5660 gimple_set_location (call, loc);
5661 gimple_set_lhs (call, ddvar);
5662 gimple_seq_add_stmt (seq, call);
5665 /* Generate the before and after OpenACC loop sequences. CLAUSES are
5666 the loop clauses, from which we extract reductions. Initialize
5667 HEAD and TAIL. */
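/* Schematically, for a COUNT-level loop the two sequences nest as

     HEAD: mark fork(1) [mark fork(2) [...]]
     TAIL: [[...] join(2) mark] join(1) mark

   each inner level's fork/join (and its reduction setup/teardown)
   sitting inside the outer pair.  */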
5669 static void
5670 lower_oacc_head_tail (location_t loc, tree clauses,
5671 gimple_seq *head, gimple_seq *tail, omp_context *ctx)
5673 bool inner = false;
5674 tree ddvar = create_tmp_var (integer_type_node, ".data_dep");
5675 gimple_seq_add_stmt (head, gimple_build_assign (ddvar, integer_zero_node));
5677 unsigned count = lower_oacc_head_mark (loc, ddvar, clauses, head, ctx);
5678 tree fork_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_FORK);
5679 tree join_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_JOIN);
5681 gcc_assert (count);
5682 for (unsigned done = 1; count; count--, done++)
5684 gimple_seq fork_seq = NULL;
5685 gimple_seq join_seq = NULL;
5687 tree place = build_int_cst (integer_type_node, -1);
5688 gcall *fork = gimple_build_call_internal (IFN_UNIQUE, 3,
5689 fork_kind, ddvar, place);
5690 gimple_set_location (fork, loc);
5691 gimple_set_lhs (fork, ddvar);
5693 gcall *join = gimple_build_call_internal (IFN_UNIQUE, 3,
5694 join_kind, ddvar, place);
5695 gimple_set_location (join, loc);
5696 gimple_set_lhs (join, ddvar);
5698 /* Mark the beginning of this level sequence. */
5699 if (inner)
5700 lower_oacc_loop_marker (loc, ddvar, true,
5701 build_int_cst (integer_type_node, count),
5702 &fork_seq);
5703 lower_oacc_loop_marker (loc, ddvar, false,
5704 build_int_cst (integer_type_node, done),
5705 &join_seq);
5707 lower_oacc_reductions (loc, clauses, place, inner,
5708 fork, join, &fork_seq, &join_seq, ctx);
5710 /* Append this level to head. */
5711 gimple_seq_add_seq (head, fork_seq);
5712 /* Prepend it to tail. */
5713 gimple_seq_add_seq (&join_seq, *tail);
5714 *tail = join_seq;
5716 inner = true;
5719 /* Mark the end of the sequence. */
5720 lower_oacc_loop_marker (loc, ddvar, true, NULL_TREE, head);
5721 lower_oacc_loop_marker (loc, ddvar, false, NULL_TREE, tail);
5724 /* If exceptions are enabled, wrap the statements in BODY in a MUST_NOT_THROW
5725 catch handler and return it. This prevents programs from violating the
5726 structured block semantics with throws. */
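/* I.e., roughly:

     try { BODY } catch { <eh_must_not_throw: terminate or trap> }  */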
5728 static gimple_seq
5729 maybe_catch_exception (gimple_seq body)
5731 gimple *g;
5732 tree decl;
5734 if (!flag_exceptions)
5735 return body;
5737 if (lang_hooks.eh_protect_cleanup_actions != NULL)
5738 decl = lang_hooks.eh_protect_cleanup_actions ();
5739 else
5740 decl = builtin_decl_explicit (BUILT_IN_TRAP);
5742 g = gimple_build_eh_must_not_throw (decl);
5743 g = gimple_build_try (body, gimple_seq_alloc_with_stmt (g),
5744 GIMPLE_TRY_CATCH);
5746 return gimple_seq_alloc_with_stmt (g);
5750 /* Routines to lower OMP directives into OMP-GIMPLE. */
5752 /* If ctx is a worksharing context inside of a cancellable parallel
5753 region and it isn't nowait, add lhs to its GIMPLE_OMP_RETURN
5754 and conditional branch to parallel's cancel_label to handle
5755 cancellation in the implicit barrier. */
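/* Roughly, for the cancellable case this turns the implicit barrier into
   (a sketch):

     lhs = <GIMPLE_OMP_RETURN, barrier returning the cancellation status>;
     if (lhs != 0) goto <parallel's cancel_label>;
     <fallthru_label>:
*/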
5757 static void
5758 maybe_add_implicit_barrier_cancel (omp_context *ctx, gimple_seq *body)
5760 gimple *omp_return = gimple_seq_last_stmt (*body);
5761 gcc_assert (gimple_code (omp_return) == GIMPLE_OMP_RETURN);
5762 if (gimple_omp_return_nowait_p (omp_return))
5763 return;
5764 if (ctx->outer
5765 && gimple_code (ctx->outer->stmt) == GIMPLE_OMP_PARALLEL
5766 && ctx->outer->cancellable)
5768 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_CANCEL);
5769 tree c_bool_type = TREE_TYPE (TREE_TYPE (fndecl));
5770 tree lhs = create_tmp_var (c_bool_type);
5771 gimple_omp_return_set_lhs (omp_return, lhs);
5772 tree fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
5773 gimple *g = gimple_build_cond (NE_EXPR, lhs,
5774 fold_convert (c_bool_type,
5775 boolean_false_node),
5776 ctx->outer->cancel_label, fallthru_label);
5777 gimple_seq_add_stmt (body, g);
5778 gimple_seq_add_stmt (body, gimple_build_label (fallthru_label));
5782 /* Lower the OpenMP sections directive in the current statement in GSI_P.
5783 CTX is the enclosing OMP context for the current statement. */
5785 static void
5786 lower_omp_sections (gimple_stmt_iterator *gsi_p, omp_context *ctx)
5788 tree block, control;
5789 gimple_stmt_iterator tgsi;
5790 gomp_sections *stmt;
5791 gimple *t;
5792 gbind *new_stmt, *bind;
5793 gimple_seq ilist, dlist, olist, new_body;
5795 stmt = as_a <gomp_sections *> (gsi_stmt (*gsi_p));
5797 push_gimplify_context ();
5799 dlist = NULL;
5800 ilist = NULL;
5801 lower_rec_input_clauses (gimple_omp_sections_clauses (stmt),
5802 &ilist, &dlist, ctx, NULL);
5804 new_body = gimple_omp_body (stmt);
5805 gimple_omp_set_body (stmt, NULL);
5806 tgsi = gsi_start (new_body);
5807 for (; !gsi_end_p (tgsi); gsi_next (&tgsi))
5809 omp_context *sctx;
5810 gimple *sec_start;
5812 sec_start = gsi_stmt (tgsi);
5813 sctx = maybe_lookup_ctx (sec_start);
5814 gcc_assert (sctx);
5816 lower_omp (gimple_omp_body_ptr (sec_start), sctx);
5817 gsi_insert_seq_after (&tgsi, gimple_omp_body (sec_start),
5818 GSI_CONTINUE_LINKING);
5819 gimple_omp_set_body (sec_start, NULL);
5821 if (gsi_one_before_end_p (tgsi))
5823 gimple_seq l = NULL;
5824 lower_lastprivate_clauses (gimple_omp_sections_clauses (stmt), NULL,
5825 &l, ctx);
5826 gsi_insert_seq_after (&tgsi, l, GSI_CONTINUE_LINKING);
5827 gimple_omp_section_set_last (sec_start);
5830 gsi_insert_after (&tgsi, gimple_build_omp_return (false),
5831 GSI_CONTINUE_LINKING);
5834 block = make_node (BLOCK);
5835 bind = gimple_build_bind (NULL, new_body, block);
5837 olist = NULL;
5838 lower_reduction_clauses (gimple_omp_sections_clauses (stmt), &olist, ctx);
5840 block = make_node (BLOCK);
5841 new_stmt = gimple_build_bind (NULL, NULL, block);
5842 gsi_replace (gsi_p, new_stmt, true);
5844 pop_gimplify_context (new_stmt);
5845 gimple_bind_append_vars (new_stmt, ctx->block_vars);
5846 BLOCK_VARS (block) = gimple_bind_vars (bind);
5847 if (BLOCK_VARS (block))
5848 TREE_USED (block) = 1;
5850 new_body = NULL;
5851 gimple_seq_add_seq (&new_body, ilist);
5852 gimple_seq_add_stmt (&new_body, stmt);
5853 gimple_seq_add_stmt (&new_body, gimple_build_omp_sections_switch ());
5854 gimple_seq_add_stmt (&new_body, bind);
5856 control = create_tmp_var (unsigned_type_node, ".section");
5857 t = gimple_build_omp_continue (control, control);
5858 gimple_omp_sections_set_control (stmt, control);
5859 gimple_seq_add_stmt (&new_body, t);
5861 gimple_seq_add_seq (&new_body, olist);
5862 if (ctx->cancellable)
5863 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
5864 gimple_seq_add_seq (&new_body, dlist);
5866 new_body = maybe_catch_exception (new_body);
5868 bool nowait = omp_find_clause (gimple_omp_sections_clauses (stmt),
5869 OMP_CLAUSE_NOWAIT) != NULL_TREE;
5870 t = gimple_build_omp_return (nowait);
5871 gimple_seq_add_stmt (&new_body, t);
5872 maybe_add_implicit_barrier_cancel (ctx, &new_body);
5874 gimple_bind_set_body (new_stmt, new_body);
5878 /* A subroutine of lower_omp_single. Expand the simple form of
5879 a GIMPLE_OMP_SINGLE, without a copyprivate clause:
5881 if (GOMP_single_start ())
5882 BODY;
5883 [ GOMP_barrier (); ] -> unless 'nowait' is present.
5885 FIXME. It may be better to delay expanding the logic of this until
5886 pass_expand_omp. The expanded logic may make the job more difficult
5887 for a synchronization analysis pass. */
5889 static void
5890 lower_omp_single_simple (gomp_single *single_stmt, gimple_seq *pre_p)
5892 location_t loc = gimple_location (single_stmt);
5893 tree tlabel = create_artificial_label (loc);
5894 tree flabel = create_artificial_label (loc);
5895 gimple *call, *cond;
5896 tree lhs, decl;
5898 decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_START);
5899 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (decl)));
5900 call = gimple_build_call (decl, 0);
5901 gimple_call_set_lhs (call, lhs);
5902 gimple_seq_add_stmt (pre_p, call);
5904 cond = gimple_build_cond (EQ_EXPR, lhs,
5905 fold_convert_loc (loc, TREE_TYPE (lhs),
5906 boolean_true_node),
5907 tlabel, flabel);
5908 gimple_seq_add_stmt (pre_p, cond);
5909 gimple_seq_add_stmt (pre_p, gimple_build_label (tlabel));
5910 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
5911 gimple_seq_add_stmt (pre_p, gimple_build_label (flabel));
5915 /* A subroutine of lower_omp_single. Expand the simple form of
5916 a GIMPLE_OMP_SINGLE, with a copyprivate clause:
5918 #pragma omp single copyprivate (a, b, c)
5920 Create a new structure to hold copies of 'a', 'b' and 'c' and emit:
5923 if ((copyout_p = GOMP_single_copy_start ()) == NULL)
5925 BODY;
5926 copyout.a = a;
5927 copyout.b = b;
5928 copyout.c = c;
5929 GOMP_single_copy_end (&copyout);
5931 else
5933 a = copyout_p->a;
5934 b = copyout_p->b;
5935 c = copyout_p->c;
5937 GOMP_barrier ();
5940 FIXME. It may be better to delay expanding the logic of this until
5941 pass_expand_omp. The expanded logic may make the job more difficult
5942 for a synchronization analysis pass. */
5944 static void
5945 lower_omp_single_copy (gomp_single *single_stmt, gimple_seq *pre_p,
5946 omp_context *ctx)
5948 tree ptr_type, t, l0, l1, l2, bfn_decl;
5949 gimple_seq copyin_seq;
5950 location_t loc = gimple_location (single_stmt);
5952 ctx->sender_decl = create_tmp_var (ctx->record_type, ".omp_copy_o");
5954 ptr_type = build_pointer_type (ctx->record_type);
5955 ctx->receiver_decl = create_tmp_var (ptr_type, ".omp_copy_i");
5957 l0 = create_artificial_label (loc);
5958 l1 = create_artificial_label (loc);
5959 l2 = create_artificial_label (loc);
5961 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_START);
5962 t = build_call_expr_loc (loc, bfn_decl, 0);
5963 t = fold_convert_loc (loc, ptr_type, t);
5964 gimplify_assign (ctx->receiver_decl, t, pre_p);
5966 t = build2 (EQ_EXPR, boolean_type_node, ctx->receiver_decl,
5967 build_int_cst (ptr_type, 0));
5968 t = build3 (COND_EXPR, void_type_node, t,
5969 build_and_jump (&l0), build_and_jump (&l1));
5970 gimplify_and_add (t, pre_p);
5972 gimple_seq_add_stmt (pre_p, gimple_build_label (l0));
5974 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
5976 copyin_seq = NULL;
5977 lower_copyprivate_clauses (gimple_omp_single_clauses (single_stmt), pre_p,
5978 &copyin_seq, ctx);
5980 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
5981 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_END);
5982 t = build_call_expr_loc (loc, bfn_decl, 1, t);
5983 gimplify_and_add (t, pre_p);
5985 t = build_and_jump (&l2);
5986 gimplify_and_add (t, pre_p);
5988 gimple_seq_add_stmt (pre_p, gimple_build_label (l1));
5990 gimple_seq_add_seq (pre_p, copyin_seq);
5992 gimple_seq_add_stmt (pre_p, gimple_build_label (l2));
5996 /* Expand code for an OpenMP single directive. */
5998 static void
5999 lower_omp_single (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6001 tree block;
6002 gomp_single *single_stmt = as_a <gomp_single *> (gsi_stmt (*gsi_p));
6003 gbind *bind;
6004 gimple_seq bind_body, bind_body_tail = NULL, dlist;
6006 push_gimplify_context ();
6008 block = make_node (BLOCK);
6009 bind = gimple_build_bind (NULL, NULL, block);
6010 gsi_replace (gsi_p, bind, true);
6011 bind_body = NULL;
6012 dlist = NULL;
6013 lower_rec_input_clauses (gimple_omp_single_clauses (single_stmt),
6014 &bind_body, &dlist, ctx, NULL);
6015 lower_omp (gimple_omp_body_ptr (single_stmt), ctx);
6017 gimple_seq_add_stmt (&bind_body, single_stmt);
6019 if (ctx->record_type)
6020 lower_omp_single_copy (single_stmt, &bind_body, ctx);
6021 else
6022 lower_omp_single_simple (single_stmt, &bind_body);
6024 gimple_omp_set_body (single_stmt, NULL);
6026 gimple_seq_add_seq (&bind_body, dlist);
6028 bind_body = maybe_catch_exception (bind_body);
6030 bool nowait = omp_find_clause (gimple_omp_single_clauses (single_stmt),
6031 OMP_CLAUSE_NOWAIT) != NULL_TREE;
6032 gimple *g = gimple_build_omp_return (nowait);
6033 gimple_seq_add_stmt (&bind_body_tail, g);
6034 maybe_add_implicit_barrier_cancel (ctx, &bind_body_tail);
6035 if (ctx->record_type)
6037 gimple_stmt_iterator gsi = gsi_start (bind_body_tail);
6038 tree clobber = build_constructor (ctx->record_type, NULL);
6039 TREE_THIS_VOLATILE (clobber) = 1;
6040 gsi_insert_after (&gsi, gimple_build_assign (ctx->sender_decl,
6041 clobber), GSI_SAME_STMT);
6043 gimple_seq_add_seq (&bind_body, bind_body_tail);
6044 gimple_bind_set_body (bind, bind_body);
6046 pop_gimplify_context (bind);
6048 gimple_bind_append_vars (bind, ctx->block_vars);
6049 BLOCK_VARS (block) = ctx->block_vars;
6050 if (BLOCK_VARS (block))
6051 TREE_USED (block) = 1;
6055 /* Expand code for an OpenMP master directive. */
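/* The lowered form is roughly (a sketch):

     if (omp_get_thread_num () != 0) goto lab;
     BODY;
     lab:
*/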
6057 static void
6058 lower_omp_master (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6060 tree block, lab = NULL, x, bfn_decl;
6061 gimple *stmt = gsi_stmt (*gsi_p);
6062 gbind *bind;
6063 location_t loc = gimple_location (stmt);
6064 gimple_seq tseq;
6066 push_gimplify_context ();
6068 block = make_node (BLOCK);
6069 bind = gimple_build_bind (NULL, NULL, block);
6070 gsi_replace (gsi_p, bind, true);
6071 gimple_bind_add_stmt (bind, stmt);
6073 bfn_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
6074 x = build_call_expr_loc (loc, bfn_decl, 0);
6075 x = build2 (EQ_EXPR, boolean_type_node, x, integer_zero_node);
6076 x = build3 (COND_EXPR, void_type_node, x, NULL, build_and_jump (&lab));
6077 tseq = NULL;
6078 gimplify_and_add (x, &tseq);
6079 gimple_bind_add_seq (bind, tseq);
6081 lower_omp (gimple_omp_body_ptr (stmt), ctx);
6082 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
6083 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
6084 gimple_omp_set_body (stmt, NULL);
6086 gimple_bind_add_stmt (bind, gimple_build_label (lab));
6088 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
6090 pop_gimplify_context (bind);
6092 gimple_bind_append_vars (bind, ctx->block_vars);
6093 BLOCK_VARS (block) = ctx->block_vars;
6097 /* Expand code for an OpenMP taskgroup directive. */
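/* A sketch of the lowering: GOMP_taskgroup_start () is emitted before the
   body here, and the matching GOMP_taskgroup_end () is presumably emitted
   later, when the trailing GIMPLE_OMP_RETURN is expanded:

     GOMP_taskgroup_start ();
     BODY;
     <GIMPLE_OMP_RETURN>   -> GOMP_taskgroup_end ()
*/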
6099 static void
6100 lower_omp_taskgroup (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6102 gimple *stmt = gsi_stmt (*gsi_p);
6103 gcall *x;
6104 gbind *bind;
6105 tree block = make_node (BLOCK);
6107 bind = gimple_build_bind (NULL, NULL, block);
6108 gsi_replace (gsi_p, bind, true);
6109 gimple_bind_add_stmt (bind, stmt);
6111 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_START),
6112 0);
6113 gimple_bind_add_stmt (bind, x);
6115 lower_omp (gimple_omp_body_ptr (stmt), ctx);
6116 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
6117 gimple_omp_set_body (stmt, NULL);
6119 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
6121 gimple_bind_append_vars (bind, ctx->block_vars);
6122 BLOCK_VARS (block) = ctx->block_vars;
6126 /* Fold the OMP_ORDERED_CLAUSES for the OMP_ORDERED in ORD_STMT if possible. */
6128 static void
6129 lower_omp_ordered_clauses (gimple_stmt_iterator *gsi_p, gomp_ordered *ord_stmt,
6130 omp_context *ctx)
6132 struct omp_for_data fd;
6133 if (!ctx->outer || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR)
6134 return;
6136 unsigned int len = gimple_omp_for_collapse (ctx->outer->stmt);
6137 struct omp_for_data_loop *loops = XALLOCAVEC (struct omp_for_data_loop, len);
6138 omp_extract_for_data (as_a <gomp_for *> (ctx->outer->stmt), &fd, loops);
6139 if (!fd.ordered)
6140 return;
6142 tree *list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
6143 tree c = gimple_omp_ordered_clauses (ord_stmt);
6144 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
6145 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
6147 /* Merge depend clauses from multiple adjacent
6148 #pragma omp ordered depend(sink:...) constructs
6149 into one #pragma omp ordered depend(sink:...), so that
6150 we can optimize them together. */
6151 gimple_stmt_iterator gsi = *gsi_p;
6152 gsi_next (&gsi);
6153 while (!gsi_end_p (gsi))
6155 gimple *stmt = gsi_stmt (gsi);
6156 if (is_gimple_debug (stmt)
6157 || gimple_code (stmt) == GIMPLE_NOP)
6159 gsi_next (&gsi);
6160 continue;
6162 if (gimple_code (stmt) != GIMPLE_OMP_ORDERED)
6163 break;
6164 gomp_ordered *ord_stmt2 = as_a <gomp_ordered *> (stmt);
6165 c = gimple_omp_ordered_clauses (ord_stmt2);
6166 if (c == NULL_TREE
6167 || OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND
6168 || OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
6169 break;
6170 while (*list_p)
6171 list_p = &OMP_CLAUSE_CHAIN (*list_p);
6172 *list_p = c;
6173 gsi_remove (&gsi, true);
6177 /* Canonicalize sink dependence clauses into one folded clause if
6178 possible.
6180 The basic algorithm is to create a sink vector whose first
6181 element is the GCD of all the first elements, and whose remaining
6182 elements are the minimum of the subsequent columns.
6184 We ignore dependence vectors whose first element is zero because
6185 such dependencies are known to be executed by the same thread.
6187 We take into account the direction of the loop, so a minimum
6188 becomes a maximum if the loop is iterating forwards. We also
6189 ignore sink clauses where the loop direction is unknown, or where
6190 the offsets are clearly invalid because they are not a multiple
6191 of the loop increment.
6193 For example:
6195 #pragma omp for ordered(2)
6196 for (i=0; i < N; ++i)
6197 for (j=0; j < M; ++j)
6199 #pragma omp ordered \
6200 depend(sink:i-8,j-2) \
6201 depend(sink:i,j-1) \ // Completely ignored because i+0.
6202 depend(sink:i-4,j-3) \
6203 depend(sink:i-6,j-4)
6204 #pragma omp ordered depend(source)
6207 Folded clause is:
6209 depend(sink:-gcd(8,4,6),-min(2,3,4))
6210 -or-
6211 depend(sink:-2,-2)
6214 /* FIXME: Computing GCDs where the first element is zero is
6215 non-trivial in the presence of collapsed loops. Do this later. */
6216 if (fd.collapse > 1)
6217 return;
6219 wide_int *folded_deps = XALLOCAVEC (wide_int, 2 * len - 1);
6220 memset (folded_deps, 0, sizeof (*folded_deps) * (2 * len - 1));
6221 tree folded_dep = NULL_TREE;
6222 /* TRUE if the first dimension's offset is negative. */
6223 bool neg_offset_p = false;
6225 list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
6226 unsigned int i;
6227 while ((c = *list_p) != NULL)
6229 bool remove = false;
6231 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND);
6232 if (OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
6233 goto next_ordered_clause;
6235 tree vec;
6236 for (vec = OMP_CLAUSE_DECL (c), i = 0;
6237 vec && TREE_CODE (vec) == TREE_LIST;
6238 vec = TREE_CHAIN (vec), ++i)
6240 gcc_assert (i < len);
6242 /* omp_extract_for_data has canonicalized the condition. */
6243 gcc_assert (fd.loops[i].cond_code == LT_EXPR
6244 || fd.loops[i].cond_code == GT_EXPR);
6245 bool forward = fd.loops[i].cond_code == LT_EXPR;
6246 bool maybe_lexically_later = true;
6248 /* While the committee makes up its mind, bail if we have any
6249 non-constant steps. */
6250 if (TREE_CODE (fd.loops[i].step) != INTEGER_CST)
6251 goto lower_omp_ordered_ret;
6253 tree itype = TREE_TYPE (TREE_VALUE (vec));
6254 if (POINTER_TYPE_P (itype))
6255 itype = sizetype;
6256 wide_int offset = wide_int::from (TREE_PURPOSE (vec),
6257 TYPE_PRECISION (itype),
6258 TYPE_SIGN (itype));
6260 /* Ignore invalid offsets that are not multiples of the step. */
6261 if (!wi::multiple_of_p
6262 (wi::abs (offset), wi::abs ((wide_int) fd.loops[i].step),
6263 UNSIGNED))
6265 warning_at (OMP_CLAUSE_LOCATION (c), 0,
6266 "ignoring sink clause with offset that is not "
6267 "a multiple of the loop step");
6268 remove = true;
6269 goto next_ordered_clause;
6272 /* Calculate the first dimension. The first dimension of
6273 the folded dependency vector is the GCD of the first
6274 elements, while ignoring any first elements whose offset
6275 is 0. */
6276 if (i == 0)
6278 /* Ignore dependence vectors whose first dimension is 0. */
6279 if (offset == 0)
6281 remove = true;
6282 goto next_ordered_clause;
6284 else
6286 if (!TYPE_UNSIGNED (itype) && (forward ^ wi::neg_p (offset)))
6288 error_at (OMP_CLAUSE_LOCATION (c),
6289 "first offset must be in opposite direction "
6290 "of loop iterations");
6291 goto lower_omp_ordered_ret;
6293 if (forward)
6294 offset = -offset;
6295 neg_offset_p = forward;
6296 /* Initialize the first time around. */
6297 if (folded_dep == NULL_TREE)
6299 folded_dep = c;
6300 folded_deps[0] = offset;
6302 else
6303 folded_deps[0] = wi::gcd (folded_deps[0],
6304 offset, UNSIGNED);
6307 /* Calculate minimum for the remaining dimensions. */
6308 else
6310 folded_deps[len + i - 1] = offset;
6311 if (folded_dep == c)
6312 folded_deps[i] = offset;
6313 else if (maybe_lexically_later
6314 && !wi::eq_p (folded_deps[i], offset))
6316 if (forward ^ wi::gts_p (folded_deps[i], offset))
6318 unsigned int j;
6319 folded_dep = c;
6320 for (j = 1; j <= i; j++)
6321 folded_deps[j] = folded_deps[len + j - 1];
6323 else
6324 maybe_lexically_later = false;
6328 gcc_assert (i == len);
6330 remove = true;
6332 next_ordered_clause:
6333 if (remove)
6334 *list_p = OMP_CLAUSE_CHAIN (c);
6335 else
6336 list_p = &OMP_CLAUSE_CHAIN (c);
6339 if (folded_dep)
6341 if (neg_offset_p)
6342 folded_deps[0] = -folded_deps[0];
6344 tree itype = TREE_TYPE (TREE_VALUE (OMP_CLAUSE_DECL (folded_dep)));
6345 if (POINTER_TYPE_P (itype))
6346 itype = sizetype;
6348 TREE_PURPOSE (OMP_CLAUSE_DECL (folded_dep))
6349 = wide_int_to_tree (itype, folded_deps[0]);
6350 OMP_CLAUSE_CHAIN (folded_dep) = gimple_omp_ordered_clauses (ord_stmt);
6351 *gimple_omp_ordered_clauses_ptr (ord_stmt) = folded_dep;
6354 lower_omp_ordered_ret:
6356 /* Ordered without clauses is equivalent to #pragma omp ordered threads,
6357 while we want a nop instead if we remove all clauses. */
6358 if (gimple_omp_ordered_clauses (ord_stmt) == NULL_TREE)
6359 gsi_replace (gsi_p, gimple_build_nop (), true);
6363 /* Expand code for an OpenMP ordered directive. */
6365 static void
6366 lower_omp_ordered (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6368 tree block;
6369 gimple *stmt = gsi_stmt (*gsi_p), *g;
6370 gomp_ordered *ord_stmt = as_a <gomp_ordered *> (stmt);
6371 gcall *x;
6372 gbind *bind;
6373 bool simd = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
6374 OMP_CLAUSE_SIMD);
6375 /* FIXME: this should check presence of OMP_CLAUSE__SIMT_ on the enclosing
6376 loop. */
6377 bool maybe_simt
6378 = simd && omp_maybe_offloaded_ctx (ctx) && omp_max_simt_vf () > 1;
6379 bool threads = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
6380 OMP_CLAUSE_THREADS);
6382 if (omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
6383 OMP_CLAUSE_DEPEND))
6385 /* FIXME: This needs to be moved to the expansion pass, to verify various
6386 conditions only testable on a CFG with dominators computed; also,
6387 all the depend clauses to be merged might still need to be available
6388 for the runtime checks. */
6389 if (0)
6390 lower_omp_ordered_clauses (gsi_p, ord_stmt, ctx);
6391 return;
6394 push_gimplify_context ();
6396 block = make_node (BLOCK);
6397 bind = gimple_build_bind (NULL, NULL, block);
6398 gsi_replace (gsi_p, bind, true);
6399 gimple_bind_add_stmt (bind, stmt);
6401 if (simd)
6403 x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_START, 1,
6404 build_int_cst (NULL_TREE, threads));
6405 cfun->has_simduid_loops = true;
6407 else
6408 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_START),
6409 0);
6410 gimple_bind_add_stmt (bind, x);
6412 tree counter = NULL_TREE, test = NULL_TREE, body = NULL_TREE;
6413 if (maybe_simt)
6415 counter = create_tmp_var (integer_type_node);
6416 g = gimple_build_call_internal (IFN_GOMP_SIMT_LANE, 0);
6417 gimple_call_set_lhs (g, counter);
6418 gimple_bind_add_stmt (bind, g);
6420 body = create_artificial_label (UNKNOWN_LOCATION);
6421 test = create_artificial_label (UNKNOWN_LOCATION);
6422 gimple_bind_add_stmt (bind, gimple_build_label (body));
6424 tree simt_pred = create_tmp_var (integer_type_node);
6425 g = gimple_build_call_internal (IFN_GOMP_SIMT_ORDERED_PRED, 1, counter);
6426 gimple_call_set_lhs (g, simt_pred);
6427 gimple_bind_add_stmt (bind, g);
6429 tree t = create_artificial_label (UNKNOWN_LOCATION);
6430 g = gimple_build_cond (EQ_EXPR, simt_pred, integer_zero_node, t, test);
6431 gimple_bind_add_stmt (bind, g);
6433 gimple_bind_add_stmt (bind, gimple_build_label (t));
6435 lower_omp (gimple_omp_body_ptr (stmt), ctx);
6436 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
6437 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
6438 gimple_omp_set_body (stmt, NULL);
6440 if (maybe_simt)
6442 gimple_bind_add_stmt (bind, gimple_build_label (test));
6443 g = gimple_build_assign (counter, MINUS_EXPR, counter, integer_one_node);
6444 gimple_bind_add_stmt (bind, g);
6446 tree c = build2 (GE_EXPR, boolean_type_node, counter, integer_zero_node);
6447 tree nonneg = create_tmp_var (integer_type_node);
6448 gimple_seq tseq = NULL;
6449 gimplify_assign (nonneg, fold_convert (integer_type_node, c), &tseq);
6450 gimple_bind_add_seq (bind, tseq);
6452 g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY, 1, nonneg);
6453 gimple_call_set_lhs (g, nonneg);
6454 gimple_bind_add_stmt (bind, g);
6456 tree end = create_artificial_label (UNKNOWN_LOCATION);
6457 g = gimple_build_cond (NE_EXPR, nonneg, integer_zero_node, body, end);
6458 gimple_bind_add_stmt (bind, g);
6460 gimple_bind_add_stmt (bind, gimple_build_label (end));
6462 if (simd)
6463 x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_END, 1,
6464 build_int_cst (NULL_TREE, threads));
6465 else
6466 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_END),
6467 0);
6468 gimple_bind_add_stmt (bind, x);
6470 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
6472 pop_gimplify_context (bind);
6474 gimple_bind_append_vars (bind, ctx->block_vars);
6475 BLOCK_VARS (block) = gimple_bind_vars (bind);
6479 /* Gimplify a GIMPLE_OMP_CRITICAL statement. This is a relatively simple
6480 substitution of a couple of function calls. But the NAMED case
6481 requires that languages coordinate a symbol name; it is therefore
6482 best put here in common code. */
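/* For reference, the named form lowers roughly to (a sketch):

     GOMP_critical_name_start (&.gomp_critical_user_NAME);
     BODY;
     GOMP_critical_name_end (&.gomp_critical_user_NAME);

   while the unnamed form uses GOMP_critical_start ()/GOMP_critical_end ().  */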
6484 static GTY(()) hash_map<tree, tree> *critical_name_mutexes;
6486 static void
6487 lower_omp_critical (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6489 tree block;
6490 tree name, lock, unlock;
6491 gomp_critical *stmt = as_a <gomp_critical *> (gsi_stmt (*gsi_p));
6492 gbind *bind;
6493 location_t loc = gimple_location (stmt);
6494 gimple_seq tbody;
6496 name = gimple_omp_critical_name (stmt);
6497 if (name)
6499 tree decl;
6501 if (!critical_name_mutexes)
6502 critical_name_mutexes = hash_map<tree, tree>::create_ggc (10);
6504 tree *n = critical_name_mutexes->get (name);
6505 if (n == NULL)
6507 char *new_str;
6509 decl = create_tmp_var_raw (ptr_type_node);
6511 new_str = ACONCAT ((".gomp_critical_user_",
6512 IDENTIFIER_POINTER (name), NULL));
6513 DECL_NAME (decl) = get_identifier (new_str);
6514 TREE_PUBLIC (decl) = 1;
6515 TREE_STATIC (decl) = 1;
6516 DECL_COMMON (decl) = 1;
6517 DECL_ARTIFICIAL (decl) = 1;
6518 DECL_IGNORED_P (decl) = 1;
6520 varpool_node::finalize_decl (decl);
6522 critical_name_mutexes->put (name, decl);
6524 else
6525 decl = *n;
6527 /* If '#pragma omp critical' is inside offloaded region or
6528 inside function marked as offloadable, the symbol must be
6529 marked as offloadable too. */
6530 omp_context *octx;
6531 if (cgraph_node::get (current_function_decl)->offloadable)
6532 varpool_node::get_create (decl)->offloadable = 1;
6533 else
6534 for (octx = ctx->outer; octx; octx = octx->outer)
6535 if (is_gimple_omp_offloaded (octx->stmt))
6537 varpool_node::get_create (decl)->offloadable = 1;
6538 break;
6541 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_START);
6542 lock = build_call_expr_loc (loc, lock, 1,
6543 build_fold_addr_expr_loc (loc, decl));
6545 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_END);
6546 unlock = build_call_expr_loc (loc, unlock, 1,
6547 build_fold_addr_expr_loc (loc, decl));
6549 else
6551 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_START);
6552 lock = build_call_expr_loc (loc, lock, 0);
6554 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_END);
6555 unlock = build_call_expr_loc (loc, unlock, 0);
6558 push_gimplify_context ();
6560 block = make_node (BLOCK);
6561 bind = gimple_build_bind (NULL, NULL, block);
6562 gsi_replace (gsi_p, bind, true);
6563 gimple_bind_add_stmt (bind, stmt);
6565 tbody = gimple_bind_body (bind);
6566 gimplify_and_add (lock, &tbody);
6567 gimple_bind_set_body (bind, tbody);
6569 lower_omp (gimple_omp_body_ptr (stmt), ctx);
6570 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
6571 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
6572 gimple_omp_set_body (stmt, NULL);
6574 tbody = gimple_bind_body (bind);
6575 gimplify_and_add (unlock, &tbody);
6576 gimple_bind_set_body (bind, tbody);
6578 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
6580 pop_gimplify_context (bind);
6581 gimple_bind_append_vars (bind, ctx->block_vars);
6582 BLOCK_VARS (block) = gimple_bind_vars (bind);
6585 /* A subroutine of lower_omp_for. Generate code to emit the predicate
6586 for a lastprivate clause. Given a loop control predicate of (V
6587 cond N2), we gate the clause on (!(V cond N2)). The lowered form
6588 is appended to *DLIST, and iterator initialization is appended to
6589 *BODY_P. */
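/* For example, for a loop 'for (V = N1; V < N2; V += STEP)' this emits
   roughly (a sketch):

     V = N1;                      <- appended to *BODY_P
     ...
     if (V >= N2)                 <- V == N2 when |STEP| is 1
       <lastprivate copy-outs>;   <- prepended to *DLIST
*/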
6591 static void
6592 lower_omp_for_lastprivate (struct omp_for_data *fd, gimple_seq *body_p,
6593 gimple_seq *dlist, struct omp_context *ctx)
6595 tree clauses, cond, vinit;
6596 enum tree_code cond_code;
6597 gimple_seq stmts;
6599 cond_code = fd->loop.cond_code;
6600 cond_code = cond_code == LT_EXPR ? GE_EXPR : LE_EXPR;
6602 /* When possible, use a strict equality expression. This can let
6603 VRP-style optimizations deduce the value and remove a copy. */
6604 if (tree_fits_shwi_p (fd->loop.step))
6606 HOST_WIDE_INT step = tree_to_shwi (fd->loop.step);
6607 if (step == 1 || step == -1)
6608 cond_code = EQ_EXPR;
6611 if (gimple_omp_for_kind (fd->for_stmt) == GF_OMP_FOR_KIND_GRID_LOOP
6612 || gimple_omp_for_grid_phony (fd->for_stmt))
6613 cond = omp_grid_lastprivate_predicate (fd);
6614 else
6616 tree n2 = fd->loop.n2;
6617 if (fd->collapse > 1
6618 && TREE_CODE (n2) != INTEGER_CST
6619 && gimple_omp_for_combined_into_p (fd->for_stmt))
6621 struct omp_context *taskreg_ctx = NULL;
6622 if (gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
6624 gomp_for *gfor = as_a <gomp_for *> (ctx->outer->stmt);
6625 if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_FOR
6626 || gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_DISTRIBUTE)
6628 if (gimple_omp_for_combined_into_p (gfor))
6630 gcc_assert (ctx->outer->outer
6631 && is_parallel_ctx (ctx->outer->outer));
6632 taskreg_ctx = ctx->outer->outer;
6634 else
6636 struct omp_for_data outer_fd;
6637 omp_extract_for_data (gfor, &outer_fd, NULL);
6638 n2 = fold_convert (TREE_TYPE (n2), outer_fd.loop.n2);
6641 else if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_TASKLOOP)
6642 taskreg_ctx = ctx->outer->outer;
6644 else if (is_taskreg_ctx (ctx->outer))
6645 taskreg_ctx = ctx->outer;
6646 if (taskreg_ctx)
6648 int i;
6649 tree taskreg_clauses
6650 = gimple_omp_taskreg_clauses (taskreg_ctx->stmt);
6651 tree innerc = omp_find_clause (taskreg_clauses,
6652 OMP_CLAUSE__LOOPTEMP_);
6653 gcc_assert (innerc);
6654 for (i = 0; i < fd->collapse; i++)
6656 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
6657 OMP_CLAUSE__LOOPTEMP_);
6658 gcc_assert (innerc);
6660 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
6661 OMP_CLAUSE__LOOPTEMP_);
6662 if (innerc)
6663 n2 = fold_convert (TREE_TYPE (n2),
6664 lookup_decl (OMP_CLAUSE_DECL (innerc),
6665 taskreg_ctx));
6668 cond = build2 (cond_code, boolean_type_node, fd->loop.v, n2);
6671 clauses = gimple_omp_for_clauses (fd->for_stmt);
6672 stmts = NULL;
6673 lower_lastprivate_clauses (clauses, cond, &stmts, ctx);
6674 if (!gimple_seq_empty_p (stmts))
6676 gimple_seq_add_seq (&stmts, *dlist);
6677 *dlist = stmts;
6679 /* Optimize: v = 0; is usually cheaper than v = some_other_constant. */
6680 vinit = fd->loop.n1;
6681 if (cond_code == EQ_EXPR
6682 && tree_fits_shwi_p (fd->loop.n2)
6683 && ! integer_zerop (fd->loop.n2))
6684 vinit = build_int_cst (TREE_TYPE (fd->loop.v), 0);
6685 else
6686 vinit = unshare_expr (vinit);
6688 /* Initialize the iterator variable, so that threads that don't execute
6689 any iterations don't execute the lastprivate clauses by accident. */
6690 gimplify_assign (fd->loop.v, vinit, body_p);
6695 /* Lower code for an OMP loop directive. */
6697 static void
6698 lower_omp_for (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6700 tree *rhs_p, block;
6701 struct omp_for_data fd, *fdp = NULL;
6702 gomp_for *stmt = as_a <gomp_for *> (gsi_stmt (*gsi_p));
6703 gbind *new_stmt;
6704 gimple_seq omp_for_body, body, dlist;
6705 gimple_seq oacc_head = NULL, oacc_tail = NULL;
6706 size_t i;
6708 push_gimplify_context ();
6710 lower_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
6712 block = make_node (BLOCK);
6713 new_stmt = gimple_build_bind (NULL, NULL, block);
6714 /* Replace at gsi right away, so that 'stmt' is no longer a member
6715 of a sequence, as we're going to add it to a different
6716 one below. */
6717 gsi_replace (gsi_p, new_stmt, true);
6719 /* Move declaration of temporaries in the loop body before we make
6720 it go away. */
6721 omp_for_body = gimple_omp_body (stmt);
6722 if (!gimple_seq_empty_p (omp_for_body)
6723 && gimple_code (gimple_seq_first_stmt (omp_for_body)) == GIMPLE_BIND)
6725 gbind *inner_bind
6726 = as_a <gbind *> (gimple_seq_first_stmt (omp_for_body));
6727 tree vars = gimple_bind_vars (inner_bind);
6728 gimple_bind_append_vars (new_stmt, vars);
6729 /* bind_vars/BLOCK_VARS are being moved to new_stmt/block, don't
6730 keep them on the inner_bind and its block. */
6731 gimple_bind_set_vars (inner_bind, NULL_TREE);
6732 if (gimple_bind_block (inner_bind))
6733 BLOCK_VARS (gimple_bind_block (inner_bind)) = NULL_TREE;
6736 if (gimple_omp_for_combined_into_p (stmt))
6738 omp_extract_for_data (stmt, &fd, NULL);
6739 fdp = &fd;
6741 /* We need two temporaries with fd.loop.v type (istart/iend)
6742 and then (fd.collapse - 1) temporaries with the same
6743 type for count2 ... countN-1 vars if not constant. */
6744 size_t count = 2;
6745 tree type = fd.iter_type;
6746 if (fd.collapse > 1
6747 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
6748 count += fd.collapse - 1;
6749 bool taskreg_for
6750 = (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR
6751 || gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP);
6752 tree outerc = NULL, *pc = gimple_omp_for_clauses_ptr (stmt);
6753 tree clauses = *pc;
6754 if (taskreg_for)
6755 outerc
6756 = omp_find_clause (gimple_omp_taskreg_clauses (ctx->outer->stmt),
6757 OMP_CLAUSE__LOOPTEMP_);
6758 for (i = 0; i < count; i++)
6760 tree temp;
6761 if (taskreg_for)
6763 gcc_assert (outerc);
6764 temp = lookup_decl (OMP_CLAUSE_DECL (outerc), ctx->outer);
6765 outerc = omp_find_clause (OMP_CLAUSE_CHAIN (outerc),
6766 OMP_CLAUSE__LOOPTEMP_);
6768 else
6770 temp = create_tmp_var (type);
6771 insert_decl_map (&ctx->outer->cb, temp, temp);
6773 *pc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
6774 OMP_CLAUSE_DECL (*pc) = temp;
6775 pc = &OMP_CLAUSE_CHAIN (*pc);
6777 *pc = clauses;
6780 /* The pre-body and input clauses go before the lowered GIMPLE_OMP_FOR. */
6781 dlist = NULL;
6782 body = NULL;
6783 lower_rec_input_clauses (gimple_omp_for_clauses (stmt), &body, &dlist, ctx,
6784 fdp);
6785 gimple_seq_add_seq (&body, gimple_omp_for_pre_body (stmt));
6787 lower_omp (gimple_omp_body_ptr (stmt), ctx);
6789 /* Lower the header expressions. At this point, we can assume that
6790 the header is of the form:
6792 #pragma omp for (V = VAL1; V {<|>|<=|>=} VAL2; V = V [+-] VAL3)
6794 We just need to make sure that VAL1, VAL2 and VAL3 are lowered
6795 using the .omp_data_s mapping, if needed. */
6796 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
6798 rhs_p = gimple_omp_for_initial_ptr (stmt, i);
6799 if (!is_gimple_min_invariant (*rhs_p))
6800 *rhs_p = get_formal_tmp_var (*rhs_p, &body);
6802 rhs_p = gimple_omp_for_final_ptr (stmt, i);
6803 if (!is_gimple_min_invariant (*rhs_p))
6804 *rhs_p = get_formal_tmp_var (*rhs_p, &body);
6806 rhs_p = &TREE_OPERAND (gimple_omp_for_incr (stmt, i), 1);
6807 if (!is_gimple_min_invariant (*rhs_p))
6808 *rhs_p = get_formal_tmp_var (*rhs_p, &body);
6811 /* Once lowered, extract the bounds and clauses. */
6812 omp_extract_for_data (stmt, &fd, NULL);
6814 if (is_gimple_omp_oacc (ctx->stmt)
6815 && !ctx_in_oacc_kernels_region (ctx))
6816 lower_oacc_head_tail (gimple_location (stmt),
6817 gimple_omp_for_clauses (stmt),
6818 &oacc_head, &oacc_tail, ctx);
6820 /* Add OpenACC partitioning and reduction markers just before the loop. */
6821 if (oacc_head)
6822 gimple_seq_add_seq (&body, oacc_head);
6824 lower_omp_for_lastprivate (&fd, &body, &dlist, ctx);
6826 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
6827 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
6828 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
6829 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
6831 OMP_CLAUSE_DECL (c) = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
6832 if (DECL_P (OMP_CLAUSE_LINEAR_STEP (c)))
6833 OMP_CLAUSE_LINEAR_STEP (c)
6834 = maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_LINEAR_STEP (c),
6835 ctx);
6838 bool phony_loop = (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_GRID_LOOP
6839 && gimple_omp_for_grid_phony (stmt));
6840 if (!phony_loop)
6841 gimple_seq_add_stmt (&body, stmt);
6842 gimple_seq_add_seq (&body, gimple_omp_body (stmt));
6844 if (!phony_loop)
6845 gimple_seq_add_stmt (&body, gimple_build_omp_continue (fd.loop.v,
6846 fd.loop.v));
6848 /* After the loop, add exit clauses. */
6849 lower_reduction_clauses (gimple_omp_for_clauses (stmt), &body, ctx);
6851 if (ctx->cancellable)
6852 gimple_seq_add_stmt (&body, gimple_build_label (ctx->cancel_label));
6854 gimple_seq_add_seq (&body, dlist);
6856 body = maybe_catch_exception (body);
6858 if (!phony_loop)
6860 /* Region exit marker goes at the end of the loop body. */
6861 gimple_seq_add_stmt (&body, gimple_build_omp_return (fd.have_nowait));
6862 maybe_add_implicit_barrier_cancel (ctx, &body);
6865 /* Add OpenACC joining and reduction markers just after the loop. */
6866 if (oacc_tail)
6867 gimple_seq_add_seq (&body, oacc_tail);
6869 pop_gimplify_context (new_stmt);
6871 gimple_bind_append_vars (new_stmt, ctx->block_vars);
6872 BLOCK_VARS (block) = gimple_bind_vars (new_stmt);
6873 if (BLOCK_VARS (block))
6874 TREE_USED (block) = 1;
6876 gimple_bind_set_body (new_stmt, body);
6877 gimple_omp_set_body (stmt, NULL);
6878 gimple_omp_for_set_pre_body (stmt, NULL);
6881 /* Callback for walk_stmts. Used to determine whether the parallel body
6882 contains exactly one GIMPLE_OMP_FOR or GIMPLE_OMP_SECTIONS statement. */
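/* Encoding used in *INFO below: 0 = nothing seen yet, 1 = exactly one
   worksharing construct seen so far, -1 = something else seen, so the
   parallel cannot be marked as combined.  */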
6884 static tree
6885 check_combined_parallel (gimple_stmt_iterator *gsi_p,
6886 bool *handled_ops_p,
6887 struct walk_stmt_info *wi)
6889 int *info = (int *) wi->info;
6890 gimple *stmt = gsi_stmt (*gsi_p);
6892 *handled_ops_p = true;
6893 switch (gimple_code (stmt))
6895 WALK_SUBSTMTS;
6897 case GIMPLE_OMP_FOR:
6898 case GIMPLE_OMP_SECTIONS:
6899 *info = *info == 0 ? 1 : -1;
6900 break;
6901 default:
6902 *info = -1;
6903 break;
6905 return NULL;
6908 struct omp_taskcopy_context
6910 /* This field must be at the beginning, as we do "inheritance": Some
6911 callback functions for tree-inline.c (e.g., omp_copy_decl)
6912 receive a copy_body_data pointer that is up-casted to an
6913 omp_context pointer. */
6914 copy_body_data cb;
6915 omp_context *ctx;
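/* copy_body_data callback for create_task_copyfn: variables that have a
   sender field (i.e. are communicated to the task) get a fresh temporary
   in the destination function; everything else is returned unchanged.  */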
6918 static tree
6919 task_copyfn_copy_decl (tree var, copy_body_data *cb)
6921 struct omp_taskcopy_context *tcctx = (struct omp_taskcopy_context *) cb;
6923 if (splay_tree_lookup (tcctx->ctx->sfield_map, (splay_tree_key) var))
6924 return create_tmp_var (TREE_TYPE (var));
6926 return var;
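/* Build a copy of ORIG_TYPE for the task copy function, remapping any
   variably modified field types, sizes and offsets through TCCTX's
   copy_body_data.  */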
6929 static tree
6930 task_copyfn_remap_type (struct omp_taskcopy_context *tcctx, tree orig_type)
6932 tree name, new_fields = NULL, type, f;
6934 type = lang_hooks.types.make_type (RECORD_TYPE);
6935 name = DECL_NAME (TYPE_NAME (orig_type));
6936 name = build_decl (gimple_location (tcctx->ctx->stmt),
6937 TYPE_DECL, name, type);
6938 TYPE_NAME (type) = name;
6940 for (f = TYPE_FIELDS (orig_type); f ; f = TREE_CHAIN (f))
6942 tree new_f = copy_node (f);
6943 DECL_CONTEXT (new_f) = type;
6944 TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &tcctx->cb);
6945 TREE_CHAIN (new_f) = new_fields;
6946 walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &tcctx->cb, NULL);
6947 walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r, &tcctx->cb, NULL);
6948 walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
6949 &tcctx->cb, NULL);
6950 new_fields = new_f;
6951 tcctx->cb.decl_map->put (f, new_f);
6953 TYPE_FIELDS (type) = nreverse (new_fields);
6954 layout_type (type);
6955 return type;
6958 /* Create task copyfn. */
6960 static void
6961 create_task_copyfn (gomp_task *task_stmt, omp_context *ctx)
6963 struct function *child_cfun;
6964 tree child_fn, t, c, src, dst, f, sf, arg, sarg, decl;
6965 tree record_type, srecord_type, bind, list;
6966 bool record_needs_remap = false, srecord_needs_remap = false;
6967 splay_tree_node n;
6968 struct omp_taskcopy_context tcctx;
6969 location_t loc = gimple_location (task_stmt);
6971 child_fn = gimple_omp_task_copy_fn (task_stmt);
6972 child_cfun = DECL_STRUCT_FUNCTION (child_fn);
6973 gcc_assert (child_cfun->cfg == NULL);
6974 DECL_SAVED_TREE (child_fn) = alloc_stmt_list ();
6976 /* Reset DECL_CONTEXT on function arguments. */
6977 for (t = DECL_ARGUMENTS (child_fn); t; t = DECL_CHAIN (t))
6978 DECL_CONTEXT (t) = child_fn;
6980 /* Populate the function. */
6981 push_gimplify_context ();
6982 push_cfun (child_cfun);
6984 bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
6985 TREE_SIDE_EFFECTS (bind) = 1;
6986 list = NULL;
6987 DECL_SAVED_TREE (child_fn) = bind;
6988 DECL_SOURCE_LOCATION (child_fn) = gimple_location (task_stmt);
6990 /* Remap src and dst argument types if needed. */
6991 record_type = ctx->record_type;
6992 srecord_type = ctx->srecord_type;
6993 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
6994 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
6996 record_needs_remap = true;
6997 break;
6999 for (f = TYPE_FIELDS (srecord_type); f ; f = DECL_CHAIN (f))
7000 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
7002 srecord_needs_remap = true;
7003 break;
7006 if (record_needs_remap || srecord_needs_remap)
7008 memset (&tcctx, '\0', sizeof (tcctx));
7009 tcctx.cb.src_fn = ctx->cb.src_fn;
7010 tcctx.cb.dst_fn = child_fn;
7011 tcctx.cb.src_node = cgraph_node::get (tcctx.cb.src_fn);
7012 gcc_checking_assert (tcctx.cb.src_node);
7013 tcctx.cb.dst_node = tcctx.cb.src_node;
7014 tcctx.cb.src_cfun = ctx->cb.src_cfun;
7015 tcctx.cb.copy_decl = task_copyfn_copy_decl;
7016 tcctx.cb.eh_lp_nr = 0;
7017 tcctx.cb.transform_call_graph_edges = CB_CGE_MOVE;
7018 tcctx.cb.decl_map = new hash_map<tree, tree>;
7019 tcctx.ctx = ctx;
7021 if (record_needs_remap)
7022 record_type = task_copyfn_remap_type (&tcctx, record_type);
7023 if (srecord_needs_remap)
7024 srecord_type = task_copyfn_remap_type (&tcctx, srecord_type);
7026 else
7027 tcctx.cb.decl_map = NULL;
7029 arg = DECL_ARGUMENTS (child_fn);
7030 TREE_TYPE (arg) = build_pointer_type (record_type);
7031 sarg = DECL_CHAIN (arg);
7032 TREE_TYPE (sarg) = build_pointer_type (srecord_type);
7034 /* First pass: initialize temporaries used in record_type and srecord_type
7035 sizes and field offsets. */
7036 if (tcctx.cb.decl_map)
7037 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
7038 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7040 tree *p;
7042 decl = OMP_CLAUSE_DECL (c);
7043 p = tcctx.cb.decl_map->get (decl);
7044 if (p == NULL)
7045 continue;
7046 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
7047 sf = (tree) n->value;
7048 sf = *tcctx.cb.decl_map->get (sf);
7049 src = build_simple_mem_ref_loc (loc, sarg);
7050 src = omp_build_component_ref (src, sf);
7051 t = build2 (MODIFY_EXPR, TREE_TYPE (*p), *p, src);
7052 append_to_statement_list (t, &list);
7055 /* Second pass: copy shared var pointers and copy-construct non-VLA
7056 firstprivate vars. */
7057 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
7058 switch (OMP_CLAUSE_CODE (c))
7060 splay_tree_key key;
7061 case OMP_CLAUSE_SHARED:
7062 decl = OMP_CLAUSE_DECL (c);
7063 key = (splay_tree_key) decl;
7064 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
7065 key = (splay_tree_key) &DECL_UID (decl);
7066 n = splay_tree_lookup (ctx->field_map, key);
7067 if (n == NULL)
7068 break;
7069 f = (tree) n->value;
7070 if (tcctx.cb.decl_map)
7071 f = *tcctx.cb.decl_map->get (f);
7072 n = splay_tree_lookup (ctx->sfield_map, key);
7073 sf = (tree) n->value;
7074 if (tcctx.cb.decl_map)
7075 sf = *tcctx.cb.decl_map->get (sf);
7076 src = build_simple_mem_ref_loc (loc, sarg);
7077 src = omp_build_component_ref (src, sf);
7078 dst = build_simple_mem_ref_loc (loc, arg);
7079 dst = omp_build_component_ref (dst, f);
7080 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
7081 append_to_statement_list (t, &list);
7082 break;
7083 case OMP_CLAUSE_FIRSTPRIVATE:
7084 decl = OMP_CLAUSE_DECL (c);
7085 if (is_variable_sized (decl))
7086 break;
7087 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
7088 if (n == NULL)
7089 break;
7090 f = (tree) n->value;
7091 if (tcctx.cb.decl_map)
7092 f = *tcctx.cb.decl_map->get (f);
7093 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
7094 if (n != NULL)
7096 sf = (tree) n->value;
7097 if (tcctx.cb.decl_map)
7098 sf = *tcctx.cb.decl_map->get (sf);
7099 src = build_simple_mem_ref_loc (loc, sarg);
7100 src = omp_build_component_ref (src, sf);
7101 if (use_pointer_for_field (decl, NULL) || omp_is_reference (decl))
7102 src = build_simple_mem_ref_loc (loc, src);
7104 else
7105 src = decl;
7106 dst = build_simple_mem_ref_loc (loc, arg);
7107 dst = omp_build_component_ref (dst, f);
7108 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
7109 append_to_statement_list (t, &list);
7110 break;
7111 case OMP_CLAUSE_PRIVATE:
7112 if (! OMP_CLAUSE_PRIVATE_OUTER_REF (c))
7113 break;
7114 decl = OMP_CLAUSE_DECL (c);
7115 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
7116 f = (tree) n->value;
7117 if (tcctx.cb.decl_map)
7118 f = *tcctx.cb.decl_map->get (f);
7119 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
7120 if (n != NULL)
7122 sf = (tree) n->value;
7123 if (tcctx.cb.decl_map)
7124 sf = *tcctx.cb.decl_map->get (sf);
7125 src = build_simple_mem_ref_loc (loc, sarg);
7126 src = omp_build_component_ref (src, sf);
7127 if (use_pointer_for_field (decl, NULL))
7128 src = build_simple_mem_ref_loc (loc, src);
7130 else
7131 src = decl;
7132 dst = build_simple_mem_ref_loc (loc, arg);
7133 dst = omp_build_component_ref (dst, f);
7134 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
7135 append_to_statement_list (t, &list);
7136 break;
7137 default:
7138 break;
7141 /* Last pass: handle VLA firstprivates. */
7142 if (tcctx.cb.decl_map)
7143 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
7144 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7146 tree ind, ptr, df;
7148 decl = OMP_CLAUSE_DECL (c);
7149 if (!is_variable_sized (decl))
7150 continue;
7151 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
7152 if (n == NULL)
7153 continue;
7154 f = (tree) n->value;
7155 f = *tcctx.cb.decl_map->get (f);
7156 gcc_assert (DECL_HAS_VALUE_EXPR_P (decl));
7157 ind = DECL_VALUE_EXPR (decl);
7158 gcc_assert (TREE_CODE (ind) == INDIRECT_REF);
7159 gcc_assert (DECL_P (TREE_OPERAND (ind, 0)));
7160 n = splay_tree_lookup (ctx->sfield_map,
7161 (splay_tree_key) TREE_OPERAND (ind, 0));
7162 sf = (tree) n->value;
7163 sf = *tcctx.cb.decl_map->get (sf);
7164 src = build_simple_mem_ref_loc (loc, sarg);
7165 src = omp_build_component_ref (src, sf);
7166 src = build_simple_mem_ref_loc (loc, src);
7167 dst = build_simple_mem_ref_loc (loc, arg);
7168 dst = omp_build_component_ref (dst, f);
7169 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
7170 append_to_statement_list (t, &list);
7171 n = splay_tree_lookup (ctx->field_map,
7172 (splay_tree_key) TREE_OPERAND (ind, 0));
7173 df = (tree) n->value;
7174 df = *tcctx.cb.decl_map->get (df);
7175 ptr = build_simple_mem_ref_loc (loc, arg);
7176 ptr = omp_build_component_ref (ptr, df);
7177 t = build2 (MODIFY_EXPR, TREE_TYPE (ptr), ptr,
7178 build_fold_addr_expr_loc (loc, dst));
7179 append_to_statement_list (t, &list);
7182 t = build1 (RETURN_EXPR, void_type_node, NULL);
7183 append_to_statement_list (t, &list);
7185 if (tcctx.cb.decl_map)
7186 delete tcctx.cb.decl_map;
7187 pop_gimplify_context (NULL);
7188 BIND_EXPR_BODY (bind) = list;
7189 pop_cfun ();
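/* Lower OMP_CLAUSE_DEPEND clauses found in *PCLAUSES into the array form
   the runtime expects: { n_in + n_out, n_out, out/inout addresses ...,
   in addresses ... }.  Initialization of the array is emitted into *ISEQ
   and a clobber of it into *OSEQ, and a new OMP_CLAUSE_DEPEND whose decl
   is the address of the array is prepended to *PCLAUSES.  */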
7192 static void
7193 lower_depend_clauses (tree *pclauses, gimple_seq *iseq, gimple_seq *oseq)
7195 tree c, clauses;
7196 gimple *g;
7197 size_t n_in = 0, n_out = 0, idx = 2, i;
7199 clauses = omp_find_clause (*pclauses, OMP_CLAUSE_DEPEND);
7200 gcc_assert (clauses);
7201 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
7202 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
7203 switch (OMP_CLAUSE_DEPEND_KIND (c))
7205 case OMP_CLAUSE_DEPEND_IN:
7206 n_in++;
7207 break;
7208 case OMP_CLAUSE_DEPEND_OUT:
7209 case OMP_CLAUSE_DEPEND_INOUT:
7210 n_out++;
7211 break;
7212 case OMP_CLAUSE_DEPEND_SOURCE:
7213 case OMP_CLAUSE_DEPEND_SINK:
7214 /* FALLTHRU */
7215 default:
7216 gcc_unreachable ();
7218 tree type = build_array_type_nelts (ptr_type_node, n_in + n_out + 2);
7219 tree array = create_tmp_var (type);
7220 TREE_ADDRESSABLE (array) = 1;
7221 tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
7222 NULL_TREE);
7223 g = gimple_build_assign (r, build_int_cst (ptr_type_node, n_in + n_out));
7224 gimple_seq_add_stmt (iseq, g);
7225 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
7226 NULL_TREE);
7227 g = gimple_build_assign (r, build_int_cst (ptr_type_node, n_out));
7228 gimple_seq_add_stmt (iseq, g);
7229 for (i = 0; i < 2; i++)
7231 if ((i ? n_in : n_out) == 0)
7232 continue;
7233 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
7234 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
7235 && ((OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_IN) ^ i))
7237 tree t = OMP_CLAUSE_DECL (c);
7238 t = fold_convert (ptr_type_node, t);
7239 gimplify_expr (&t, iseq, NULL, is_gimple_val, fb_rvalue);
7240 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (idx++),
7241 NULL_TREE, NULL_TREE);
7242 g = gimple_build_assign (r, t);
7243 gimple_seq_add_stmt (iseq, g);
7246 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
7247 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
7248 OMP_CLAUSE_CHAIN (c) = *pclauses;
7249 *pclauses = c;
7250 tree clobber = build_constructor (type, NULL);
7251 TREE_THIS_VOLATILE (clobber) = 1;
7252 g = gimple_build_assign (array, clobber);
7253 gimple_seq_add_stmt (oseq, g);
7256 /* Lower the OpenMP parallel or task directive in the current statement
7257 in GSI_P. CTX holds context information for the directive. */
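/* The overall result is laid out roughly as follows (a sketch):

     <dep_bind, only when depend clauses are present:>
       <depend array setup (dep_ilist)>
       <bind:>
         <send clause setup (ilist)>
         GIMPLE_OMP_PARALLEL/TASK   <- body rebuilt as: receiver setup,
                                       input clauses, BODY, reductions,
                                       output clauses, OMP_RETURN
         <send clause teardown (olist)>
       <depend array clobber (dep_olist)>
*/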
7259 static void
7260 lower_omp_taskreg (gimple_stmt_iterator *gsi_p, omp_context *ctx)
7262 tree clauses;
7263 tree child_fn, t;
7264 gimple *stmt = gsi_stmt (*gsi_p);
7265 gbind *par_bind, *bind, *dep_bind = NULL;
7266 gimple_seq par_body, olist, ilist, par_olist, par_rlist, par_ilist, new_body;
7267 location_t loc = gimple_location (stmt);
7269 clauses = gimple_omp_taskreg_clauses (stmt);
7270 par_bind
7271 = as_a <gbind *> (gimple_seq_first_stmt (gimple_omp_body (stmt)));
7272 par_body = gimple_bind_body (par_bind);
7273 child_fn = ctx->cb.dst_fn;
7274 if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL
7275 && !gimple_omp_parallel_combined_p (stmt))
7277 struct walk_stmt_info wi;
7278 int ws_num = 0;
7280 memset (&wi, 0, sizeof (wi));
7281 wi.info = &ws_num;
7282 wi.val_only = true;
7283 walk_gimple_seq (par_body, check_combined_parallel, NULL, &wi);
7284 if (ws_num == 1)
7285 gimple_omp_parallel_set_combined_p (stmt, true);
7287 gimple_seq dep_ilist = NULL;
7288 gimple_seq dep_olist = NULL;
7289 if (gimple_code (stmt) == GIMPLE_OMP_TASK
7290 && omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
7292 push_gimplify_context ();
7293 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
7294 lower_depend_clauses (gimple_omp_task_clauses_ptr (stmt),
7295 &dep_ilist, &dep_olist);
7298 if (ctx->srecord_type)
7299 create_task_copyfn (as_a <gomp_task *> (stmt), ctx);
7301 push_gimplify_context ();
7303 par_olist = NULL;
7304 par_ilist = NULL;
7305 par_rlist = NULL;
7306 bool phony_construct = gimple_code (stmt) == GIMPLE_OMP_PARALLEL
7307 && gimple_omp_parallel_grid_phony (as_a <gomp_parallel *> (stmt));
7308 if (phony_construct && ctx->record_type)
7310 gcc_checking_assert (!ctx->receiver_decl);
7311 ctx->receiver_decl = create_tmp_var
7312 (build_reference_type (ctx->record_type), ".omp_rec");
7314 lower_rec_input_clauses (clauses, &par_ilist, &par_olist, ctx, NULL);
7315 lower_omp (&par_body, ctx);
7316 if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL)
7317 lower_reduction_clauses (clauses, &par_rlist, ctx);
7319 /* Declare all the variables created by mapping and the variables
7320 declared in the scope of the parallel body. */
7321 record_vars_into (ctx->block_vars, child_fn);
7322 record_vars_into (gimple_bind_vars (par_bind), child_fn);
7324 if (ctx->record_type)
7326 ctx->sender_decl
7327 = create_tmp_var (ctx->srecord_type ? ctx->srecord_type
7328 : ctx->record_type, ".omp_data_o");
7329 DECL_NAMELESS (ctx->sender_decl) = 1;
7330 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
7331 gimple_omp_taskreg_set_data_arg (stmt, ctx->sender_decl);
7334 olist = NULL;
7335 ilist = NULL;
7336 lower_send_clauses (clauses, &ilist, &olist, ctx);
7337 lower_send_shared_vars (&ilist, &olist, ctx);
7339 if (ctx->record_type)
7341 tree clobber = build_constructor (TREE_TYPE (ctx->sender_decl), NULL);
7342 TREE_THIS_VOLATILE (clobber) = 1;
7343 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
7344 clobber));
7347 /* Once all the expansions are done, sequence all the different
7348 fragments inside gimple_omp_body. */
7350 new_body = NULL;
7352 if (ctx->record_type)
7354 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
7355 /* fixup_child_record_type might have changed receiver_decl's type. */
7356 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
7357 gimple_seq_add_stmt (&new_body,
7358 gimple_build_assign (ctx->receiver_decl, t));
7361 gimple_seq_add_seq (&new_body, par_ilist);
7362 gimple_seq_add_seq (&new_body, par_body);
7363 gimple_seq_add_seq (&new_body, par_rlist);
7364 if (ctx->cancellable)
7365 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
7366 gimple_seq_add_seq (&new_body, par_olist);
7367 new_body = maybe_catch_exception (new_body);
7368 if (gimple_code (stmt) == GIMPLE_OMP_TASK)
7369 gimple_seq_add_stmt (&new_body,
7370 gimple_build_omp_continue (integer_zero_node,
7371 integer_zero_node));
7372 if (!phony_construct)
7374 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
7375 gimple_omp_set_body (stmt, new_body);
7378 bind = gimple_build_bind (NULL, NULL, gimple_bind_block (par_bind));
7379 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
7380 gimple_bind_add_seq (bind, ilist);
7381 if (!phony_construct)
7382 gimple_bind_add_stmt (bind, stmt);
7383 else
7384 gimple_bind_add_seq (bind, new_body);
7385 gimple_bind_add_seq (bind, olist);
7387 pop_gimplify_context (NULL);
7389 if (dep_bind)
7391 gimple_bind_add_seq (dep_bind, dep_ilist);
7392 gimple_bind_add_stmt (dep_bind, bind);
7393 gimple_bind_add_seq (dep_bind, dep_olist);
7394 pop_gimplify_context (dep_bind);
7398 /* Lower the GIMPLE_OMP_TARGET in the current statement
7399 in GSI_P. CTX holds context information for the directive. */
7401 static void
7402 lower_omp_target (gimple_stmt_iterator *gsi_p, omp_context *ctx)
7404 tree clauses;
7405 tree child_fn, t, c;
7406 gomp_target *stmt = as_a <gomp_target *> (gsi_stmt (*gsi_p));
7407 gbind *tgt_bind, *bind, *dep_bind = NULL;
7408 gimple_seq tgt_body, olist, ilist, fplist, new_body;
7409 location_t loc = gimple_location (stmt);
7410 bool offloaded, data_region;
7411 unsigned int map_cnt = 0;
7413 offloaded = is_gimple_omp_offloaded (stmt);
7414 switch (gimple_omp_target_kind (stmt))
7416 case GF_OMP_TARGET_KIND_REGION:
7417 case GF_OMP_TARGET_KIND_UPDATE:
7418 case GF_OMP_TARGET_KIND_ENTER_DATA:
7419 case GF_OMP_TARGET_KIND_EXIT_DATA:
7420 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
7421 case GF_OMP_TARGET_KIND_OACC_KERNELS:
7422 case GF_OMP_TARGET_KIND_OACC_UPDATE:
7423 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
7424 case GF_OMP_TARGET_KIND_OACC_DECLARE:
7425 data_region = false;
7426 break;
7427 case GF_OMP_TARGET_KIND_DATA:
7428 case GF_OMP_TARGET_KIND_OACC_DATA:
7429 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
7430 data_region = true;
7431 break;
7432 default:
7433 gcc_unreachable ();
7436 clauses = gimple_omp_target_clauses (stmt);
7438 gimple_seq dep_ilist = NULL;
7439 gimple_seq dep_olist = NULL;
7440 if (omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
7442 push_gimplify_context ();
7443 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
7444 lower_depend_clauses (gimple_omp_target_clauses_ptr (stmt),
7445 &dep_ilist, &dep_olist);
7448 tgt_bind = NULL;
7449 tgt_body = NULL;
7450 if (offloaded)
7452 tgt_bind = gimple_seq_first_stmt_as_a_bind (gimple_omp_body (stmt));
7453 tgt_body = gimple_bind_body (tgt_bind);
7455 else if (data_region)
7456 tgt_body = gimple_omp_body (stmt);
7457 child_fn = ctx->cb.dst_fn;
7459 push_gimplify_context ();
7460 fplist = NULL;
  for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
    switch (OMP_CLAUSE_CODE (c))
      {
	tree var, x;

      default:
	break;
      case OMP_CLAUSE_MAP:
#if CHECKING_P
	/* First check what we're prepared to handle in the following.  */
	switch (OMP_CLAUSE_MAP_KIND (c))
	  {
	  case GOMP_MAP_ALLOC:
	  case GOMP_MAP_TO:
	  case GOMP_MAP_FROM:
	  case GOMP_MAP_TOFROM:
	  case GOMP_MAP_POINTER:
	  case GOMP_MAP_TO_PSET:
	  case GOMP_MAP_DELETE:
	  case GOMP_MAP_RELEASE:
	  case GOMP_MAP_ALWAYS_TO:
	  case GOMP_MAP_ALWAYS_FROM:
	  case GOMP_MAP_ALWAYS_TOFROM:
	  case GOMP_MAP_FIRSTPRIVATE_POINTER:
	  case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
	  case GOMP_MAP_STRUCT:
	  case GOMP_MAP_ALWAYS_POINTER:
	    break;
	  case GOMP_MAP_FORCE_ALLOC:
	  case GOMP_MAP_FORCE_TO:
	  case GOMP_MAP_FORCE_FROM:
	  case GOMP_MAP_FORCE_TOFROM:
	  case GOMP_MAP_FORCE_PRESENT:
	  case GOMP_MAP_FORCE_DEVICEPTR:
	  case GOMP_MAP_DEVICE_RESIDENT:
	  case GOMP_MAP_LINK:
	    gcc_assert (is_gimple_omp_oacc (stmt));
	    break;
	  default:
	    gcc_unreachable ();
	  }
#endif
	/* FALLTHRU */
      case OMP_CLAUSE_TO:
      case OMP_CLAUSE_FROM:
      oacc_firstprivate:
	var = OMP_CLAUSE_DECL (c);
	if (!DECL_P (var))
	  {
	    if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP
		|| (!OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
		    && (OMP_CLAUSE_MAP_KIND (c)
			!= GOMP_MAP_FIRSTPRIVATE_POINTER)))
	      map_cnt++;
	    continue;
	  }

	if (DECL_SIZE (var)
	    && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
	  {
	    tree var2 = DECL_VALUE_EXPR (var);
	    gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
	    var2 = TREE_OPERAND (var2, 0);
	    gcc_assert (DECL_P (var2));
	    var = var2;
	  }

	if (offloaded
	    && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	    && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
		|| OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
	  {
	    if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
	      {
		if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx))
		    && varpool_node::get_create (var)->offloadable)
		  continue;

		tree type = build_pointer_type (TREE_TYPE (var));
		tree new_var = lookup_decl (var, ctx);
		x = create_tmp_var_raw (type, get_name (new_var));
		gimple_add_tmp_var (x);
		x = build_simple_mem_ref (x);
		SET_DECL_VALUE_EXPR (new_var, x);
		DECL_HAS_VALUE_EXPR_P (new_var) = 1;
	      }
	    continue;
	  }

	if (!maybe_lookup_field (var, ctx))
	  continue;

	/* Don't remap oacc parallel reduction variables, because the
	   intermediate result must be local to each gang.  */
	if (offloaded && !(OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
			   && OMP_CLAUSE_MAP_IN_REDUCTION (c)))
	  {
	    x = build_receiver_ref (var, true, ctx);
	    tree new_var = lookup_decl (var, ctx);

	    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
		&& OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
		&& !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
		&& TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
	      x = build_simple_mem_ref (x);
	    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
	      {
		gcc_assert (is_gimple_omp_oacc (ctx->stmt));
		if (omp_is_reference (new_var))
		  {
		    /* Create a local object to hold the instance
		       value.  */
		    tree type = TREE_TYPE (TREE_TYPE (new_var));
		    const char *id = IDENTIFIER_POINTER (DECL_NAME (new_var));
		    tree inst = create_tmp_var (type, id);
		    gimplify_assign (inst, fold_indirect_ref (x), &fplist);
		    x = build_fold_addr_expr (inst);
		  }
		gimplify_assign (new_var, x, &fplist);
	      }
	    else if (DECL_P (new_var))
	      {
		SET_DECL_VALUE_EXPR (new_var, x);
		DECL_HAS_VALUE_EXPR_P (new_var) = 1;
	      }
	    else
	      gcc_unreachable ();
	  }
	map_cnt++;
	break;

      case OMP_CLAUSE_FIRSTPRIVATE:
	if (is_oacc_parallel (ctx))
	  goto oacc_firstprivate;
	map_cnt++;
	var = OMP_CLAUSE_DECL (c);
	if (!omp_is_reference (var)
	    && !is_gimple_reg_type (TREE_TYPE (var)))
	  {
	    tree new_var = lookup_decl (var, ctx);
	    if (is_variable_sized (var))
	      {
		tree pvar = DECL_VALUE_EXPR (var);
		gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
		pvar = TREE_OPERAND (pvar, 0);
		gcc_assert (DECL_P (pvar));
		tree new_pvar = lookup_decl (pvar, ctx);
		x = build_fold_indirect_ref (new_pvar);
		TREE_THIS_NOTRAP (x) = 1;
	      }
	    else
	      x = build_receiver_ref (var, true, ctx);
	    SET_DECL_VALUE_EXPR (new_var, x);
	    DECL_HAS_VALUE_EXPR_P (new_var) = 1;
	  }
	break;

      case OMP_CLAUSE_PRIVATE:
	if (is_gimple_omp_oacc (ctx->stmt))
	  break;
	var = OMP_CLAUSE_DECL (c);
	if (is_variable_sized (var))
	  {
	    tree new_var = lookup_decl (var, ctx);
	    tree pvar = DECL_VALUE_EXPR (var);
	    gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
	    pvar = TREE_OPERAND (pvar, 0);
	    gcc_assert (DECL_P (pvar));
	    tree new_pvar = lookup_decl (pvar, ctx);
	    x = build_fold_indirect_ref (new_pvar);
	    TREE_THIS_NOTRAP (x) = 1;
	    SET_DECL_VALUE_EXPR (new_var, x);
	    DECL_HAS_VALUE_EXPR_P (new_var) = 1;
	  }
	break;

      case OMP_CLAUSE_USE_DEVICE_PTR:
      case OMP_CLAUSE_IS_DEVICE_PTR:
	var = OMP_CLAUSE_DECL (c);
	map_cnt++;
	if (is_variable_sized (var))
	  {
	    tree new_var = lookup_decl (var, ctx);
	    tree pvar = DECL_VALUE_EXPR (var);
	    gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
	    pvar = TREE_OPERAND (pvar, 0);
	    gcc_assert (DECL_P (pvar));
	    tree new_pvar = lookup_decl (pvar, ctx);
	    x = build_fold_indirect_ref (new_pvar);
	    TREE_THIS_NOTRAP (x) = 1;
	    SET_DECL_VALUE_EXPR (new_var, x);
	    DECL_HAS_VALUE_EXPR_P (new_var) = 1;
	  }
	else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
	  {
	    tree new_var = lookup_decl (var, ctx);
	    tree type = build_pointer_type (TREE_TYPE (var));
	    x = create_tmp_var_raw (type, get_name (new_var));
	    gimple_add_tmp_var (x);
	    x = build_simple_mem_ref (x);
	    SET_DECL_VALUE_EXPR (new_var, x);
	    DECL_HAS_VALUE_EXPR_P (new_var) = 1;
	  }
	else
	  {
	    tree new_var = lookup_decl (var, ctx);
	    x = create_tmp_var_raw (TREE_TYPE (new_var), get_name (new_var));
	    gimple_add_tmp_var (x);
	    SET_DECL_VALUE_EXPR (new_var, x);
	    DECL_HAS_VALUE_EXPR_P (new_var) = 1;
	  }
	break;
      }
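
  /* With the remappings in place, lower the body of the region itself
     (descriptive note, not upstream commentary).  target_nesting_level is
     bumped so that lower_omp knows to fold statements inside offloaded
     regions; folding was deferred during gimplification, see
     gimplify.c:maybe_fold_stmt.  */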
  if (offloaded)
    {
      target_nesting_level++;
      lower_omp (&tgt_body, ctx);
      target_nesting_level--;
    }
  else if (data_region)
    lower_omp (&tgt_body, ctx);

  if (offloaded)
    {
      /* Declare all the variables created by mapping and the variables
	 declared in the scope of the target body.  */
      record_vars_into (ctx->block_vars, child_fn);
      record_vars_into (gimple_bind_vars (tgt_bind), child_fn);
    }

  olist = NULL;
  ilist = NULL;
  if (ctx->record_type)
    {
      ctx->sender_decl
	= create_tmp_var (ctx->record_type, ".omp_data_arr");
      DECL_NAMELESS (ctx->sender_decl) = 1;
      TREE_ADDRESSABLE (ctx->sender_decl) = 1;
      t = make_tree_vec (3);
      TREE_VEC_ELT (t, 0) = ctx->sender_decl;
      TREE_VEC_ELT (t, 1)
	= create_tmp_var (build_array_type_nelts (size_type_node, map_cnt),
			  ".omp_data_sizes");
      DECL_NAMELESS (TREE_VEC_ELT (t, 1)) = 1;
      TREE_ADDRESSABLE (TREE_VEC_ELT (t, 1)) = 1;
      TREE_STATIC (TREE_VEC_ELT (t, 1)) = 1;
      tree tkind_type = short_unsigned_type_node;
      int talign_shift = 8;
      TREE_VEC_ELT (t, 2)
	= create_tmp_var (build_array_type_nelts (tkind_type, map_cnt),
			  ".omp_data_kinds");
      DECL_NAMELESS (TREE_VEC_ELT (t, 2)) = 1;
      TREE_ADDRESSABLE (TREE_VEC_ELT (t, 2)) = 1;
      TREE_STATIC (TREE_VEC_ELT (t, 2)) = 1;
      gimple_omp_target_set_data_arg (stmt, t);
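
      /* Descriptive note on the layout (an explanatory addition, not
	 upstream commentary): the data argument is a 3-element TREE_VEC,
	 element 0 being .omp_data_arr (the sender record whose fields
	 hold the host addresses), element 1 .omp_data_sizes and element 2
	 .omp_data_kinds.  These become the hostaddrs/sizes/kinds
	 arguments of the libgomp launch routines (GOMP_target_ext and
	 friends).  E.g., for "#pragma omp target map(tofrom: a)" with
	 "int a", the arrays describe a single entry: &a, sizeof (int) and
	 GOMP_MAP_TOFROM, the latter augmented with the alignment bits
	 computed below.  */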

      vec<constructor_elt, va_gc> *vsize;
      vec<constructor_elt, va_gc> *vkind;
      vec_alloc (vsize, map_cnt);
      vec_alloc (vkind, map_cnt);
      unsigned int map_idx = 0;
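
      /* Walk the clauses again to fill the size and kind arrays and to
	 emit the sender-side code: copy-in assignments are appended to
	 ILIST (executed before the target statement) and copy-back
	 assignments to OLIST (executed after it).  */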
      for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
	switch (OMP_CLAUSE_CODE (c))
	  {
	    tree ovar, nc, s, purpose, var, x, type;
	    unsigned int talign;

	  default:
	    break;

	  case OMP_CLAUSE_MAP:
	  case OMP_CLAUSE_TO:
	  case OMP_CLAUSE_FROM:
	  oacc_firstprivate_map:
	    nc = c;
	    ovar = OMP_CLAUSE_DECL (c);
	    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
		&& (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
		    || (OMP_CLAUSE_MAP_KIND (c)
			== GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
	      break;
	    if (!DECL_P (ovar))
	      {
		if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
		    && OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
		  {
		    gcc_checking_assert (OMP_CLAUSE_DECL (OMP_CLAUSE_CHAIN (c))
					 == get_base_address (ovar));
		    nc = OMP_CLAUSE_CHAIN (c);
		    ovar = OMP_CLAUSE_DECL (nc);
		  }
		else
		  {
		    tree x = build_sender_ref (ovar, ctx);
		    tree v
		      = build_fold_addr_expr_with_type (ovar, ptr_type_node);
		    gimplify_assign (x, v, &ilist);
		    nc = NULL_TREE;
		  }
	      }
	    else
	      {
		if (DECL_SIZE (ovar)
		    && TREE_CODE (DECL_SIZE (ovar)) != INTEGER_CST)
		  {
		    tree ovar2 = DECL_VALUE_EXPR (ovar);
		    gcc_assert (TREE_CODE (ovar2) == INDIRECT_REF);
		    ovar2 = TREE_OPERAND (ovar2, 0);
		    gcc_assert (DECL_P (ovar2));
		    ovar = ovar2;
		  }
		if (!maybe_lookup_field (ovar, ctx))
		  continue;
	      }

	    talign = TYPE_ALIGN_UNIT (TREE_TYPE (ovar));
	    if (DECL_P (ovar) && DECL_ALIGN_UNIT (ovar) > talign)
	      talign = DECL_ALIGN_UNIT (ovar);
	    if (nc)
	      {
		var = lookup_decl_in_outer_ctx (ovar, ctx);
		x = build_sender_ref (ovar, ctx);

		if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
		    && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
		    && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
		    && TREE_CODE (TREE_TYPE (ovar)) == ARRAY_TYPE)
		  {
		    gcc_assert (offloaded);
		    tree avar
		      = create_tmp_var (TREE_TYPE (TREE_TYPE (x)));
		    mark_addressable (avar);
		    gimplify_assign (avar, build_fold_addr_expr (var), &ilist);
		    talign = DECL_ALIGN_UNIT (avar);
		    avar = build_fold_addr_expr (avar);
		    gimplify_assign (x, avar, &ilist);
		  }
		else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
		  {
		    gcc_assert (is_gimple_omp_oacc (ctx->stmt));
		    if (!omp_is_reference (var))
		      {
			if (is_gimple_reg (var)
			    && OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
			  TREE_NO_WARNING (var) = 1;
			var = build_fold_addr_expr (var);
		      }
		    else
		      talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
		    gimplify_assign (x, var, &ilist);
		  }
		else if (is_gimple_reg (var))
		  {
		    gcc_assert (offloaded);
		    tree avar = create_tmp_var (TREE_TYPE (var));
		    mark_addressable (avar);
		    enum gomp_map_kind map_kind = OMP_CLAUSE_MAP_KIND (c);
		    if (GOMP_MAP_COPY_TO_P (map_kind)
			|| map_kind == GOMP_MAP_POINTER
			|| map_kind == GOMP_MAP_TO_PSET
			|| map_kind == GOMP_MAP_FORCE_DEVICEPTR)
		      {
			/* If we need to initialize a temporary
			   with VAR because it is not addressable, and
			   the variable hasn't been initialized yet, then
			   we'll get a warning for the store to avar.
			   Don't warn in that case, the mapping might
			   be implicit.  */
			TREE_NO_WARNING (var) = 1;
			gimplify_assign (avar, var, &ilist);
		      }
		    avar = build_fold_addr_expr (avar);
		    gimplify_assign (x, avar, &ilist);
		    if ((GOMP_MAP_COPY_FROM_P (map_kind)
			 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
			&& !TYPE_READONLY (TREE_TYPE (var)))
		      {
			x = unshare_expr (x);
			x = build_simple_mem_ref (x);
			gimplify_assign (var, x, &olist);
		      }
		  }
		else
		  {
		    var = build_fold_addr_expr (var);
		    gimplify_assign (x, var, &ilist);
		  }
	      }
	    s = NULL_TREE;
	    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
	      {
		gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
		s = TREE_TYPE (ovar);
		if (TREE_CODE (s) == REFERENCE_TYPE)
		  s = TREE_TYPE (s);
		s = TYPE_SIZE_UNIT (s);
	      }
	    else
	      s = OMP_CLAUSE_SIZE (c);
	    if (s == NULL_TREE)
	      s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
	    s = fold_convert (size_type_node, s);
	    purpose = size_int (map_idx++);
	    CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
	    if (TREE_CODE (s) != INTEGER_CST)
	      TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;

	    unsigned HOST_WIDE_INT tkind, tkind_zero;
	    switch (OMP_CLAUSE_CODE (c))
	      {
	      case OMP_CLAUSE_MAP:
		tkind = OMP_CLAUSE_MAP_KIND (c);
		tkind_zero = tkind;
		if (OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c))
		  switch (tkind)
		    {
		    case GOMP_MAP_ALLOC:
		    case GOMP_MAP_TO:
		    case GOMP_MAP_FROM:
		    case GOMP_MAP_TOFROM:
		    case GOMP_MAP_ALWAYS_TO:
		    case GOMP_MAP_ALWAYS_FROM:
		    case GOMP_MAP_ALWAYS_TOFROM:
		    case GOMP_MAP_RELEASE:
		    case GOMP_MAP_FORCE_TO:
		    case GOMP_MAP_FORCE_FROM:
		    case GOMP_MAP_FORCE_TOFROM:
		    case GOMP_MAP_FORCE_PRESENT:
		      tkind_zero = GOMP_MAP_ZERO_LEN_ARRAY_SECTION;
		      break;
		    case GOMP_MAP_DELETE:
		      tkind_zero = GOMP_MAP_DELETE_ZERO_LEN_ARRAY_SECTION;
		      /* FALLTHRU */
		    default:
		      break;
		    }
		if (tkind_zero != tkind)
		  {
		    if (integer_zerop (s))
		      tkind = tkind_zero;
		    else if (integer_nonzerop (s))
		      tkind_zero = tkind;
		  }
		break;
	      case OMP_CLAUSE_FIRSTPRIVATE:
		gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
		tkind = GOMP_MAP_TO;
		tkind_zero = tkind;
		break;
	      case OMP_CLAUSE_TO:
		tkind = GOMP_MAP_TO;
		tkind_zero = tkind;
		break;
	      case OMP_CLAUSE_FROM:
		tkind = GOMP_MAP_FROM;
		tkind_zero = tkind;
		break;
	      default:
		gcc_unreachable ();
	      }
	    gcc_checking_assert (tkind
				 < (HOST_WIDE_INT_C (1U) << talign_shift));
	    gcc_checking_assert (tkind_zero
				 < (HOST_WIDE_INT_C (1U) << talign_shift));
	    talign = ceil_log2 (talign);
	    tkind |= talign << talign_shift;
	    tkind_zero |= talign << talign_shift;
	    gcc_checking_assert (tkind
				 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
	    gcc_checking_assert (tkind_zero
				 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
	    if (tkind == tkind_zero)
	      x = build_int_cstu (tkind_type, tkind);
	    else
	      {
		TREE_STATIC (TREE_VEC_ELT (t, 2)) = 0;
		x = build3 (COND_EXPR, tkind_type,
			    fold_build2 (EQ_EXPR, boolean_type_node,
					 unshare_expr (s), size_zero_node),
			    build_int_cstu (tkind_type, tkind_zero),
			    build_int_cstu (tkind_type, tkind));
	      }
	    CONSTRUCTOR_APPEND_ELT (vkind, purpose, x);
	    if (nc && nc != c)
	      c = nc;
	    break;

	  case OMP_CLAUSE_FIRSTPRIVATE:
	    if (is_oacc_parallel (ctx))
	      goto oacc_firstprivate_map;
	    ovar = OMP_CLAUSE_DECL (c);
	    if (omp_is_reference (ovar))
	      talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
	    else
	      talign = DECL_ALIGN_UNIT (ovar);
	    var = lookup_decl_in_outer_ctx (ovar, ctx);
	    x = build_sender_ref (ovar, ctx);
	    tkind = GOMP_MAP_FIRSTPRIVATE;
	    type = TREE_TYPE (ovar);
	    if (omp_is_reference (ovar))
	      type = TREE_TYPE (type);
	    if ((INTEGRAL_TYPE_P (type)
		 && TYPE_PRECISION (type) <= POINTER_SIZE)
		|| TREE_CODE (type) == POINTER_TYPE)
	      {
		tkind = GOMP_MAP_FIRSTPRIVATE_INT;
		tree t = var;
		if (omp_is_reference (var))
		  t = build_simple_mem_ref (var);
		else if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
		  TREE_NO_WARNING (var) = 1;
		if (TREE_CODE (type) != POINTER_TYPE)
		  t = fold_convert (pointer_sized_int_node, t);
		t = fold_convert (TREE_TYPE (x), t);
		gimplify_assign (x, t, &ilist);
	      }
	    else if (omp_is_reference (var))
	      gimplify_assign (x, var, &ilist);
	    else if (is_gimple_reg (var))
	      {
		tree avar = create_tmp_var (TREE_TYPE (var));
		mark_addressable (avar);
		if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
		  TREE_NO_WARNING (var) = 1;
		gimplify_assign (avar, var, &ilist);
		avar = build_fold_addr_expr (avar);
		gimplify_assign (x, avar, &ilist);
	      }
	    else
	      {
		var = build_fold_addr_expr (var);
		gimplify_assign (x, var, &ilist);
	      }
	    if (tkind == GOMP_MAP_FIRSTPRIVATE_INT)
	      s = size_int (0);
	    else if (omp_is_reference (ovar))
	      s = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
	    else
	      s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
	    s = fold_convert (size_type_node, s);
	    purpose = size_int (map_idx++);
	    CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
	    if (TREE_CODE (s) != INTEGER_CST)
	      TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;

	    gcc_checking_assert (tkind
				 < (HOST_WIDE_INT_C (1U) << talign_shift));
	    talign = ceil_log2 (talign);
	    tkind |= talign << talign_shift;
	    gcc_checking_assert (tkind
				 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
	    CONSTRUCTOR_APPEND_ELT (vkind, purpose,
				    build_int_cstu (tkind_type, tkind));
	    break;

	  case OMP_CLAUSE_USE_DEVICE_PTR:
	  case OMP_CLAUSE_IS_DEVICE_PTR:
	    ovar = OMP_CLAUSE_DECL (c);
	    var = lookup_decl_in_outer_ctx (ovar, ctx);
	    x = build_sender_ref (ovar, ctx);
	    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR)
	      tkind = GOMP_MAP_USE_DEVICE_PTR;
	    else
	      tkind = GOMP_MAP_FIRSTPRIVATE_INT;
	    type = TREE_TYPE (ovar);
	    if (TREE_CODE (type) == ARRAY_TYPE)
	      var = build_fold_addr_expr (var);
	    else
	      {
		if (omp_is_reference (ovar))
		  {
		    type = TREE_TYPE (type);
		    if (TREE_CODE (type) != ARRAY_TYPE)
		      var = build_simple_mem_ref (var);
		    var = fold_convert (TREE_TYPE (x), var);
		  }
	      }
	    gimplify_assign (x, var, &ilist);
	    s = size_int (0);
	    purpose = size_int (map_idx++);
	    CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
	    gcc_checking_assert (tkind
				 < (HOST_WIDE_INT_C (1U) << talign_shift));
	    gcc_checking_assert (tkind
				 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
	    CONSTRUCTOR_APPEND_ELT (vkind, purpose,
				    build_int_cstu (tkind_type, tkind));
	    break;
	  }

      gcc_assert (map_idx == map_cnt);
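
      /* Attach the collected elements as initializers.  If every size
	 resp. kind turned out to be a compile-time constant, the
	 corresponding array has stayed TREE_STATIC; otherwise the flag
	 was cleared above and the array is emitted as a local,
	 initialized here and clobbered again after the target
	 statement.  */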
      DECL_INITIAL (TREE_VEC_ELT (t, 1))
	= build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 1)), vsize);
      DECL_INITIAL (TREE_VEC_ELT (t, 2))
	= build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 2)), vkind);
      for (int i = 1; i <= 2; i++)
	if (!TREE_STATIC (TREE_VEC_ELT (t, i)))
	  {
	    gimple_seq initlist = NULL;
	    force_gimple_operand (build1 (DECL_EXPR, void_type_node,
					  TREE_VEC_ELT (t, i)),
				  &initlist, true, NULL_TREE);
	    gimple_seq_add_seq (&ilist, initlist);

	    tree clobber = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, i)),
					      NULL);
	    TREE_THIS_VOLATILE (clobber) = 1;
	    gimple_seq_add_stmt (&olist,
				 gimple_build_assign (TREE_VEC_ELT (t, i),
						      clobber));
	  }

      tree clobber = build_constructor (ctx->record_type, NULL);
      TREE_THIS_VOLATILE (clobber) = 1;
      gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
							clobber));
    }

  /* Once all the expansions are done, sequence all the different
     fragments inside gimple_omp_body.  */

  new_body = NULL;

  if (offloaded
      && ctx->record_type)
    {
      t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
      /* fixup_child_record_type might have changed receiver_decl's type.  */
      t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
      gimple_seq_add_stmt (&new_body,
			   gimple_build_assign (ctx->receiver_decl, t));
    }
  gimple_seq_add_seq (&new_body, fplist);
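
  /* Build the receiver side: code placed at the start of the region's
     body that initializes the remapped variables from the receiver
     record or from the firstprivate values passed in by the runtime.  */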
  if (offloaded || data_region)
    {
      tree prev = NULL_TREE;
      for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
	switch (OMP_CLAUSE_CODE (c))
	  {
	    tree var, x;
	  default:
	    break;
	  case OMP_CLAUSE_FIRSTPRIVATE:
	    if (is_gimple_omp_oacc (ctx->stmt))
	      break;
	    var = OMP_CLAUSE_DECL (c);
	    if (omp_is_reference (var)
		|| is_gimple_reg_type (TREE_TYPE (var)))
	      {
		tree new_var = lookup_decl (var, ctx);
		tree type;
		type = TREE_TYPE (var);
		if (omp_is_reference (var))
		  type = TREE_TYPE (type);
		if ((INTEGRAL_TYPE_P (type)
		     && TYPE_PRECISION (type) <= POINTER_SIZE)
		    || TREE_CODE (type) == POINTER_TYPE)
		  {
		    x = build_receiver_ref (var, false, ctx);
		    if (TREE_CODE (type) != POINTER_TYPE)
		      x = fold_convert (pointer_sized_int_node, x);
		    x = fold_convert (type, x);
		    gimplify_expr (&x, &new_body, NULL, is_gimple_val,
				   fb_rvalue);
		    if (omp_is_reference (var))
		      {
			tree v = create_tmp_var_raw (type, get_name (var));
			gimple_add_tmp_var (v);
			TREE_ADDRESSABLE (v) = 1;
			gimple_seq_add_stmt (&new_body,
					     gimple_build_assign (v, x));
			x = build_fold_addr_expr (v);
		      }
		    gimple_seq_add_stmt (&new_body,
					 gimple_build_assign (new_var, x));
		  }
		else
		  {
		    x = build_receiver_ref (var, !omp_is_reference (var), ctx);
		    gimplify_expr (&x, &new_body, NULL, is_gimple_val,
				   fb_rvalue);
		    gimple_seq_add_stmt (&new_body,
					 gimple_build_assign (new_var, x));
		  }
	      }
	    else if (is_variable_sized (var))
	      {
		tree pvar = DECL_VALUE_EXPR (var);
		gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
		pvar = TREE_OPERAND (pvar, 0);
		gcc_assert (DECL_P (pvar));
		tree new_var = lookup_decl (pvar, ctx);
		x = build_receiver_ref (var, false, ctx);
		gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
		gimple_seq_add_stmt (&new_body,
				     gimple_build_assign (new_var, x));
	      }
	    break;
	  case OMP_CLAUSE_PRIVATE:
	    if (is_gimple_omp_oacc (ctx->stmt))
	      break;
	    var = OMP_CLAUSE_DECL (c);
	    if (omp_is_reference (var))
	      {
		location_t clause_loc = OMP_CLAUSE_LOCATION (c);
		tree new_var = lookup_decl (var, ctx);
		x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
		if (TREE_CONSTANT (x))
		  {
		    x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
					    get_name (var));
		    gimple_add_tmp_var (x);
		    TREE_ADDRESSABLE (x) = 1;
		    x = build_fold_addr_expr_loc (clause_loc, x);
		  }
		else
		  break;

		x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
		gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
		gimple_seq_add_stmt (&new_body,
				     gimple_build_assign (new_var, x));
	      }
	    break;
	  case OMP_CLAUSE_USE_DEVICE_PTR:
	  case OMP_CLAUSE_IS_DEVICE_PTR:
	    var = OMP_CLAUSE_DECL (c);
	    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR)
	      x = build_sender_ref (var, ctx);
	    else
	      x = build_receiver_ref (var, false, ctx);
	    if (is_variable_sized (var))
	      {
		tree pvar = DECL_VALUE_EXPR (var);
		gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
		pvar = TREE_OPERAND (pvar, 0);
		gcc_assert (DECL_P (pvar));
		tree new_var = lookup_decl (pvar, ctx);
		gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
		gimple_seq_add_stmt (&new_body,
				     gimple_build_assign (new_var, x));
	      }
	    else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
	      {
		tree new_var = lookup_decl (var, ctx);
		new_var = DECL_VALUE_EXPR (new_var);
		gcc_assert (TREE_CODE (new_var) == MEM_REF);
		new_var = TREE_OPERAND (new_var, 0);
		gcc_assert (DECL_P (new_var));
		gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
		gimple_seq_add_stmt (&new_body,
				     gimple_build_assign (new_var, x));
	      }
	    else
	      {
		tree type = TREE_TYPE (var);
		tree new_var = lookup_decl (var, ctx);
		if (omp_is_reference (var))
		  {
		    type = TREE_TYPE (type);
		    if (TREE_CODE (type) != ARRAY_TYPE)
		      {
			tree v = create_tmp_var_raw (type, get_name (var));
			gimple_add_tmp_var (v);
			TREE_ADDRESSABLE (v) = 1;
			x = fold_convert (type, x);
			gimplify_expr (&x, &new_body, NULL, is_gimple_val,
				       fb_rvalue);
			gimple_seq_add_stmt (&new_body,
					     gimple_build_assign (v, x));
			x = build_fold_addr_expr (v);
		      }
		  }
		new_var = DECL_VALUE_EXPR (new_var);
		x = fold_convert (TREE_TYPE (new_var), x);
		gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
		gimple_seq_add_stmt (&new_body,
				     gimple_build_assign (new_var, x));
	      }
	    break;
	  }
      /* Handle GOMP_MAP_FIRSTPRIVATE_{POINTER,REFERENCE} in second pass,
	 so that firstprivate vars holding OMP_CLAUSE_SIZE if needed
	 are already handled.  Similarly OMP_CLAUSE_PRIVATE for VLAs
	 or references to VLAs.  */
      for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
	switch (OMP_CLAUSE_CODE (c))
	  {
	    tree var;
	  default:
	    break;
	  case OMP_CLAUSE_MAP:
	    if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
		|| OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
	      {
		location_t clause_loc = OMP_CLAUSE_LOCATION (c);
		HOST_WIDE_INT offset = 0;
		gcc_assert (prev);
		var = OMP_CLAUSE_DECL (c);
		if (DECL_P (var)
		    && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
		    && is_global_var (maybe_lookup_decl_in_outer_ctx (var,
								      ctx))
		    && varpool_node::get_create (var)->offloadable)
		  break;
		if (TREE_CODE (var) == INDIRECT_REF
		    && TREE_CODE (TREE_OPERAND (var, 0)) == COMPONENT_REF)
		  var = TREE_OPERAND (var, 0);
		if (TREE_CODE (var) == COMPONENT_REF)
		  {
		    var = get_addr_base_and_unit_offset (var, &offset);
		    gcc_assert (var != NULL_TREE && DECL_P (var));
		  }
		else if (DECL_SIZE (var)
			 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
		  {
		    tree var2 = DECL_VALUE_EXPR (var);
		    gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
		    var2 = TREE_OPERAND (var2, 0);
		    gcc_assert (DECL_P (var2));
		    var = var2;
		  }
		tree new_var = lookup_decl (var, ctx), x;
		tree type = TREE_TYPE (new_var);
		bool is_ref;
		if (TREE_CODE (OMP_CLAUSE_DECL (c)) == INDIRECT_REF
		    && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
			== COMPONENT_REF))
		  {
		    type = TREE_TYPE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0));
		    is_ref = true;
		    new_var = build2 (MEM_REF, type,
				      build_fold_addr_expr (new_var),
				      build_int_cst (build_pointer_type (type),
						     offset));
		  }
		else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPONENT_REF)
		  {
		    type = TREE_TYPE (OMP_CLAUSE_DECL (c));
		    is_ref = TREE_CODE (type) == REFERENCE_TYPE;
		    new_var = build2 (MEM_REF, type,
				      build_fold_addr_expr (new_var),
				      build_int_cst (build_pointer_type (type),
						     offset));
		  }
		else
		  is_ref = omp_is_reference (var);
		if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
		  is_ref = false;
		bool ref_to_array = false;
		if (is_ref)
		  {
		    type = TREE_TYPE (type);
		    if (TREE_CODE (type) == ARRAY_TYPE)
		      {
			type = build_pointer_type (type);
			ref_to_array = true;
		      }
		  }
		else if (TREE_CODE (type) == ARRAY_TYPE)
		  {
		    tree decl2 = DECL_VALUE_EXPR (new_var);
		    gcc_assert (TREE_CODE (decl2) == MEM_REF);
		    decl2 = TREE_OPERAND (decl2, 0);
		    gcc_assert (DECL_P (decl2));
		    new_var = decl2;
		    type = TREE_TYPE (new_var);
		  }
		x = build_receiver_ref (OMP_CLAUSE_DECL (prev), false, ctx);
		x = fold_convert_loc (clause_loc, type, x);
		if (!integer_zerop (OMP_CLAUSE_SIZE (c)))
		  {
		    tree bias = OMP_CLAUSE_SIZE (c);
		    if (DECL_P (bias))
		      bias = lookup_decl (bias, ctx);
		    bias = fold_convert_loc (clause_loc, sizetype, bias);
		    bias = fold_build1_loc (clause_loc, NEGATE_EXPR, sizetype,
					    bias);
		    x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
					 TREE_TYPE (x), x, bias);
		  }
		if (ref_to_array)
		  x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
		gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
		if (is_ref && !ref_to_array)
		  {
		    tree t = create_tmp_var_raw (type, get_name (var));
		    gimple_add_tmp_var (t);
		    TREE_ADDRESSABLE (t) = 1;
		    gimple_seq_add_stmt (&new_body,
					 gimple_build_assign (t, x));
		    x = build_fold_addr_expr_loc (clause_loc, t);
		  }
		gimple_seq_add_stmt (&new_body,
				     gimple_build_assign (new_var, x));
		prev = NULL_TREE;
	      }
	    else if (OMP_CLAUSE_CHAIN (c)
		     && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c))
			== OMP_CLAUSE_MAP
		     && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
			 == GOMP_MAP_FIRSTPRIVATE_POINTER
			 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
			     == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
	      prev = c;
	    break;
	  case OMP_CLAUSE_PRIVATE:
	    var = OMP_CLAUSE_DECL (c);
	    if (is_variable_sized (var))
	      {
		location_t clause_loc = OMP_CLAUSE_LOCATION (c);
		tree new_var = lookup_decl (var, ctx);
		tree pvar = DECL_VALUE_EXPR (var);
		gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
		pvar = TREE_OPERAND (pvar, 0);
		gcc_assert (DECL_P (pvar));
		tree new_pvar = lookup_decl (pvar, ctx);
		tree atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
		tree al = size_int (DECL_ALIGN (var));
		tree x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
		x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
		x = fold_convert_loc (clause_loc, TREE_TYPE (new_pvar), x);
		gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
		gimple_seq_add_stmt (&new_body,
				     gimple_build_assign (new_pvar, x));
	      }
	    else if (omp_is_reference (var) && !is_gimple_omp_oacc (ctx->stmt))
	      {
		location_t clause_loc = OMP_CLAUSE_LOCATION (c);
		tree new_var = lookup_decl (var, ctx);
		tree x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
		if (TREE_CONSTANT (x))
		  break;
		else
		  {
		    tree atmp
		      = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
		    tree rtype = TREE_TYPE (TREE_TYPE (new_var));
		    tree al = size_int (TYPE_ALIGN (rtype));
		    x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
		  }

		x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
		gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
		gimple_seq_add_stmt (&new_body,
				     gimple_build_assign (new_var, x));
	      }
	    break;
	  }

      gimple_seq fork_seq = NULL;
      gimple_seq join_seq = NULL;

      if (is_oacc_parallel (ctx))
	{
	  /* If there are reductions on the offloaded region itself, treat
	     them as a dummy GANG loop.  */
	  tree level = build_int_cst (integer_type_node, GOMP_DIM_GANG);

	  lower_oacc_reductions (gimple_location (ctx->stmt), clauses, level,
				 false, NULL, NULL, &fork_seq, &join_seq, ctx);
	}

      gimple_seq_add_seq (&new_body, fork_seq);
      gimple_seq_add_seq (&new_body, tgt_body);
      gimple_seq_add_seq (&new_body, join_seq);

      if (offloaded)
	new_body = maybe_catch_exception (new_body);

      gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
      gimple_omp_set_body (stmt, new_body);
    }
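
  /* Finally wrap everything up: ILIST (the sends), the target statement
     itself and OLIST (the copy-back code) go into a new GIMPLE_BIND
     replacing the original statement; depend clauses, if present, add
     one more enclosing bind around that.  */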
  bind = gimple_build_bind (NULL, NULL,
			    tgt_bind ? gimple_bind_block (tgt_bind)
				     : NULL_TREE);
  gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
  gimple_bind_add_seq (bind, ilist);
  gimple_bind_add_stmt (bind, stmt);
  gimple_bind_add_seq (bind, olist);

  pop_gimplify_context (NULL);

  if (dep_bind)
    {
      gimple_bind_add_seq (dep_bind, dep_ilist);
      gimple_bind_add_stmt (dep_bind, bind);
      gimple_bind_add_seq (dep_bind, dep_olist);
      pop_gimplify_context (dep_bind);
    }
}

/* Expand code for an OpenMP teams directive.  */

static void
lower_omp_teams (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  gomp_teams *teams_stmt = as_a <gomp_teams *> (gsi_stmt (*gsi_p));
  push_gimplify_context ();

  tree block = make_node (BLOCK);
  gbind *bind = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, bind, true);
  gimple_seq bind_body = NULL;
  gimple_seq dlist = NULL;
  gimple_seq olist = NULL;

  tree num_teams = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
				    OMP_CLAUSE_NUM_TEAMS);
  if (num_teams == NULL_TREE)
    num_teams = build_int_cst (unsigned_type_node, 0);
  else
    {
      num_teams = OMP_CLAUSE_NUM_TEAMS_EXPR (num_teams);
      num_teams = fold_convert (unsigned_type_node, num_teams);
      gimplify_expr (&num_teams, &bind_body, NULL, is_gimple_val, fb_rvalue);
    }
  tree thread_limit = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
				       OMP_CLAUSE_THREAD_LIMIT);
  if (thread_limit == NULL_TREE)
    thread_limit = build_int_cst (unsigned_type_node, 0);
  else
    {
      thread_limit = OMP_CLAUSE_THREAD_LIMIT_EXPR (thread_limit);
      thread_limit = fold_convert (unsigned_type_node, thread_limit);
      gimplify_expr (&thread_limit, &bind_body, NULL, is_gimple_val,
		     fb_rvalue);
    }
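
  /* In both cases zero is used for an absent clause; as far as the
     libgomp interface is concerned, that means "unspecified" and leaves
     the actual choice to the runtime.  */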

  lower_rec_input_clauses (gimple_omp_teams_clauses (teams_stmt),
			   &bind_body, &dlist, ctx, NULL);
  lower_omp (gimple_omp_body_ptr (teams_stmt), ctx);
  lower_reduction_clauses (gimple_omp_teams_clauses (teams_stmt), &olist, ctx);
  if (!gimple_omp_teams_grid_phony (teams_stmt))
    {
      gimple_seq_add_stmt (&bind_body, teams_stmt);
      location_t loc = gimple_location (teams_stmt);
      tree decl = builtin_decl_explicit (BUILT_IN_GOMP_TEAMS);
      gimple *call = gimple_build_call (decl, 2, num_teams, thread_limit);
      gimple_set_location (call, loc);
      gimple_seq_add_stmt (&bind_body, call);
    }

  gimple_seq_add_seq (&bind_body, gimple_omp_body (teams_stmt));
  gimple_omp_set_body (teams_stmt, NULL);
  gimple_seq_add_seq (&bind_body, olist);
  gimple_seq_add_seq (&bind_body, dlist);
  if (!gimple_omp_teams_grid_phony (teams_stmt))
    gimple_seq_add_stmt (&bind_body, gimple_build_omp_return (true));
  gimple_bind_set_body (bind, bind_body);

  pop_gimplify_context (bind);

  gimple_bind_append_vars (bind, ctx->block_vars);
  BLOCK_VARS (block) = ctx->block_vars;
  if (BLOCK_VARS (block))
    TREE_USED (block) = 1;
}

/* Expand code within an artificial GIMPLE_OMP_GRID_BODY OMP construct.  */

static void
lower_omp_grid_body (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  gimple *stmt = gsi_stmt (*gsi_p);
  lower_omp (gimple_omp_body_ptr (stmt), ctx);
  gimple_seq_add_stmt (gimple_omp_body_ptr (stmt),
		       gimple_build_omp_return (false));
}

/* Callback for lower_omp_1.  Return non-NULL if *tp needs to be
   regimplified.  If DATA is non-NULL, lower_omp_1 is outside
   of OMP context, but with task_shared_vars set.  */

static tree
lower_omp_regimplify_p (tree *tp, int *walk_subtrees,
			void *data)
{
  tree t = *tp;

  /* Any variable with DECL_VALUE_EXPR needs to be regimplified.  */
  if (VAR_P (t) && data == NULL && DECL_HAS_VALUE_EXPR_P (t))
    return t;

  if (task_shared_vars
      && DECL_P (t)
      && bitmap_bit_p (task_shared_vars, DECL_UID (t)))
    return t;

  /* If a global variable has been privatized, TREE_CONSTANT on
     ADDR_EXPR might be wrong.  */
  if (data == NULL && TREE_CODE (t) == ADDR_EXPR)
    recompute_tree_invariant_for_addr_expr (t);

  *walk_subtrees = !IS_TYPE_OR_DECL_P (t);
  return NULL_TREE;
}

/* Data to be communicated between lower_omp_regimplify_operands and
   lower_omp_regimplify_operands_p.  */

struct lower_omp_regimplify_operands_data
{
  omp_context *ctx;
  vec<tree> *decls;
};

/* Helper function for lower_omp_regimplify_operands.  Find
   omp_member_access_dummy_var vars and adjust temporarily their
   DECL_VALUE_EXPRs if needed.  */

static tree
lower_omp_regimplify_operands_p (tree *tp, int *walk_subtrees,
				 void *data)
{
  tree t = omp_member_access_dummy_var (*tp);
  if (t)
    {
      struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
      lower_omp_regimplify_operands_data *ldata
	= (lower_omp_regimplify_operands_data *) wi->info;
      tree o = maybe_lookup_decl (t, ldata->ctx);
      if (o != t)
	{
	  ldata->decls->safe_push (DECL_VALUE_EXPR (*tp));
	  ldata->decls->safe_push (*tp);
	  tree v = unshare_and_remap (DECL_VALUE_EXPR (*tp), t, o);
	  SET_DECL_VALUE_EXPR (*tp, v);
	}
    }
  *walk_subtrees = !IS_TYPE_OR_DECL_P (*tp);
  return NULL_TREE;
}

/* Wrapper around gimple_regimplify_operands that adjusts DECL_VALUE_EXPRs
   of omp_member_access_dummy_var vars during regimplification.  */

static void
lower_omp_regimplify_operands (omp_context *ctx, gimple *stmt,
			       gimple_stmt_iterator *gsi_p)
{
  auto_vec<tree, 10> decls;
  if (ctx)
    {
      struct walk_stmt_info wi;
      memset (&wi, '\0', sizeof (wi));
      struct lower_omp_regimplify_operands_data data;
      data.ctx = ctx;
      data.decls = &decls;
      wi.info = &data;
      walk_gimple_op (stmt, lower_omp_regimplify_operands_p, &wi);
    }
  gimple_regimplify_operands (stmt, gsi_p);
  while (!decls.is_empty ())
    {
      tree t = decls.pop ();
      tree v = decls.pop ();
      SET_DECL_VALUE_EXPR (t, v);
    }
}

static void
lower_omp_1 (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  gimple *stmt = gsi_stmt (*gsi_p);
  struct walk_stmt_info wi;
  gcall *call_stmt;

  if (gimple_has_location (stmt))
    input_location = gimple_location (stmt);

  if (task_shared_vars)
    memset (&wi, '\0', sizeof (wi));

  /* If we have issued syntax errors, avoid doing any heavy lifting.
     Just replace the OMP directives with a NOP to avoid
     confusing RTL expansion.  */
  if (seen_error () && is_gimple_omp (stmt))
    {
      gsi_replace (gsi_p, gimple_build_nop (), true);
      return;
    }

  switch (gimple_code (stmt))
    {
    case GIMPLE_COND:
      {
	gcond *cond_stmt = as_a <gcond *> (stmt);
	if ((ctx || task_shared_vars)
	    && (walk_tree (gimple_cond_lhs_ptr (cond_stmt),
			   lower_omp_regimplify_p,
			   ctx ? NULL : &wi, NULL)
		|| walk_tree (gimple_cond_rhs_ptr (cond_stmt),
			      lower_omp_regimplify_p,
			      ctx ? NULL : &wi, NULL)))
	  lower_omp_regimplify_operands (ctx, cond_stmt, gsi_p);
      }
      break;
    case GIMPLE_CATCH:
      lower_omp (gimple_catch_handler_ptr (as_a <gcatch *> (stmt)), ctx);
      break;
    case GIMPLE_EH_FILTER:
      lower_omp (gimple_eh_filter_failure_ptr (stmt), ctx);
      break;
    case GIMPLE_TRY:
      lower_omp (gimple_try_eval_ptr (stmt), ctx);
      lower_omp (gimple_try_cleanup_ptr (stmt), ctx);
      break;
    case GIMPLE_TRANSACTION:
      lower_omp (gimple_transaction_body_ptr (as_a <gtransaction *> (stmt)),
		 ctx);
      break;
    case GIMPLE_BIND:
      lower_omp (gimple_bind_body_ptr (as_a <gbind *> (stmt)), ctx);
      break;
    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      if (ctx->cancellable)
	ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
      lower_omp_taskreg (gsi_p, ctx);
      break;
    case GIMPLE_OMP_FOR:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      if (ctx->cancellable)
	ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
      lower_omp_for (gsi_p, ctx);
      break;
    case GIMPLE_OMP_SECTIONS:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      if (ctx->cancellable)
	ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
      lower_omp_sections (gsi_p, ctx);
      break;
    case GIMPLE_OMP_SINGLE:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_single (gsi_p, ctx);
      break;
    case GIMPLE_OMP_MASTER:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_master (gsi_p, ctx);
      break;
    case GIMPLE_OMP_TASKGROUP:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_taskgroup (gsi_p, ctx);
      break;
    case GIMPLE_OMP_ORDERED:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_ordered (gsi_p, ctx);
      break;
    case GIMPLE_OMP_CRITICAL:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_critical (gsi_p, ctx);
      break;
    case GIMPLE_OMP_ATOMIC_LOAD:
      if ((ctx || task_shared_vars)
	  && walk_tree (gimple_omp_atomic_load_rhs_ptr (
			  as_a <gomp_atomic_load *> (stmt)),
			lower_omp_regimplify_p, ctx ? NULL : &wi, NULL))
	lower_omp_regimplify_operands (ctx, stmt, gsi_p);
      break;
    case GIMPLE_OMP_TARGET:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_target (gsi_p, ctx);
      break;
    case GIMPLE_OMP_TEAMS:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_teams (gsi_p, ctx);
      break;
    case GIMPLE_OMP_GRID_BODY:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_grid_body (gsi_p, ctx);
      break;
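
    /* For calls, what mainly needs lowering are the cancellation-aware
       builtins (an explanatory note): inside a cancellable construct,
       GOMP_barrier is turned into GOMP_barrier_cancel, and the boolean
       result of the cancel/cancellation point builtins is tested and
       branched to the construct's cancel_label.  */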
    case GIMPLE_CALL:
      tree fndecl;
      call_stmt = as_a <gcall *> (stmt);
      fndecl = gimple_call_fndecl (call_stmt);
      if (fndecl
	  && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
	switch (DECL_FUNCTION_CODE (fndecl))
	  {
	  case BUILT_IN_GOMP_BARRIER:
	    if (ctx == NULL)
	      break;
	    /* FALLTHRU */
	  case BUILT_IN_GOMP_CANCEL:
	  case BUILT_IN_GOMP_CANCELLATION_POINT:
	    omp_context *cctx;
	    cctx = ctx;
	    if (gimple_code (cctx->stmt) == GIMPLE_OMP_SECTION)
	      cctx = cctx->outer;
	    gcc_assert (gimple_call_lhs (call_stmt) == NULL_TREE);
	    if (!cctx->cancellable)
	      {
		if (DECL_FUNCTION_CODE (fndecl)
		    == BUILT_IN_GOMP_CANCELLATION_POINT)
		  {
		    stmt = gimple_build_nop ();
		    gsi_replace (gsi_p, stmt, false);
		  }
		break;
	      }
	    if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_GOMP_BARRIER)
	      {
		fndecl = builtin_decl_explicit (BUILT_IN_GOMP_BARRIER_CANCEL);
		gimple_call_set_fndecl (call_stmt, fndecl);
		gimple_call_set_fntype (call_stmt, TREE_TYPE (fndecl));
	      }
	    tree lhs;
	    lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (fndecl)));
	    gimple_call_set_lhs (call_stmt, lhs);
	    tree fallthru_label;
	    fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
	    gimple *g;
	    g = gimple_build_label (fallthru_label);
	    gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
	    g = gimple_build_cond (NE_EXPR, lhs,
				   fold_convert (TREE_TYPE (lhs),
						 boolean_false_node),
				   cctx->cancel_label, fallthru_label);
	    gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
	    break;
	  default:
	    break;
	  }
      /* FALLTHRU */
    default:
      if ((ctx || task_shared_vars)
	  && walk_gimple_op (stmt, lower_omp_regimplify_p,
			     ctx ? NULL : &wi))
	{
	  /* Just remove clobbers, this should happen only if we have
	     "privatized" local addressable variables in SIMD regions,
	     the clobber isn't needed in that case and gimplifying address
	     of the ARRAY_REF into a pointer and creating MEM_REF based
	     clobber would create worse code than we get with the clobber
	     dropped.  */
	  if (gimple_clobber_p (stmt))
	    {
	      gsi_replace (gsi_p, gimple_build_nop (), true);
	      break;
	    }
	  lower_omp_regimplify_operands (ctx, stmt, gsi_p);
	}
      break;
    }
}

static void
lower_omp (gimple_seq *body, omp_context *ctx)
{
  location_t saved_location = input_location;
  gimple_stmt_iterator gsi;
  for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
    lower_omp_1 (&gsi, ctx);
  /* During gimplification, we haven't folded statements inside offloading
     or taskreg regions (gimplify.c:maybe_fold_stmt); do that now.  */
  if (target_nesting_level || taskreg_nesting_level)
    for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
      fold_stmt (&gsi);
  input_location = saved_location;
}

/* Main entry point.  */

static unsigned int
execute_lower_omp (void)
{
  gimple_seq body;
  int i;
  omp_context *ctx;

  /* This pass always runs, to provide PROP_gimple_lomp.
     But often, there is nothing to do.  */
  if (flag_cilkplus == 0 && flag_openacc == 0 && flag_openmp == 0
      && flag_openmp_simd == 0)
    return 0;

  all_contexts = splay_tree_new (splay_tree_compare_pointers, 0,
				 delete_omp_context);

  body = gimple_body (current_function_decl);

  if (hsa_gen_requested_p ())
    omp_grid_gridify_all_targets (&body);

  scan_omp (&body, NULL);
  gcc_assert (taskreg_nesting_level == 0);
  FOR_EACH_VEC_ELT (taskreg_contexts, i, ctx)
    finish_taskreg_scan (ctx);
  taskreg_contexts.release ();

  if (all_contexts->root)
    {
      if (task_shared_vars)
	push_gimplify_context ();
      lower_omp (&body, NULL);
      if (task_shared_vars)
	pop_gimplify_context (NULL);
    }

  if (all_contexts)
    {
      splay_tree_delete (all_contexts);
      all_contexts = NULL;
    }
  BITMAP_FREE (task_shared_vars);
  return 0;
}

namespace {

const pass_data pass_data_lower_omp =
{
  GIMPLE_PASS, /* type */
  "omplower", /* name */
  OPTGROUP_OPENMP, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  PROP_gimple_lomp | PROP_gimple_lomp_dev, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_lower_omp : public gimple_opt_pass
{
public:
  pass_lower_omp (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_lower_omp, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return execute_lower_omp (); }

}; // class pass_lower_omp

} // anon namespace

gimple_opt_pass *
make_pass_lower_omp (gcc::context *ctxt)
{
  return new pass_lower_omp (ctxt);
}

/* The following is a utility to diagnose structured block violations.
   It is not part of the "omplower" pass, as that's invoked too late.  It
   should be invoked by the respective front ends after gimplification.  */
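
/* As an illustrative example (not taken from the testsuite), the
   following is rejected here, because the goto leaves an OpenMP
   structured block:

       #pragma omp parallel
       {
	 goto fail;  // error: invalid branch to/from OpenMP structured block
       }
       fail:;
*/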

static splay_tree all_labels;

/* Check for mismatched contexts and generate an error if needed.  Return
   true if an error is detected.  */

static bool
diagnose_sb_0 (gimple_stmt_iterator *gsi_p,
	       gimple *branch_ctx, gimple *label_ctx)
{
  gcc_checking_assert (!branch_ctx || is_gimple_omp (branch_ctx));
  gcc_checking_assert (!label_ctx || is_gimple_omp (label_ctx));

  if (label_ctx == branch_ctx)
    return false;

  const char *kind = NULL;

  if (flag_cilkplus)
    {
      if ((branch_ctx
	   && gimple_code (branch_ctx) == GIMPLE_OMP_FOR
	   && gimple_omp_for_kind (branch_ctx) == GF_OMP_FOR_KIND_CILKSIMD)
	  || (label_ctx
	      && gimple_code (label_ctx) == GIMPLE_OMP_FOR
	      && gimple_omp_for_kind (label_ctx) == GF_OMP_FOR_KIND_CILKSIMD))
	kind = "Cilk Plus";
    }
  if (flag_openacc)
    {
      if ((branch_ctx && is_gimple_omp_oacc (branch_ctx))
	  || (label_ctx && is_gimple_omp_oacc (label_ctx)))
	{
	  gcc_checking_assert (kind == NULL);
	  kind = "OpenACC";
	}
    }
  if (kind == NULL)
    {
      gcc_checking_assert (flag_openmp);
      kind = "OpenMP";
    }

  /* Previously we kept track of the label's entire context in diagnose_sb_[12]
     so we could traverse it and issue a correct "exit" or "enter" error
     message upon a structured block violation.

     We built the context by building a list with tree_cons'ing, but there is
     no easy counterpart in gimple tuples.  It seems like far too much work
     for issuing exit/enter error messages.  If someone really misses the
     distinct error message... patches welcome.  */

#if 0
  /* Try to avoid confusing the user by producing an error message
     with correct "exit" or "enter" verbiage.  We prefer "exit"
     unless we can show that LABEL_CTX is nested within BRANCH_CTX.  */
  if (branch_ctx == NULL)
    exit_p = false;
  else
    {
      while (label_ctx)
	{
	  if (TREE_VALUE (label_ctx) == branch_ctx)
	    {
	      exit_p = false;
	      break;
	    }
	  label_ctx = TREE_CHAIN (label_ctx);
	}
    }

  if (exit_p)
    error ("invalid exit from %s structured block", kind);
  else
    error ("invalid entry to %s structured block", kind);
#endif

  /* If it's obvious we have an invalid entry, be specific about the error.  */
  if (branch_ctx == NULL)
    error ("invalid entry to %s structured block", kind);
  else
    {
      /* Otherwise, be vague and lazy, but efficient.  */
      error ("invalid branch to/from %s structured block", kind);
    }

  gsi_replace (gsi_p, gimple_build_nop (), false);
  return true;
}

/* Pass 1: Create a minimal tree of structured blocks, and record
   where each label is found.  */

static tree
diagnose_sb_1 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
	       struct walk_stmt_info *wi)
{
  gimple *context = (gimple *) wi->info;
  gimple *inner_context;
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;

  switch (gimple_code (stmt))
    {
    WALK_SUBSTMTS;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_TARGET:
    case GIMPLE_OMP_TEAMS:
    case GIMPLE_OMP_TASKGROUP:
      /* The minimal context here is just the current OMP construct.  */
      inner_context = stmt;
      wi->info = inner_context;
      walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_OMP_FOR:
      inner_context = stmt;
      wi->info = inner_context;
      /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
	 walk them.  */
      walk_gimple_seq (gimple_omp_for_pre_body (stmt),
		       diagnose_sb_1, NULL, wi);
      walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_LABEL:
      splay_tree_insert (all_labels,
			 (splay_tree_key) gimple_label_label (
					    as_a <glabel *> (stmt)),
			 (splay_tree_value) context);
      break;

    default:
      break;
    }

  return NULL_TREE;
}

/* Pass 2: Check each branch and see if its context differs from that of
   the destination label's context.  */

static tree
diagnose_sb_2 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
	       struct walk_stmt_info *wi)
{
  gimple *context = (gimple *) wi->info;
  splay_tree_node n;
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;

  switch (gimple_code (stmt))
    {
    WALK_SUBSTMTS;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_TARGET:
    case GIMPLE_OMP_TEAMS:
    case GIMPLE_OMP_TASKGROUP:
      wi->info = stmt;
      walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_OMP_FOR:
      wi->info = stmt;
      /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
	 walk them.  */
      walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt),
			   diagnose_sb_2, NULL, wi);
      walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_COND:
      {
	gcond *cond_stmt = as_a <gcond *> (stmt);
	tree lab = gimple_cond_true_label (cond_stmt);
	if (lab)
	  {
	    n = splay_tree_lookup (all_labels,
				   (splay_tree_key) lab);
	    diagnose_sb_0 (gsi_p, context,
			   n ? (gimple *) n->value : NULL);
	  }
	lab = gimple_cond_false_label (cond_stmt);
	if (lab)
	  {
	    n = splay_tree_lookup (all_labels,
				   (splay_tree_key) lab);
	    diagnose_sb_0 (gsi_p, context,
			   n ? (gimple *) n->value : NULL);
	  }
      }
      break;

    case GIMPLE_GOTO:
      {
	tree lab = gimple_goto_dest (stmt);
	if (TREE_CODE (lab) != LABEL_DECL)
	  break;

	n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
	diagnose_sb_0 (gsi_p, context, n ? (gimple *) n->value : NULL);
      }
      break;

    case GIMPLE_SWITCH:
      {
	gswitch *switch_stmt = as_a <gswitch *> (stmt);
	unsigned int i;
	for (i = 0; i < gimple_switch_num_labels (switch_stmt); ++i)
	  {
	    tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
	    n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
	    if (n && diagnose_sb_0 (gsi_p, context, (gimple *) n->value))
	      break;
	  }
      }
      break;

    case GIMPLE_RETURN:
      diagnose_sb_0 (gsi_p, context, NULL);
      break;

    default:
      break;
    }

  return NULL_TREE;
}

static unsigned int
diagnose_omp_structured_block_errors (void)
{
  struct walk_stmt_info wi;
  gimple_seq body = gimple_body (current_function_decl);

  all_labels = splay_tree_new (splay_tree_compare_pointers, 0, 0);

  memset (&wi, 0, sizeof (wi));
  walk_gimple_seq (body, diagnose_sb_1, NULL, &wi);

  memset (&wi, 0, sizeof (wi));
  wi.want_locations = true;
  walk_gimple_seq_mod (&body, diagnose_sb_2, NULL, &wi);

  gimple_set_body (current_function_decl, body);

  splay_tree_delete (all_labels);
  all_labels = NULL;

  return 0;
}

namespace {

const pass_data pass_data_diagnose_omp_blocks =
{
  GIMPLE_PASS, /* type */
  "*diagnose_omp_blocks", /* name */
  OPTGROUP_OPENMP, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_diagnose_omp_blocks : public gimple_opt_pass
{
public:
  pass_diagnose_omp_blocks (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_diagnose_omp_blocks, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
  {
    return flag_cilkplus || flag_openacc || flag_openmp;
  }
  virtual unsigned int execute (function *)
  {
    return diagnose_omp_structured_block_errors ();
  }

}; // class pass_diagnose_omp_blocks

} // anon namespace

gimple_opt_pass *
make_pass_diagnose_omp_blocks (gcc::context *ctxt)
{
  return new pass_diagnose_omp_blocks (ctxt);
}

#include "gt-omp-low.h"