Fix PR c++/79360
[official-gcc.git] / gcc / omp-low.c
blobff0f4477cd774f0e9c2eb6df14ffb16e942d770c
1 /* Lowering pass for OMP directives. Converts OMP directives into explicit
2 calls to the runtime library (libgomp), data marshalling to implement data
3 sharing and copying clauses, offloading to accelerators, and more.
5 Contributed by Diego Novillo <dnovillo@redhat.com>
7 Copyright (C) 2005-2017 Free Software Foundation, Inc.
9 This file is part of GCC.
11 GCC is free software; you can redistribute it and/or modify it under
12 the terms of the GNU General Public License as published by the Free
13 Software Foundation; either version 3, or (at your option) any later
14 version.
16 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
17 WARRANTY; without even the implied warranty of MERCHANTABILITY or
18 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19 for more details.
21 You should have received a copy of the GNU General Public License
22 along with GCC; see the file COPYING3. If not see
23 <http://www.gnu.org/licenses/>. */
25 #include "config.h"
26 #include "system.h"
27 #include "coretypes.h"
28 #include "backend.h"
29 #include "target.h"
30 #include "tree.h"
31 #include "gimple.h"
32 #include "tree-pass.h"
33 #include "ssa.h"
34 #include "cgraph.h"
35 #include "pretty-print.h"
36 #include "diagnostic-core.h"
37 #include "fold-const.h"
38 #include "stor-layout.h"
39 #include "internal-fn.h"
40 #include "gimple-fold.h"
41 #include "gimplify.h"
42 #include "gimple-iterator.h"
43 #include "gimplify-me.h"
44 #include "gimple-walk.h"
45 #include "tree-iterator.h"
46 #include "tree-inline.h"
47 #include "langhooks.h"
48 #include "tree-dfa.h"
49 #include "tree-ssa.h"
50 #include "splay-tree.h"
51 #include "omp-general.h"
52 #include "omp-low.h"
53 #include "omp-grid.h"
54 #include "gimple-low.h"
55 #include "symbol-summary.h"
56 #include "tree-nested.h"
57 #include "context.h"
58 #include "gomp-constants.h"
59 #include "gimple-pretty-print.h"
60 #include "hsa-common.h"
62 /* Lowering of OMP parallel and workshare constructs proceeds in two
63 phases. The first phase scans the function looking for OMP statements
64 and then for variables that must be replaced to satisfy data sharing
65 clauses. The second phase expands code for the constructs, as well as
66 re-gimplifying things when variables have been replaced with complex
67 expressions.
69 Final code generation is done by pass_expand_omp. The flowgraph is
70 scanned for regions which are then moved to a new
71 function, to be invoked by the thread library, or offloaded. */
73 /* Context structure. Used to store information about each parallel
74 directive in the code. */
76 struct omp_context
78 /* This field must be at the beginning, as we do "inheritance": Some
79 callback functions for tree-inline.c (e.g., omp_copy_decl)
80 receive a copy_body_data pointer that is up-casted to an
81 omp_context pointer. */
82 copy_body_data cb;
84 /* The tree of contexts corresponding to the encountered constructs. */
85 struct omp_context *outer;
86 gimple *stmt;
88 /* Map variables to fields in a structure that allows communication
89 between sending and receiving threads. */
90 splay_tree field_map;
91 tree record_type;
92 tree sender_decl;
93 tree receiver_decl;
95 /* These are used just by task contexts, if task firstprivate fn is
96 needed. srecord_type is used to communicate from the thread
97 that encountered the task construct to task firstprivate fn,
98 record_type is allocated by GOMP_task, initialized by task firstprivate
99 fn and passed to the task body fn. */
100 splay_tree sfield_map;
101 tree srecord_type;
103 /* A chain of variables to add to the top-level block surrounding the
104 construct. In the case of a parallel, this is in the child function. */
105 tree block_vars;
107 /* Label to which GOMP_cancel{,llation_point} and explicit and implicit
108 barriers should jump to during omplower pass. */
109 tree cancel_label;
111 /* The sibling GIMPLE_OMP_FOR simd with _simt_ clause or NULL
112 otherwise. */
113 gimple *simt_stmt;
115 /* What to do with variables with implicitly determined sharing
116 attributes. */
117 enum omp_clause_default_kind default_kind;
119 /* Nesting depth of this context. Used to beautify error messages re
120 invalid gotos. The outermost ctx is depth 1, with depth 0 being
121 reserved for the main body of the function. */
122 int depth;
124 /* True if this parallel directive is nested within another. */
125 bool is_nested;
127 /* True if this construct can be cancelled. */
128 bool cancellable;
131 static splay_tree all_contexts;
132 static int taskreg_nesting_level;
133 static int target_nesting_level;
134 static bitmap task_shared_vars;
135 static vec<omp_context *> taskreg_contexts;
137 static void scan_omp (gimple_seq *, omp_context *);
138 static tree scan_omp_1_op (tree *, int *, void *);
140 #define WALK_SUBSTMTS \
141 case GIMPLE_BIND: \
142 case GIMPLE_TRY: \
143 case GIMPLE_CATCH: \
144 case GIMPLE_EH_FILTER: \
145 case GIMPLE_TRANSACTION: \
146 /* The sub-statements for these should be walked. */ \
147 *handled_ops_p = false; \
148 break;
150 /* Return true if CTX corresponds to an oacc parallel region. */
152 static bool
153 is_oacc_parallel (omp_context *ctx)
155 enum gimple_code outer_type = gimple_code (ctx->stmt);
156 return ((outer_type == GIMPLE_OMP_TARGET)
157 && (gimple_omp_target_kind (ctx->stmt)
158 == GF_OMP_TARGET_KIND_OACC_PARALLEL));
161 /* Return true if CTX corresponds to an oacc kernels region. */
163 static bool
164 is_oacc_kernels (omp_context *ctx)
166 enum gimple_code outer_type = gimple_code (ctx->stmt);
167 return ((outer_type == GIMPLE_OMP_TARGET)
168 && (gimple_omp_target_kind (ctx->stmt)
169 == GF_OMP_TARGET_KIND_OACC_KERNELS));
172 /* If DECL is the artificial dummy VAR_DECL created for non-static
173 data member privatization, return the underlying "this" parameter,
174 otherwise return NULL. */
176 tree
177 omp_member_access_dummy_var (tree decl)
179 if (!VAR_P (decl)
180 || !DECL_ARTIFICIAL (decl)
181 || !DECL_IGNORED_P (decl)
182 || !DECL_HAS_VALUE_EXPR_P (decl)
183 || !lang_hooks.decls.omp_disregard_value_expr (decl, false))
184 return NULL_TREE;
186 tree v = DECL_VALUE_EXPR (decl);
187 if (TREE_CODE (v) != COMPONENT_REF)
188 return NULL_TREE;
190 while (1)
191 switch (TREE_CODE (v))
193 case COMPONENT_REF:
194 case MEM_REF:
195 case INDIRECT_REF:
196 CASE_CONVERT:
197 case POINTER_PLUS_EXPR:
198 v = TREE_OPERAND (v, 0);
199 continue;
200 case PARM_DECL:
201 if (DECL_CONTEXT (v) == current_function_decl
202 && DECL_ARTIFICIAL (v)
203 && TREE_CODE (TREE_TYPE (v)) == POINTER_TYPE)
204 return v;
205 return NULL_TREE;
206 default:
207 return NULL_TREE;
211 /* Helper for unshare_and_remap, called through walk_tree. */
213 static tree
214 unshare_and_remap_1 (tree *tp, int *walk_subtrees, void *data)
216 tree *pair = (tree *) data;
217 if (*tp == pair[0])
219 *tp = unshare_expr (pair[1]);
220 *walk_subtrees = 0;
222 else if (IS_TYPE_OR_DECL_P (*tp))
223 *walk_subtrees = 0;
224 return NULL_TREE;
227 /* Return unshare_expr (X) with all occurrences of FROM
228 replaced with TO. */
230 static tree
231 unshare_and_remap (tree x, tree from, tree to)
233 tree pair[2] = { from, to };
234 x = unshare_expr (x);
235 walk_tree (&x, unshare_and_remap_1, pair, NULL);
236 return x;
239 /* Convenience function for calling scan_omp_1_op on tree operands. */
241 static inline tree
242 scan_omp_op (tree *tp, omp_context *ctx)
244 struct walk_stmt_info wi;
246 memset (&wi, 0, sizeof (wi));
247 wi.info = ctx;
248 wi.want_locations = true;
250 return walk_tree (tp, scan_omp_1_op, &wi, NULL);
253 static void lower_omp (gimple_seq *, omp_context *);
254 static tree lookup_decl_in_outer_ctx (tree, omp_context *);
255 static tree maybe_lookup_decl_in_outer_ctx (tree, omp_context *);
257 /* Return true if CTX is for an omp parallel. */
259 static inline bool
260 is_parallel_ctx (omp_context *ctx)
262 return gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL;
266 /* Return true if CTX is for an omp task. */
268 static inline bool
269 is_task_ctx (omp_context *ctx)
271 return gimple_code (ctx->stmt) == GIMPLE_OMP_TASK;
275 /* Return true if CTX is for an omp taskloop. */
277 static inline bool
278 is_taskloop_ctx (omp_context *ctx)
280 return gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
281 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP;
285 /* Return true if CTX is for an omp parallel or omp task. */
287 static inline bool
288 is_taskreg_ctx (omp_context *ctx)
290 return is_parallel_ctx (ctx) || is_task_ctx (ctx);
293 /* Return true if EXPR is variable sized. */
295 static inline bool
296 is_variable_sized (const_tree expr)
298 return !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
301 /* Lookup variables. The "maybe" form
302 allows for the variable form to not have been entered, otherwise we
303 assert that the variable must have been entered. */
305 static inline tree
306 lookup_decl (tree var, omp_context *ctx)
308 tree *n = ctx->cb.decl_map->get (var);
309 return *n;
312 static inline tree
313 maybe_lookup_decl (const_tree var, omp_context *ctx)
315 tree *n = ctx->cb.decl_map->get (const_cast<tree> (var));
316 return n ? *n : NULL_TREE;
319 static inline tree
320 lookup_field (tree var, omp_context *ctx)
322 splay_tree_node n;
323 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) var);
324 return (tree) n->value;
327 static inline tree
328 lookup_sfield (splay_tree_key key, omp_context *ctx)
330 splay_tree_node n;
331 n = splay_tree_lookup (ctx->sfield_map
332 ? ctx->sfield_map : ctx->field_map, key);
333 return (tree) n->value;
336 static inline tree
337 lookup_sfield (tree var, omp_context *ctx)
339 return lookup_sfield ((splay_tree_key) var, ctx);
342 static inline tree
343 maybe_lookup_field (splay_tree_key key, omp_context *ctx)
345 splay_tree_node n;
346 n = splay_tree_lookup (ctx->field_map, key);
347 return n ? (tree) n->value : NULL_TREE;
350 static inline tree
351 maybe_lookup_field (tree var, omp_context *ctx)
353 return maybe_lookup_field ((splay_tree_key) var, ctx);
356 /* Return true if DECL should be copied by pointer. SHARED_CTX is
357 the parallel context if DECL is to be shared. */
359 static bool
360 use_pointer_for_field (tree decl, omp_context *shared_ctx)
362 if (AGGREGATE_TYPE_P (TREE_TYPE (decl))
363 || TYPE_ATOMIC (TREE_TYPE (decl)))
364 return true;
366 /* We can only use copy-in/copy-out semantics for shared variables
367 when we know the value is not accessible from an outer scope. */
368 if (shared_ctx)
370 gcc_assert (!is_gimple_omp_oacc (shared_ctx->stmt));
372 /* ??? Trivially accessible from anywhere. But why would we even
373 be passing an address in this case? Should we simply assert
374 this to be false, or should we have a cleanup pass that removes
375 these from the list of mappings? */
376 if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
377 return true;
379 /* For variables with DECL_HAS_VALUE_EXPR_P set, we cannot tell
380 without analyzing the expression whether or not its location
381 is accessible to anyone else. In the case of nested parallel
382 regions it certainly may be. */
383 if (TREE_CODE (decl) != RESULT_DECL && DECL_HAS_VALUE_EXPR_P (decl))
384 return true;
386 /* Do not use copy-in/copy-out for variables that have their
387 address taken. */
388 if (TREE_ADDRESSABLE (decl))
389 return true;
391 /* lower_send_shared_vars only uses copy-in, but not copy-out
392 for these. */
393 if (TREE_READONLY (decl)
394 || ((TREE_CODE (decl) == RESULT_DECL
395 || TREE_CODE (decl) == PARM_DECL)
396 && DECL_BY_REFERENCE (decl)))
397 return false;
399 /* Disallow copy-in/out in nested parallel if
400 decl is shared in outer parallel, otherwise
401 each thread could store the shared variable
402 in its own copy-in location, making the
403 variable no longer really shared. */
404 if (shared_ctx->is_nested)
406 omp_context *up;
408 for (up = shared_ctx->outer; up; up = up->outer)
409 if (is_taskreg_ctx (up) && maybe_lookup_decl (decl, up))
410 break;
412 if (up)
414 tree c;
416 for (c = gimple_omp_taskreg_clauses (up->stmt);
417 c; c = OMP_CLAUSE_CHAIN (c))
418 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
419 && OMP_CLAUSE_DECL (c) == decl)
420 break;
422 if (c)
423 goto maybe_mark_addressable_and_ret;
427 /* For tasks avoid using copy-in/out. As tasks can be
428 deferred or executed in different thread, when GOMP_task
429 returns, the task hasn't necessarily terminated. */
430 if (is_task_ctx (shared_ctx))
432 tree outer;
433 maybe_mark_addressable_and_ret:
434 outer = maybe_lookup_decl_in_outer_ctx (decl, shared_ctx);
435 if (is_gimple_reg (outer) && !omp_member_access_dummy_var (outer))
437 /* Taking address of OUTER in lower_send_shared_vars
438 might need regimplification of everything that uses the
439 variable. */
440 if (!task_shared_vars)
441 task_shared_vars = BITMAP_ALLOC (NULL);
442 bitmap_set_bit (task_shared_vars, DECL_UID (outer));
443 TREE_ADDRESSABLE (outer) = 1;
445 return true;
449 return false;
452 /* Construct a new automatic decl similar to VAR. */
454 static tree
455 omp_copy_decl_2 (tree var, tree name, tree type, omp_context *ctx)
457 tree copy = copy_var_decl (var, name, type);
459 DECL_CONTEXT (copy) = current_function_decl;
460 DECL_CHAIN (copy) = ctx->block_vars;
461 /* If VAR is listed in task_shared_vars, it means it wasn't
462 originally addressable and is just because task needs to take
463 it's address. But we don't need to take address of privatizations
464 from that var. */
465 if (TREE_ADDRESSABLE (var)
466 && task_shared_vars
467 && bitmap_bit_p (task_shared_vars, DECL_UID (var)))
468 TREE_ADDRESSABLE (copy) = 0;
469 ctx->block_vars = copy;
471 return copy;
474 static tree
475 omp_copy_decl_1 (tree var, omp_context *ctx)
477 return omp_copy_decl_2 (var, DECL_NAME (var), TREE_TYPE (var), ctx);
480 /* Build COMPONENT_REF and set TREE_THIS_VOLATILE and TREE_READONLY on it
481 as appropriate. */
482 static tree
483 omp_build_component_ref (tree obj, tree field)
485 tree ret = build3 (COMPONENT_REF, TREE_TYPE (field), obj, field, NULL);
486 if (TREE_THIS_VOLATILE (field))
487 TREE_THIS_VOLATILE (ret) |= 1;
488 if (TREE_READONLY (field))
489 TREE_READONLY (ret) |= 1;
490 return ret;
493 /* Build tree nodes to access the field for VAR on the receiver side. */
495 static tree
496 build_receiver_ref (tree var, bool by_ref, omp_context *ctx)
498 tree x, field = lookup_field (var, ctx);
500 /* If the receiver record type was remapped in the child function,
501 remap the field into the new record type. */
502 x = maybe_lookup_field (field, ctx);
503 if (x != NULL)
504 field = x;
506 x = build_simple_mem_ref (ctx->receiver_decl);
507 TREE_THIS_NOTRAP (x) = 1;
508 x = omp_build_component_ref (x, field);
509 if (by_ref)
511 x = build_simple_mem_ref (x);
512 TREE_THIS_NOTRAP (x) = 1;
515 return x;
518 /* Build tree nodes to access VAR in the scope outer to CTX. In the case
519 of a parallel, this is a component reference; for workshare constructs
520 this is some variable. */
522 static tree
523 build_outer_var_ref (tree var, omp_context *ctx,
524 enum omp_clause_code code = OMP_CLAUSE_ERROR)
526 tree x;
528 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
529 x = var;
530 else if (is_variable_sized (var))
532 x = TREE_OPERAND (DECL_VALUE_EXPR (var), 0);
533 x = build_outer_var_ref (x, ctx, code);
534 x = build_simple_mem_ref (x);
536 else if (is_taskreg_ctx (ctx))
538 bool by_ref = use_pointer_for_field (var, NULL);
539 x = build_receiver_ref (var, by_ref, ctx);
541 else if ((gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
542 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
543 || (code == OMP_CLAUSE_PRIVATE
544 && (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
545 || gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS
546 || gimple_code (ctx->stmt) == GIMPLE_OMP_SINGLE)))
548 /* #pragma omp simd isn't a worksharing construct, and can reference
549 even private vars in its linear etc. clauses.
550 Similarly for OMP_CLAUSE_PRIVATE with outer ref, that can refer
551 to private vars in all worksharing constructs. */
552 x = NULL_TREE;
553 if (ctx->outer && is_taskreg_ctx (ctx))
554 x = lookup_decl (var, ctx->outer);
555 else if (ctx->outer)
556 x = maybe_lookup_decl_in_outer_ctx (var, ctx);
557 if (x == NULL_TREE)
558 x = var;
560 else if (code == OMP_CLAUSE_LASTPRIVATE && is_taskloop_ctx (ctx))
562 gcc_assert (ctx->outer);
563 splay_tree_node n
564 = splay_tree_lookup (ctx->outer->field_map,
565 (splay_tree_key) &DECL_UID (var));
566 if (n == NULL)
568 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx->outer)))
569 x = var;
570 else
571 x = lookup_decl (var, ctx->outer);
573 else
575 tree field = (tree) n->value;
576 /* If the receiver record type was remapped in the child function,
577 remap the field into the new record type. */
578 x = maybe_lookup_field (field, ctx->outer);
579 if (x != NULL)
580 field = x;
582 x = build_simple_mem_ref (ctx->outer->receiver_decl);
583 x = omp_build_component_ref (x, field);
584 if (use_pointer_for_field (var, ctx->outer))
585 x = build_simple_mem_ref (x);
588 else if (ctx->outer)
590 omp_context *outer = ctx->outer;
591 if (gimple_code (outer->stmt) == GIMPLE_OMP_GRID_BODY)
593 outer = outer->outer;
594 gcc_assert (outer
595 && gimple_code (outer->stmt) != GIMPLE_OMP_GRID_BODY);
597 x = lookup_decl (var, outer);
599 else if (omp_is_reference (var))
600 /* This can happen with orphaned constructs. If var is reference, it is
601 possible it is shared and as such valid. */
602 x = var;
603 else if (omp_member_access_dummy_var (var))
604 x = var;
605 else
606 gcc_unreachable ();
608 if (x == var)
610 tree t = omp_member_access_dummy_var (var);
611 if (t)
613 x = DECL_VALUE_EXPR (var);
614 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
615 if (o != t)
616 x = unshare_and_remap (x, t, o);
617 else
618 x = unshare_expr (x);
622 if (omp_is_reference (var))
623 x = build_simple_mem_ref (x);
625 return x;
628 /* Build tree nodes to access the field for VAR on the sender side. */
630 static tree
631 build_sender_ref (splay_tree_key key, omp_context *ctx)
633 tree field = lookup_sfield (key, ctx);
634 return omp_build_component_ref (ctx->sender_decl, field);
637 static tree
638 build_sender_ref (tree var, omp_context *ctx)
640 return build_sender_ref ((splay_tree_key) var, ctx);
643 /* Add a new field for VAR inside the structure CTX->SENDER_DECL. If
644 BASE_POINTERS_RESTRICT, declare the field with restrict. */
646 static void
647 install_var_field (tree var, bool by_ref, int mask, omp_context *ctx,
648 bool base_pointers_restrict = false)
650 tree field, type, sfield = NULL_TREE;
651 splay_tree_key key = (splay_tree_key) var;
653 if ((mask & 8) != 0)
655 key = (splay_tree_key) &DECL_UID (var);
656 gcc_checking_assert (key != (splay_tree_key) var);
658 gcc_assert ((mask & 1) == 0
659 || !splay_tree_lookup (ctx->field_map, key));
660 gcc_assert ((mask & 2) == 0 || !ctx->sfield_map
661 || !splay_tree_lookup (ctx->sfield_map, key));
662 gcc_assert ((mask & 3) == 3
663 || !is_gimple_omp_oacc (ctx->stmt));
665 type = TREE_TYPE (var);
666 /* Prevent redeclaring the var in the split-off function with a restrict
667 pointer type. Note that we only clear type itself, restrict qualifiers in
668 the pointed-to type will be ignored by points-to analysis. */
669 if (POINTER_TYPE_P (type)
670 && TYPE_RESTRICT (type))
671 type = build_qualified_type (type, TYPE_QUALS (type) & ~TYPE_QUAL_RESTRICT);
673 if (mask & 4)
675 gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
676 type = build_pointer_type (build_pointer_type (type));
678 else if (by_ref)
680 type = build_pointer_type (type);
681 if (base_pointers_restrict)
682 type = build_qualified_type (type, TYPE_QUAL_RESTRICT);
684 else if ((mask & 3) == 1 && omp_is_reference (var))
685 type = TREE_TYPE (type);
687 field = build_decl (DECL_SOURCE_LOCATION (var),
688 FIELD_DECL, DECL_NAME (var), type);
690 /* Remember what variable this field was created for. This does have a
691 side effect of making dwarf2out ignore this member, so for helpful
692 debugging we clear it later in delete_omp_context. */
693 DECL_ABSTRACT_ORIGIN (field) = var;
694 if (type == TREE_TYPE (var))
696 SET_DECL_ALIGN (field, DECL_ALIGN (var));
697 DECL_USER_ALIGN (field) = DECL_USER_ALIGN (var);
698 TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (var);
700 else
701 SET_DECL_ALIGN (field, TYPE_ALIGN (type));
703 if ((mask & 3) == 3)
705 insert_field_into_struct (ctx->record_type, field);
706 if (ctx->srecord_type)
708 sfield = build_decl (DECL_SOURCE_LOCATION (var),
709 FIELD_DECL, DECL_NAME (var), type);
710 DECL_ABSTRACT_ORIGIN (sfield) = var;
711 SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
712 DECL_USER_ALIGN (sfield) = DECL_USER_ALIGN (field);
713 TREE_THIS_VOLATILE (sfield) = TREE_THIS_VOLATILE (field);
714 insert_field_into_struct (ctx->srecord_type, sfield);
717 else
719 if (ctx->srecord_type == NULL_TREE)
721 tree t;
723 ctx->srecord_type = lang_hooks.types.make_type (RECORD_TYPE);
724 ctx->sfield_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
725 for (t = TYPE_FIELDS (ctx->record_type); t ; t = TREE_CHAIN (t))
727 sfield = build_decl (DECL_SOURCE_LOCATION (t),
728 FIELD_DECL, DECL_NAME (t), TREE_TYPE (t));
729 DECL_ABSTRACT_ORIGIN (sfield) = DECL_ABSTRACT_ORIGIN (t);
730 insert_field_into_struct (ctx->srecord_type, sfield);
731 splay_tree_insert (ctx->sfield_map,
732 (splay_tree_key) DECL_ABSTRACT_ORIGIN (t),
733 (splay_tree_value) sfield);
736 sfield = field;
737 insert_field_into_struct ((mask & 1) ? ctx->record_type
738 : ctx->srecord_type, field);
741 if (mask & 1)
742 splay_tree_insert (ctx->field_map, key, (splay_tree_value) field);
743 if ((mask & 2) && ctx->sfield_map)
744 splay_tree_insert (ctx->sfield_map, key, (splay_tree_value) sfield);
747 static tree
748 install_var_local (tree var, omp_context *ctx)
750 tree new_var = omp_copy_decl_1 (var, ctx);
751 insert_decl_map (&ctx->cb, var, new_var);
752 return new_var;
755 /* Adjust the replacement for DECL in CTX for the new context. This means
756 copying the DECL_VALUE_EXPR, and fixing up the type. */
758 static void
759 fixup_remapped_decl (tree decl, omp_context *ctx, bool private_debug)
761 tree new_decl, size;
763 new_decl = lookup_decl (decl, ctx);
765 TREE_TYPE (new_decl) = remap_type (TREE_TYPE (decl), &ctx->cb);
767 if ((!TREE_CONSTANT (DECL_SIZE (new_decl)) || private_debug)
768 && DECL_HAS_VALUE_EXPR_P (decl))
770 tree ve = DECL_VALUE_EXPR (decl);
771 walk_tree (&ve, copy_tree_body_r, &ctx->cb, NULL);
772 SET_DECL_VALUE_EXPR (new_decl, ve);
773 DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
776 if (!TREE_CONSTANT (DECL_SIZE (new_decl)))
778 size = remap_decl (DECL_SIZE (decl), &ctx->cb);
779 if (size == error_mark_node)
780 size = TYPE_SIZE (TREE_TYPE (new_decl));
781 DECL_SIZE (new_decl) = size;
783 size = remap_decl (DECL_SIZE_UNIT (decl), &ctx->cb);
784 if (size == error_mark_node)
785 size = TYPE_SIZE_UNIT (TREE_TYPE (new_decl));
786 DECL_SIZE_UNIT (new_decl) = size;
790 /* The callback for remap_decl. Search all containing contexts for a
791 mapping of the variable; this avoids having to duplicate the splay
792 tree ahead of time. We know a mapping doesn't already exist in the
793 given context. Create new mappings to implement default semantics. */
795 static tree
796 omp_copy_decl (tree var, copy_body_data *cb)
798 omp_context *ctx = (omp_context *) cb;
799 tree new_var;
801 if (TREE_CODE (var) == LABEL_DECL)
803 new_var = create_artificial_label (DECL_SOURCE_LOCATION (var));
804 DECL_CONTEXT (new_var) = current_function_decl;
805 insert_decl_map (&ctx->cb, var, new_var);
806 return new_var;
809 while (!is_taskreg_ctx (ctx))
811 ctx = ctx->outer;
812 if (ctx == NULL)
813 return var;
814 new_var = maybe_lookup_decl (var, ctx);
815 if (new_var)
816 return new_var;
819 if (is_global_var (var) || decl_function_context (var) != ctx->cb.src_fn)
820 return var;
822 return error_mark_node;
825 /* Create a new context, with OUTER_CTX being the surrounding context. */
827 static omp_context *
828 new_omp_context (gimple *stmt, omp_context *outer_ctx)
830 omp_context *ctx = XCNEW (omp_context);
832 splay_tree_insert (all_contexts, (splay_tree_key) stmt,
833 (splay_tree_value) ctx);
834 ctx->stmt = stmt;
836 if (outer_ctx)
838 ctx->outer = outer_ctx;
839 ctx->cb = outer_ctx->cb;
840 ctx->cb.block = NULL;
841 ctx->depth = outer_ctx->depth + 1;
843 else
845 ctx->cb.src_fn = current_function_decl;
846 ctx->cb.dst_fn = current_function_decl;
847 ctx->cb.src_node = cgraph_node::get (current_function_decl);
848 gcc_checking_assert (ctx->cb.src_node);
849 ctx->cb.dst_node = ctx->cb.src_node;
850 ctx->cb.src_cfun = cfun;
851 ctx->cb.copy_decl = omp_copy_decl;
852 ctx->cb.eh_lp_nr = 0;
853 ctx->cb.transform_call_graph_edges = CB_CGE_MOVE;
854 ctx->depth = 1;
857 ctx->cb.decl_map = new hash_map<tree, tree>;
859 return ctx;
862 static gimple_seq maybe_catch_exception (gimple_seq);
864 /* Finalize task copyfn. */
866 static void
867 finalize_task_copyfn (gomp_task *task_stmt)
869 struct function *child_cfun;
870 tree child_fn;
871 gimple_seq seq = NULL, new_seq;
872 gbind *bind;
874 child_fn = gimple_omp_task_copy_fn (task_stmt);
875 if (child_fn == NULL_TREE)
876 return;
878 child_cfun = DECL_STRUCT_FUNCTION (child_fn);
879 DECL_STRUCT_FUNCTION (child_fn)->curr_properties = cfun->curr_properties;
881 push_cfun (child_cfun);
882 bind = gimplify_body (child_fn, false);
883 gimple_seq_add_stmt (&seq, bind);
884 new_seq = maybe_catch_exception (seq);
885 if (new_seq != seq)
887 bind = gimple_build_bind (NULL, new_seq, NULL);
888 seq = NULL;
889 gimple_seq_add_stmt (&seq, bind);
891 gimple_set_body (child_fn, seq);
892 pop_cfun ();
894 /* Inform the callgraph about the new function. */
895 cgraph_node *node = cgraph_node::get_create (child_fn);
896 node->parallelized_function = 1;
897 cgraph_node::add_new_function (child_fn, false);
900 /* Destroy a omp_context data structures. Called through the splay tree
901 value delete callback. */
903 static void
904 delete_omp_context (splay_tree_value value)
906 omp_context *ctx = (omp_context *) value;
908 delete ctx->cb.decl_map;
910 if (ctx->field_map)
911 splay_tree_delete (ctx->field_map);
912 if (ctx->sfield_map)
913 splay_tree_delete (ctx->sfield_map);
915 /* We hijacked DECL_ABSTRACT_ORIGIN earlier. We need to clear it before
916 it produces corrupt debug information. */
917 if (ctx->record_type)
919 tree t;
920 for (t = TYPE_FIELDS (ctx->record_type); t ; t = DECL_CHAIN (t))
921 DECL_ABSTRACT_ORIGIN (t) = NULL;
923 if (ctx->srecord_type)
925 tree t;
926 for (t = TYPE_FIELDS (ctx->srecord_type); t ; t = DECL_CHAIN (t))
927 DECL_ABSTRACT_ORIGIN (t) = NULL;
930 if (is_task_ctx (ctx))
931 finalize_task_copyfn (as_a <gomp_task *> (ctx->stmt));
933 XDELETE (ctx);
936 /* Fix up RECEIVER_DECL with a type that has been remapped to the child
937 context. */
939 static void
940 fixup_child_record_type (omp_context *ctx)
942 tree f, type = ctx->record_type;
944 if (!ctx->receiver_decl)
945 return;
946 /* ??? It isn't sufficient to just call remap_type here, because
947 variably_modified_type_p doesn't work the way we expect for
948 record types. Testing each field for whether it needs remapping
949 and creating a new record by hand works, however. */
950 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
951 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
952 break;
953 if (f)
955 tree name, new_fields = NULL;
957 type = lang_hooks.types.make_type (RECORD_TYPE);
958 name = DECL_NAME (TYPE_NAME (ctx->record_type));
959 name = build_decl (DECL_SOURCE_LOCATION (ctx->receiver_decl),
960 TYPE_DECL, name, type);
961 TYPE_NAME (type) = name;
963 for (f = TYPE_FIELDS (ctx->record_type); f ; f = DECL_CHAIN (f))
965 tree new_f = copy_node (f);
966 DECL_CONTEXT (new_f) = type;
967 TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &ctx->cb);
968 DECL_CHAIN (new_f) = new_fields;
969 walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &ctx->cb, NULL);
970 walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r,
971 &ctx->cb, NULL);
972 walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
973 &ctx->cb, NULL);
974 new_fields = new_f;
976 /* Arrange to be able to look up the receiver field
977 given the sender field. */
978 splay_tree_insert (ctx->field_map, (splay_tree_key) f,
979 (splay_tree_value) new_f);
981 TYPE_FIELDS (type) = nreverse (new_fields);
982 layout_type (type);
985 /* In a target region we never modify any of the pointers in *.omp_data_i,
986 so attempt to help the optimizers. */
987 if (is_gimple_omp_offloaded (ctx->stmt))
988 type = build_qualified_type (type, TYPE_QUAL_CONST);
990 TREE_TYPE (ctx->receiver_decl)
991 = build_qualified_type (build_reference_type (type), TYPE_QUAL_RESTRICT);
994 /* Instantiate decls as necessary in CTX to satisfy the data sharing
995 specified by CLAUSES. If BASE_POINTERS_RESTRICT, install var field with
996 restrict. */
998 static void
999 scan_sharing_clauses (tree clauses, omp_context *ctx,
1000 bool base_pointers_restrict = false)
1002 tree c, decl;
1003 bool scan_array_reductions = false;
1005 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1007 bool by_ref;
1009 switch (OMP_CLAUSE_CODE (c))
1011 case OMP_CLAUSE_PRIVATE:
1012 decl = OMP_CLAUSE_DECL (c);
1013 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
1014 goto do_private;
1015 else if (!is_variable_sized (decl))
1016 install_var_local (decl, ctx);
1017 break;
1019 case OMP_CLAUSE_SHARED:
1020 decl = OMP_CLAUSE_DECL (c);
1021 /* Ignore shared directives in teams construct. */
1022 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
1024 /* Global variables don't need to be copied,
1025 the receiver side will use them directly. */
1026 tree odecl = maybe_lookup_decl_in_outer_ctx (decl, ctx);
1027 if (is_global_var (odecl))
1028 break;
1029 insert_decl_map (&ctx->cb, decl, odecl);
1030 break;
1032 gcc_assert (is_taskreg_ctx (ctx));
1033 gcc_assert (!COMPLETE_TYPE_P (TREE_TYPE (decl))
1034 || !is_variable_sized (decl));
1035 /* Global variables don't need to be copied,
1036 the receiver side will use them directly. */
1037 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1038 break;
1039 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
1041 use_pointer_for_field (decl, ctx);
1042 break;
1044 by_ref = use_pointer_for_field (decl, NULL);
1045 if ((! TREE_READONLY (decl) && !OMP_CLAUSE_SHARED_READONLY (c))
1046 || TREE_ADDRESSABLE (decl)
1047 || by_ref
1048 || omp_is_reference (decl))
1050 by_ref = use_pointer_for_field (decl, ctx);
1051 install_var_field (decl, by_ref, 3, ctx);
1052 install_var_local (decl, ctx);
1053 break;
1055 /* We don't need to copy const scalar vars back. */
1056 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_FIRSTPRIVATE);
1057 goto do_private;
1059 case OMP_CLAUSE_REDUCTION:
1060 decl = OMP_CLAUSE_DECL (c);
1061 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
1062 && TREE_CODE (decl) == MEM_REF)
1064 tree t = TREE_OPERAND (decl, 0);
1065 if (TREE_CODE (t) == POINTER_PLUS_EXPR)
1066 t = TREE_OPERAND (t, 0);
1067 if (TREE_CODE (t) == INDIRECT_REF
1068 || TREE_CODE (t) == ADDR_EXPR)
1069 t = TREE_OPERAND (t, 0);
1070 install_var_local (t, ctx);
1071 if (is_taskreg_ctx (ctx)
1072 && !is_global_var (maybe_lookup_decl_in_outer_ctx (t, ctx))
1073 && !is_variable_sized (t))
1075 by_ref = use_pointer_for_field (t, ctx);
1076 install_var_field (t, by_ref, 3, ctx);
1078 break;
1080 goto do_private;
1082 case OMP_CLAUSE_LASTPRIVATE:
1083 /* Let the corresponding firstprivate clause create
1084 the variable. */
1085 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
1086 break;
1087 /* FALLTHRU */
1089 case OMP_CLAUSE_FIRSTPRIVATE:
1090 case OMP_CLAUSE_LINEAR:
1091 decl = OMP_CLAUSE_DECL (c);
1092 do_private:
1093 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
1094 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
1095 && is_gimple_omp_offloaded (ctx->stmt))
1097 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
1098 install_var_field (decl, !omp_is_reference (decl), 3, ctx);
1099 else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1100 install_var_field (decl, true, 3, ctx);
1101 else
1102 install_var_field (decl, false, 3, ctx);
1104 if (is_variable_sized (decl))
1106 if (is_task_ctx (ctx))
1107 install_var_field (decl, false, 1, ctx);
1108 break;
1110 else if (is_taskreg_ctx (ctx))
1112 bool global
1113 = is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx));
1114 by_ref = use_pointer_for_field (decl, NULL);
1116 if (is_task_ctx (ctx)
1117 && (global || by_ref || omp_is_reference (decl)))
1119 install_var_field (decl, false, 1, ctx);
1120 if (!global)
1121 install_var_field (decl, by_ref, 2, ctx);
1123 else if (!global)
1124 install_var_field (decl, by_ref, 3, ctx);
1126 install_var_local (decl, ctx);
1127 break;
1129 case OMP_CLAUSE_USE_DEVICE_PTR:
1130 decl = OMP_CLAUSE_DECL (c);
1131 if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1132 install_var_field (decl, true, 3, ctx);
1133 else
1134 install_var_field (decl, false, 3, ctx);
1135 if (DECL_SIZE (decl)
1136 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1138 tree decl2 = DECL_VALUE_EXPR (decl);
1139 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1140 decl2 = TREE_OPERAND (decl2, 0);
1141 gcc_assert (DECL_P (decl2));
1142 install_var_local (decl2, ctx);
1144 install_var_local (decl, ctx);
1145 break;
1147 case OMP_CLAUSE_IS_DEVICE_PTR:
1148 decl = OMP_CLAUSE_DECL (c);
1149 goto do_private;
1151 case OMP_CLAUSE__LOOPTEMP_:
1152 gcc_assert (is_taskreg_ctx (ctx));
1153 decl = OMP_CLAUSE_DECL (c);
1154 install_var_field (decl, false, 3, ctx);
1155 install_var_local (decl, ctx);
1156 break;
1158 case OMP_CLAUSE_COPYPRIVATE:
1159 case OMP_CLAUSE_COPYIN:
1160 decl = OMP_CLAUSE_DECL (c);
1161 by_ref = use_pointer_for_field (decl, NULL);
1162 install_var_field (decl, by_ref, 3, ctx);
1163 break;
1165 case OMP_CLAUSE_DEFAULT:
1166 ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
1167 break;
1169 case OMP_CLAUSE_FINAL:
1170 case OMP_CLAUSE_IF:
1171 case OMP_CLAUSE_NUM_THREADS:
1172 case OMP_CLAUSE_NUM_TEAMS:
1173 case OMP_CLAUSE_THREAD_LIMIT:
1174 case OMP_CLAUSE_DEVICE:
1175 case OMP_CLAUSE_SCHEDULE:
1176 case OMP_CLAUSE_DIST_SCHEDULE:
1177 case OMP_CLAUSE_DEPEND:
1178 case OMP_CLAUSE_PRIORITY:
1179 case OMP_CLAUSE_GRAINSIZE:
1180 case OMP_CLAUSE_NUM_TASKS:
1181 case OMP_CLAUSE__CILK_FOR_COUNT_:
1182 case OMP_CLAUSE_NUM_GANGS:
1183 case OMP_CLAUSE_NUM_WORKERS:
1184 case OMP_CLAUSE_VECTOR_LENGTH:
1185 if (ctx->outer)
1186 scan_omp_op (&OMP_CLAUSE_OPERAND (c, 0), ctx->outer);
1187 break;
1189 case OMP_CLAUSE_TO:
1190 case OMP_CLAUSE_FROM:
1191 case OMP_CLAUSE_MAP:
1192 if (ctx->outer)
1193 scan_omp_op (&OMP_CLAUSE_SIZE (c), ctx->outer);
1194 decl = OMP_CLAUSE_DECL (c);
1195 /* Global variables with "omp declare target" attribute
1196 don't need to be copied, the receiver side will use them
1197 directly. However, global variables with "omp declare target link"
1198 attribute need to be copied. */
1199 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1200 && DECL_P (decl)
1201 && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
1202 && (OMP_CLAUSE_MAP_KIND (c)
1203 != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
1204 || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1205 && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
1206 && varpool_node::get_create (decl)->offloadable
1207 && !lookup_attribute ("omp declare target link",
1208 DECL_ATTRIBUTES (decl)))
1209 break;
1210 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1211 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER)
1213 /* Ignore GOMP_MAP_POINTER kind for arrays in regions that are
1214 not offloaded; there is nothing to map for those. */
1215 if (!is_gimple_omp_offloaded (ctx->stmt)
1216 && !POINTER_TYPE_P (TREE_TYPE (decl))
1217 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
1218 break;
1220 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1221 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
1222 || (OMP_CLAUSE_MAP_KIND (c)
1223 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
1225 if (TREE_CODE (decl) == COMPONENT_REF
1226 || (TREE_CODE (decl) == INDIRECT_REF
1227 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
1228 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
1229 == REFERENCE_TYPE)))
1230 break;
1231 if (DECL_SIZE (decl)
1232 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1234 tree decl2 = DECL_VALUE_EXPR (decl);
1235 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1236 decl2 = TREE_OPERAND (decl2, 0);
1237 gcc_assert (DECL_P (decl2));
1238 install_var_local (decl2, ctx);
1240 install_var_local (decl, ctx);
1241 break;
1243 if (DECL_P (decl))
1245 if (DECL_SIZE (decl)
1246 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1248 tree decl2 = DECL_VALUE_EXPR (decl);
1249 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1250 decl2 = TREE_OPERAND (decl2, 0);
1251 gcc_assert (DECL_P (decl2));
1252 install_var_field (decl2, true, 3, ctx);
1253 install_var_local (decl2, ctx);
1254 install_var_local (decl, ctx);
1256 else
1258 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1259 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
1260 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
1261 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1262 install_var_field (decl, true, 7, ctx);
1263 else
1264 install_var_field (decl, true, 3, ctx,
1265 base_pointers_restrict);
1266 if (is_gimple_omp_offloaded (ctx->stmt)
1267 && !OMP_CLAUSE_MAP_IN_REDUCTION (c))
1268 install_var_local (decl, ctx);
1271 else
1273 tree base = get_base_address (decl);
1274 tree nc = OMP_CLAUSE_CHAIN (c);
1275 if (DECL_P (base)
1276 && nc != NULL_TREE
1277 && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
1278 && OMP_CLAUSE_DECL (nc) == base
1279 && OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_POINTER
1280 && integer_zerop (OMP_CLAUSE_SIZE (nc)))
1282 OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c) = 1;
1283 OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (nc) = 1;
1285 else
1287 if (ctx->outer)
1289 scan_omp_op (&OMP_CLAUSE_DECL (c), ctx->outer);
1290 decl = OMP_CLAUSE_DECL (c);
1292 gcc_assert (!splay_tree_lookup (ctx->field_map,
1293 (splay_tree_key) decl));
1294 tree field
1295 = build_decl (OMP_CLAUSE_LOCATION (c),
1296 FIELD_DECL, NULL_TREE, ptr_type_node);
1297 SET_DECL_ALIGN (field, TYPE_ALIGN (ptr_type_node));
1298 insert_field_into_struct (ctx->record_type, field);
1299 splay_tree_insert (ctx->field_map, (splay_tree_key) decl,
1300 (splay_tree_value) field);
1303 break;
1305 case OMP_CLAUSE__GRIDDIM_:
1306 if (ctx->outer)
1308 scan_omp_op (&OMP_CLAUSE__GRIDDIM__SIZE (c), ctx->outer);
1309 scan_omp_op (&OMP_CLAUSE__GRIDDIM__GROUP (c), ctx->outer);
1311 break;
1313 case OMP_CLAUSE_NOWAIT:
1314 case OMP_CLAUSE_ORDERED:
1315 case OMP_CLAUSE_COLLAPSE:
1316 case OMP_CLAUSE_UNTIED:
1317 case OMP_CLAUSE_MERGEABLE:
1318 case OMP_CLAUSE_PROC_BIND:
1319 case OMP_CLAUSE_SAFELEN:
1320 case OMP_CLAUSE_SIMDLEN:
1321 case OMP_CLAUSE_THREADS:
1322 case OMP_CLAUSE_SIMD:
1323 case OMP_CLAUSE_NOGROUP:
1324 case OMP_CLAUSE_DEFAULTMAP:
1325 case OMP_CLAUSE_ASYNC:
1326 case OMP_CLAUSE_WAIT:
1327 case OMP_CLAUSE_GANG:
1328 case OMP_CLAUSE_WORKER:
1329 case OMP_CLAUSE_VECTOR:
1330 case OMP_CLAUSE_INDEPENDENT:
1331 case OMP_CLAUSE_AUTO:
1332 case OMP_CLAUSE_SEQ:
1333 case OMP_CLAUSE__SIMT_:
1334 break;
1336 case OMP_CLAUSE_ALIGNED:
1337 decl = OMP_CLAUSE_DECL (c);
1338 if (is_global_var (decl)
1339 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1340 install_var_local (decl, ctx);
1341 break;
1343 case OMP_CLAUSE_TILE:
1344 case OMP_CLAUSE__CACHE_:
1345 default:
1346 gcc_unreachable ();
1350 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1352 switch (OMP_CLAUSE_CODE (c))
1354 case OMP_CLAUSE_LASTPRIVATE:
1355 /* Let the corresponding firstprivate clause create
1356 the variable. */
1357 if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
1358 scan_array_reductions = true;
1359 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
1360 break;
1361 /* FALLTHRU */
1363 case OMP_CLAUSE_FIRSTPRIVATE:
1364 case OMP_CLAUSE_PRIVATE:
1365 case OMP_CLAUSE_LINEAR:
1366 case OMP_CLAUSE_IS_DEVICE_PTR:
1367 decl = OMP_CLAUSE_DECL (c);
1368 if (is_variable_sized (decl))
1370 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
1371 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
1372 && is_gimple_omp_offloaded (ctx->stmt))
1374 tree decl2 = DECL_VALUE_EXPR (decl);
1375 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1376 decl2 = TREE_OPERAND (decl2, 0);
1377 gcc_assert (DECL_P (decl2));
1378 install_var_local (decl2, ctx);
1379 fixup_remapped_decl (decl2, ctx, false);
1381 install_var_local (decl, ctx);
1383 fixup_remapped_decl (decl, ctx,
1384 OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
1385 && OMP_CLAUSE_PRIVATE_DEBUG (c));
1386 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
1387 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
1388 scan_array_reductions = true;
1389 break;
1391 case OMP_CLAUSE_REDUCTION:
1392 decl = OMP_CLAUSE_DECL (c);
1393 if (TREE_CODE (decl) != MEM_REF)
1395 if (is_variable_sized (decl))
1396 install_var_local (decl, ctx);
1397 fixup_remapped_decl (decl, ctx, false);
1399 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
1400 scan_array_reductions = true;
1401 break;
1403 case OMP_CLAUSE_SHARED:
1404 /* Ignore shared directives in teams construct. */
1405 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
1406 break;
1407 decl = OMP_CLAUSE_DECL (c);
1408 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1409 break;
1410 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
1412 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl,
1413 ctx->outer)))
1414 break;
1415 bool by_ref = use_pointer_for_field (decl, ctx);
1416 install_var_field (decl, by_ref, 11, ctx);
1417 break;
1419 fixup_remapped_decl (decl, ctx, false);
1420 break;
1422 case OMP_CLAUSE_MAP:
1423 if (!is_gimple_omp_offloaded (ctx->stmt))
1424 break;
1425 decl = OMP_CLAUSE_DECL (c);
1426 if (DECL_P (decl)
1427 && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
1428 && (OMP_CLAUSE_MAP_KIND (c)
1429 != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
1430 || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1431 && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
1432 && varpool_node::get_create (decl)->offloadable)
1433 break;
1434 if (DECL_P (decl))
1436 if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
1437 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
1438 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
1439 && !COMPLETE_TYPE_P (TREE_TYPE (decl)))
1441 tree new_decl = lookup_decl (decl, ctx);
1442 TREE_TYPE (new_decl)
1443 = remap_type (TREE_TYPE (decl), &ctx->cb);
1445 else if (DECL_SIZE (decl)
1446 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1448 tree decl2 = DECL_VALUE_EXPR (decl);
1449 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1450 decl2 = TREE_OPERAND (decl2, 0);
1451 gcc_assert (DECL_P (decl2));
1452 fixup_remapped_decl (decl2, ctx, false);
1453 fixup_remapped_decl (decl, ctx, true);
1455 else
1456 fixup_remapped_decl (decl, ctx, false);
1458 break;
1460 case OMP_CLAUSE_COPYPRIVATE:
1461 case OMP_CLAUSE_COPYIN:
1462 case OMP_CLAUSE_DEFAULT:
1463 case OMP_CLAUSE_IF:
1464 case OMP_CLAUSE_NUM_THREADS:
1465 case OMP_CLAUSE_NUM_TEAMS:
1466 case OMP_CLAUSE_THREAD_LIMIT:
1467 case OMP_CLAUSE_DEVICE:
1468 case OMP_CLAUSE_SCHEDULE:
1469 case OMP_CLAUSE_DIST_SCHEDULE:
1470 case OMP_CLAUSE_NOWAIT:
1471 case OMP_CLAUSE_ORDERED:
1472 case OMP_CLAUSE_COLLAPSE:
1473 case OMP_CLAUSE_UNTIED:
1474 case OMP_CLAUSE_FINAL:
1475 case OMP_CLAUSE_MERGEABLE:
1476 case OMP_CLAUSE_PROC_BIND:
1477 case OMP_CLAUSE_SAFELEN:
1478 case OMP_CLAUSE_SIMDLEN:
1479 case OMP_CLAUSE_ALIGNED:
1480 case OMP_CLAUSE_DEPEND:
1481 case OMP_CLAUSE__LOOPTEMP_:
1482 case OMP_CLAUSE_TO:
1483 case OMP_CLAUSE_FROM:
1484 case OMP_CLAUSE_PRIORITY:
1485 case OMP_CLAUSE_GRAINSIZE:
1486 case OMP_CLAUSE_NUM_TASKS:
1487 case OMP_CLAUSE_THREADS:
1488 case OMP_CLAUSE_SIMD:
1489 case OMP_CLAUSE_NOGROUP:
1490 case OMP_CLAUSE_DEFAULTMAP:
1491 case OMP_CLAUSE_USE_DEVICE_PTR:
1492 case OMP_CLAUSE__CILK_FOR_COUNT_:
1493 case OMP_CLAUSE_ASYNC:
1494 case OMP_CLAUSE_WAIT:
1495 case OMP_CLAUSE_NUM_GANGS:
1496 case OMP_CLAUSE_NUM_WORKERS:
1497 case OMP_CLAUSE_VECTOR_LENGTH:
1498 case OMP_CLAUSE_GANG:
1499 case OMP_CLAUSE_WORKER:
1500 case OMP_CLAUSE_VECTOR:
1501 case OMP_CLAUSE_INDEPENDENT:
1502 case OMP_CLAUSE_AUTO:
1503 case OMP_CLAUSE_SEQ:
1504 case OMP_CLAUSE__GRIDDIM_:
1505 case OMP_CLAUSE__SIMT_:
1506 break;
1508 case OMP_CLAUSE_TILE:
1509 case OMP_CLAUSE__CACHE_:
1510 default:
1511 gcc_unreachable ();
1515 gcc_checking_assert (!scan_array_reductions
1516 || !is_gimple_omp_oacc (ctx->stmt));
1517 if (scan_array_reductions)
1519 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1520 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
1521 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
1523 scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c), ctx);
1524 scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
1526 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
1527 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
1528 scan_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
1529 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
1530 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
1531 scan_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
1535 /* Create a new name for omp child function. Returns an identifier. If
1536 IS_CILK_FOR is true then the suffix for the child function is
1537 "_cilk_for_fn." */
1539 static tree
1540 create_omp_child_function_name (bool task_copy, bool is_cilk_for)
1542 if (is_cilk_for)
1543 return clone_function_name (current_function_decl, "_cilk_for_fn");
1544 return clone_function_name (current_function_decl,
1545 task_copy ? "_omp_cpyfn" : "_omp_fn");
1548 /* Returns the type of the induction variable for the child function for
1549 _Cilk_for and the types for _high and _low variables based on TYPE. */
1551 static tree
1552 cilk_for_check_loop_diff_type (tree type)
1554 if (TYPE_PRECISION (type) <= TYPE_PRECISION (uint32_type_node))
1556 if (TYPE_UNSIGNED (type))
1557 return uint32_type_node;
1558 else
1559 return integer_type_node;
1561 else
1563 if (TYPE_UNSIGNED (type))
1564 return uint64_type_node;
1565 else
1566 return long_long_integer_type_node;
1570 /* Return true if CTX may belong to offloaded code: either if current function
1571 is offloaded, or any enclosing context corresponds to a target region. */
1573 static bool
1574 omp_maybe_offloaded_ctx (omp_context *ctx)
1576 if (cgraph_node::get (current_function_decl)->offloadable)
1577 return true;
1578 for (; ctx; ctx = ctx->outer)
1579 if (is_gimple_omp_offloaded (ctx->stmt))
1580 return true;
1581 return false;
1584 /* Build a decl for the omp child function. It'll not contain a body
1585 yet, just the bare decl. */
1587 static void
1588 create_omp_child_function (omp_context *ctx, bool task_copy)
1590 tree decl, type, name, t;
1592 tree cilk_for_count
1593 = (flag_cilkplus && gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL)
1594 ? omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
1595 OMP_CLAUSE__CILK_FOR_COUNT_) : NULL_TREE;
1596 tree cilk_var_type = NULL_TREE;
1598 name = create_omp_child_function_name (task_copy,
1599 cilk_for_count != NULL_TREE);
1600 if (task_copy)
1601 type = build_function_type_list (void_type_node, ptr_type_node,
1602 ptr_type_node, NULL_TREE);
1603 else if (cilk_for_count)
1605 type = TREE_TYPE (OMP_CLAUSE_OPERAND (cilk_for_count, 0));
1606 cilk_var_type = cilk_for_check_loop_diff_type (type);
1607 type = build_function_type_list (void_type_node, ptr_type_node,
1608 cilk_var_type, cilk_var_type, NULL_TREE);
1610 else
1611 type = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
1613 decl = build_decl (gimple_location (ctx->stmt), FUNCTION_DECL, name, type);
1615 gcc_checking_assert (!is_gimple_omp_oacc (ctx->stmt)
1616 || !task_copy);
1617 if (!task_copy)
1618 ctx->cb.dst_fn = decl;
1619 else
1620 gimple_omp_task_set_copy_fn (ctx->stmt, decl);
1622 TREE_STATIC (decl) = 1;
1623 TREE_USED (decl) = 1;
1624 DECL_ARTIFICIAL (decl) = 1;
1625 DECL_IGNORED_P (decl) = 0;
1626 TREE_PUBLIC (decl) = 0;
1627 DECL_UNINLINABLE (decl) = 1;
1628 DECL_EXTERNAL (decl) = 0;
1629 DECL_CONTEXT (decl) = NULL_TREE;
1630 DECL_INITIAL (decl) = make_node (BLOCK);
1631 BLOCK_SUPERCONTEXT (DECL_INITIAL (decl)) = decl;
1632 if (omp_maybe_offloaded_ctx (ctx))
1634 cgraph_node::get_create (decl)->offloadable = 1;
1635 if (ENABLE_OFFLOADING)
1636 g->have_offload = true;
1639 if (cgraph_node::get_create (decl)->offloadable
1640 && !lookup_attribute ("omp declare target",
1641 DECL_ATTRIBUTES (current_function_decl)))
1643 const char *target_attr = (is_gimple_omp_offloaded (ctx->stmt)
1644 ? "omp target entrypoint"
1645 : "omp declare target");
1646 DECL_ATTRIBUTES (decl)
1647 = tree_cons (get_identifier (target_attr),
1648 NULL_TREE, DECL_ATTRIBUTES (decl));
1651 t = build_decl (DECL_SOURCE_LOCATION (decl),
1652 RESULT_DECL, NULL_TREE, void_type_node);
1653 DECL_ARTIFICIAL (t) = 1;
1654 DECL_IGNORED_P (t) = 1;
1655 DECL_CONTEXT (t) = decl;
1656 DECL_RESULT (decl) = t;
1658 /* _Cilk_for's child function requires two extra parameters called
1659 __low and __high that are set the by Cilk runtime when it calls this
1660 function. */
1661 if (cilk_for_count)
1663 t = build_decl (DECL_SOURCE_LOCATION (decl),
1664 PARM_DECL, get_identifier ("__high"), cilk_var_type);
1665 DECL_ARTIFICIAL (t) = 1;
1666 DECL_NAMELESS (t) = 1;
1667 DECL_ARG_TYPE (t) = ptr_type_node;
1668 DECL_CONTEXT (t) = current_function_decl;
1669 TREE_USED (t) = 1;
1670 DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
1671 DECL_ARGUMENTS (decl) = t;
1673 t = build_decl (DECL_SOURCE_LOCATION (decl),
1674 PARM_DECL, get_identifier ("__low"), cilk_var_type);
1675 DECL_ARTIFICIAL (t) = 1;
1676 DECL_NAMELESS (t) = 1;
1677 DECL_ARG_TYPE (t) = ptr_type_node;
1678 DECL_CONTEXT (t) = current_function_decl;
1679 TREE_USED (t) = 1;
1680 DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
1681 DECL_ARGUMENTS (decl) = t;
1684 tree data_name = get_identifier (".omp_data_i");
1685 t = build_decl (DECL_SOURCE_LOCATION (decl), PARM_DECL, data_name,
1686 ptr_type_node);
1687 DECL_ARTIFICIAL (t) = 1;
1688 DECL_NAMELESS (t) = 1;
1689 DECL_ARG_TYPE (t) = ptr_type_node;
1690 DECL_CONTEXT (t) = current_function_decl;
1691 TREE_USED (t) = 1;
1692 TREE_READONLY (t) = 1;
1693 if (cilk_for_count)
1694 DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
1695 DECL_ARGUMENTS (decl) = t;
1696 if (!task_copy)
1697 ctx->receiver_decl = t;
1698 else
1700 t = build_decl (DECL_SOURCE_LOCATION (decl),
1701 PARM_DECL, get_identifier (".omp_data_o"),
1702 ptr_type_node);
1703 DECL_ARTIFICIAL (t) = 1;
1704 DECL_NAMELESS (t) = 1;
1705 DECL_ARG_TYPE (t) = ptr_type_node;
1706 DECL_CONTEXT (t) = current_function_decl;
1707 TREE_USED (t) = 1;
1708 TREE_ADDRESSABLE (t) = 1;
1709 DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
1710 DECL_ARGUMENTS (decl) = t;
1713 /* Allocate memory for the function structure. The call to
1714 allocate_struct_function clobbers CFUN, so we need to restore
1715 it afterward. */
1716 push_struct_function (decl);
1717 cfun->function_end_locus = gimple_location (ctx->stmt);
1718 init_tree_ssa (cfun);
1719 pop_cfun ();
1722 /* Callback for walk_gimple_seq. Check if combined parallel
1723 contains gimple_omp_for_combined_into_p OMP_FOR. */
1725 tree
1726 omp_find_combined_for (gimple_stmt_iterator *gsi_p,
1727 bool *handled_ops_p,
1728 struct walk_stmt_info *wi)
1730 gimple *stmt = gsi_stmt (*gsi_p);
1732 *handled_ops_p = true;
1733 switch (gimple_code (stmt))
1735 WALK_SUBSTMTS;
1737 case GIMPLE_OMP_FOR:
1738 if (gimple_omp_for_combined_into_p (stmt)
1739 && gimple_omp_for_kind (stmt)
1740 == *(const enum gf_mask *) (wi->info))
1742 wi->info = stmt;
1743 return integer_zero_node;
1745 break;
1746 default:
1747 break;
1749 return NULL;
1752 /* Add _LOOPTEMP_ clauses on OpenMP parallel or task. */
1754 static void
1755 add_taskreg_looptemp_clauses (enum gf_mask msk, gimple *stmt,
1756 omp_context *outer_ctx)
1758 struct walk_stmt_info wi;
1760 memset (&wi, 0, sizeof (wi));
1761 wi.val_only = true;
1762 wi.info = (void *) &msk;
1763 walk_gimple_seq (gimple_omp_body (stmt), omp_find_combined_for, NULL, &wi);
1764 if (wi.info != (void *) &msk)
1766 gomp_for *for_stmt = as_a <gomp_for *> ((gimple *) wi.info);
1767 struct omp_for_data fd;
1768 omp_extract_for_data (for_stmt, &fd, NULL);
1769 /* We need two temporaries with fd.loop.v type (istart/iend)
1770 and then (fd.collapse - 1) temporaries with the same
1771 type for count2 ... countN-1 vars if not constant. */
1772 size_t count = 2, i;
1773 tree type = fd.iter_type;
1774 if (fd.collapse > 1
1775 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
1777 count += fd.collapse - 1;
1778 /* If there are lastprivate clauses on the inner
1779 GIMPLE_OMP_FOR, add one more temporaries for the total number
1780 of iterations (product of count1 ... countN-1). */
1781 if (omp_find_clause (gimple_omp_for_clauses (for_stmt),
1782 OMP_CLAUSE_LASTPRIVATE))
1783 count++;
1784 else if (msk == GF_OMP_FOR_KIND_FOR
1785 && omp_find_clause (gimple_omp_parallel_clauses (stmt),
1786 OMP_CLAUSE_LASTPRIVATE))
1787 count++;
1789 for (i = 0; i < count; i++)
1791 tree temp = create_tmp_var (type);
1792 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
1793 insert_decl_map (&outer_ctx->cb, temp, temp);
1794 OMP_CLAUSE_DECL (c) = temp;
1795 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
1796 gimple_omp_taskreg_set_clauses (stmt, c);
1801 /* Scan an OpenMP parallel directive. */
1803 static void
1804 scan_omp_parallel (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
1806 omp_context *ctx;
1807 tree name;
1808 gomp_parallel *stmt = as_a <gomp_parallel *> (gsi_stmt (*gsi));
1810 /* Ignore parallel directives with empty bodies, unless there
1811 are copyin clauses. */
1812 if (optimize > 0
1813 && empty_body_p (gimple_omp_body (stmt))
1814 && omp_find_clause (gimple_omp_parallel_clauses (stmt),
1815 OMP_CLAUSE_COPYIN) == NULL)
1817 gsi_replace (gsi, gimple_build_nop (), false);
1818 return;
1821 if (gimple_omp_parallel_combined_p (stmt))
1822 add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_FOR, stmt, outer_ctx);
1824 ctx = new_omp_context (stmt, outer_ctx);
1825 taskreg_contexts.safe_push (ctx);
1826 if (taskreg_nesting_level > 1)
1827 ctx->is_nested = true;
1828 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
1829 ctx->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
1830 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
1831 name = create_tmp_var_name (".omp_data_s");
1832 name = build_decl (gimple_location (stmt),
1833 TYPE_DECL, name, ctx->record_type);
1834 DECL_ARTIFICIAL (name) = 1;
1835 DECL_NAMELESS (name) = 1;
1836 TYPE_NAME (ctx->record_type) = name;
1837 TYPE_ARTIFICIAL (ctx->record_type) = 1;
1838 if (!gimple_omp_parallel_grid_phony (stmt))
1840 create_omp_child_function (ctx, false);
1841 gimple_omp_parallel_set_child_fn (stmt, ctx->cb.dst_fn);
1844 scan_sharing_clauses (gimple_omp_parallel_clauses (stmt), ctx);
1845 scan_omp (gimple_omp_body_ptr (stmt), ctx);
1847 if (TYPE_FIELDS (ctx->record_type) == NULL)
1848 ctx->record_type = ctx->receiver_decl = NULL;
1851 /* Scan an OpenMP task directive. */
1853 static void
1854 scan_omp_task (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
1856 omp_context *ctx;
1857 tree name, t;
1858 gomp_task *stmt = as_a <gomp_task *> (gsi_stmt (*gsi));
1860 /* Ignore task directives with empty bodies. */
1861 if (optimize > 0
1862 && empty_body_p (gimple_omp_body (stmt)))
1864 gsi_replace (gsi, gimple_build_nop (), false);
1865 return;
1868 if (gimple_omp_task_taskloop_p (stmt))
1869 add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_TASKLOOP, stmt, outer_ctx);
1871 ctx = new_omp_context (stmt, outer_ctx);
1872 taskreg_contexts.safe_push (ctx);
1873 if (taskreg_nesting_level > 1)
1874 ctx->is_nested = true;
1875 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
1876 ctx->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
1877 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
1878 name = create_tmp_var_name (".omp_data_s");
1879 name = build_decl (gimple_location (stmt),
1880 TYPE_DECL, name, ctx->record_type);
1881 DECL_ARTIFICIAL (name) = 1;
1882 DECL_NAMELESS (name) = 1;
1883 TYPE_NAME (ctx->record_type) = name;
1884 TYPE_ARTIFICIAL (ctx->record_type) = 1;
1885 create_omp_child_function (ctx, false);
1886 gimple_omp_task_set_child_fn (stmt, ctx->cb.dst_fn);
1888 scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);
1890 if (ctx->srecord_type)
1892 name = create_tmp_var_name (".omp_data_a");
1893 name = build_decl (gimple_location (stmt),
1894 TYPE_DECL, name, ctx->srecord_type);
1895 DECL_ARTIFICIAL (name) = 1;
1896 DECL_NAMELESS (name) = 1;
1897 TYPE_NAME (ctx->srecord_type) = name;
1898 TYPE_ARTIFICIAL (ctx->srecord_type) = 1;
1899 create_omp_child_function (ctx, true);
1902 scan_omp (gimple_omp_body_ptr (stmt), ctx);
1904 if (TYPE_FIELDS (ctx->record_type) == NULL)
1906 ctx->record_type = ctx->receiver_decl = NULL;
1907 t = build_int_cst (long_integer_type_node, 0);
1908 gimple_omp_task_set_arg_size (stmt, t);
1909 t = build_int_cst (long_integer_type_node, 1);
1910 gimple_omp_task_set_arg_align (stmt, t);
1915 /* If any decls have been made addressable during scan_omp,
1916 adjust their fields if needed, and layout record types
1917 of parallel/task constructs. */
1919 static void
1920 finish_taskreg_scan (omp_context *ctx)
1922 if (ctx->record_type == NULL_TREE)
1923 return;
1925 /* If any task_shared_vars were needed, verify all
1926 OMP_CLAUSE_SHARED clauses on GIMPLE_OMP_{PARALLEL,TASK}
1927 statements if use_pointer_for_field hasn't changed
1928 because of that. If it did, update field types now. */
1929 if (task_shared_vars)
1931 tree c;
1933 for (c = gimple_omp_taskreg_clauses (ctx->stmt);
1934 c; c = OMP_CLAUSE_CHAIN (c))
1935 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
1936 && !OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
1938 tree decl = OMP_CLAUSE_DECL (c);
1940 /* Global variables don't need to be copied,
1941 the receiver side will use them directly. */
1942 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1943 continue;
1944 if (!bitmap_bit_p (task_shared_vars, DECL_UID (decl))
1945 || !use_pointer_for_field (decl, ctx))
1946 continue;
1947 tree field = lookup_field (decl, ctx);
1948 if (TREE_CODE (TREE_TYPE (field)) == POINTER_TYPE
1949 && TREE_TYPE (TREE_TYPE (field)) == TREE_TYPE (decl))
1950 continue;
1951 TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
1952 TREE_THIS_VOLATILE (field) = 0;
1953 DECL_USER_ALIGN (field) = 0;
1954 SET_DECL_ALIGN (field, TYPE_ALIGN (TREE_TYPE (field)));
1955 if (TYPE_ALIGN (ctx->record_type) < DECL_ALIGN (field))
1956 SET_TYPE_ALIGN (ctx->record_type, DECL_ALIGN (field));
1957 if (ctx->srecord_type)
1959 tree sfield = lookup_sfield (decl, ctx);
1960 TREE_TYPE (sfield) = TREE_TYPE (field);
1961 TREE_THIS_VOLATILE (sfield) = 0;
1962 DECL_USER_ALIGN (sfield) = 0;
1963 SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
1964 if (TYPE_ALIGN (ctx->srecord_type) < DECL_ALIGN (sfield))
1965 SET_TYPE_ALIGN (ctx->srecord_type, DECL_ALIGN (sfield));
1970 if (gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL)
1972 layout_type (ctx->record_type);
1973 fixup_child_record_type (ctx);
1975 else
1977 location_t loc = gimple_location (ctx->stmt);
1978 tree *p, vla_fields = NULL_TREE, *q = &vla_fields;
1979 /* Move VLA fields to the end. */
1980 p = &TYPE_FIELDS (ctx->record_type);
1981 while (*p)
1982 if (!TYPE_SIZE_UNIT (TREE_TYPE (*p))
1983 || ! TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (*p))))
1985 *q = *p;
1986 *p = TREE_CHAIN (*p);
1987 TREE_CHAIN (*q) = NULL_TREE;
1988 q = &TREE_CHAIN (*q);
1990 else
1991 p = &DECL_CHAIN (*p);
1992 *p = vla_fields;
1993 if (gimple_omp_task_taskloop_p (ctx->stmt))
1995 /* Move fields corresponding to first and second _looptemp_
1996 clause first. There are filled by GOMP_taskloop
1997 and thus need to be in specific positions. */
1998 tree c1 = gimple_omp_task_clauses (ctx->stmt);
1999 c1 = omp_find_clause (c1, OMP_CLAUSE__LOOPTEMP_);
2000 tree c2 = omp_find_clause (OMP_CLAUSE_CHAIN (c1),
2001 OMP_CLAUSE__LOOPTEMP_);
2002 tree f1 = lookup_field (OMP_CLAUSE_DECL (c1), ctx);
2003 tree f2 = lookup_field (OMP_CLAUSE_DECL (c2), ctx);
2004 p = &TYPE_FIELDS (ctx->record_type);
2005 while (*p)
2006 if (*p == f1 || *p == f2)
2007 *p = DECL_CHAIN (*p);
2008 else
2009 p = &DECL_CHAIN (*p);
2010 DECL_CHAIN (f1) = f2;
2011 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->record_type);
2012 TYPE_FIELDS (ctx->record_type) = f1;
2013 if (ctx->srecord_type)
2015 f1 = lookup_sfield (OMP_CLAUSE_DECL (c1), ctx);
2016 f2 = lookup_sfield (OMP_CLAUSE_DECL (c2), ctx);
2017 p = &TYPE_FIELDS (ctx->srecord_type);
2018 while (*p)
2019 if (*p == f1 || *p == f2)
2020 *p = DECL_CHAIN (*p);
2021 else
2022 p = &DECL_CHAIN (*p);
2023 DECL_CHAIN (f1) = f2;
2024 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
2025 TYPE_FIELDS (ctx->srecord_type) = f1;
2028 layout_type (ctx->record_type);
2029 fixup_child_record_type (ctx);
2030 if (ctx->srecord_type)
2031 layout_type (ctx->srecord_type);
2032 tree t = fold_convert_loc (loc, long_integer_type_node,
2033 TYPE_SIZE_UNIT (ctx->record_type));
2034 gimple_omp_task_set_arg_size (ctx->stmt, t);
2035 t = build_int_cst (long_integer_type_node,
2036 TYPE_ALIGN_UNIT (ctx->record_type));
2037 gimple_omp_task_set_arg_align (ctx->stmt, t);
2041 /* Find the enclosing offload context. */
2043 static omp_context *
2044 enclosing_target_ctx (omp_context *ctx)
2046 for (; ctx; ctx = ctx->outer)
2047 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET)
2048 break;
2050 return ctx;
2053 /* Return true if ctx is part of an oacc kernels region. */
2055 static bool
2056 ctx_in_oacc_kernels_region (omp_context *ctx)
2058 for (;ctx != NULL; ctx = ctx->outer)
2060 gimple *stmt = ctx->stmt;
2061 if (gimple_code (stmt) == GIMPLE_OMP_TARGET
2062 && gimple_omp_target_kind (stmt) == GF_OMP_TARGET_KIND_OACC_KERNELS)
2063 return true;
2066 return false;
2069 /* Check the parallelism clauses inside a kernels regions.
2070 Until kernels handling moves to use the same loop indirection
2071 scheme as parallel, we need to do this checking early. */
2073 static unsigned
2074 check_oacc_kernel_gwv (gomp_for *stmt, omp_context *ctx)
2076 bool checking = true;
2077 unsigned outer_mask = 0;
2078 unsigned this_mask = 0;
2079 bool has_seq = false, has_auto = false;
2081 if (ctx->outer)
2082 outer_mask = check_oacc_kernel_gwv (NULL, ctx->outer);
2083 if (!stmt)
2085 checking = false;
2086 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR)
2087 return outer_mask;
2088 stmt = as_a <gomp_for *> (ctx->stmt);
2091 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2093 switch (OMP_CLAUSE_CODE (c))
2095 case OMP_CLAUSE_GANG:
2096 this_mask |= GOMP_DIM_MASK (GOMP_DIM_GANG);
2097 break;
2098 case OMP_CLAUSE_WORKER:
2099 this_mask |= GOMP_DIM_MASK (GOMP_DIM_WORKER);
2100 break;
2101 case OMP_CLAUSE_VECTOR:
2102 this_mask |= GOMP_DIM_MASK (GOMP_DIM_VECTOR);
2103 break;
2104 case OMP_CLAUSE_SEQ:
2105 has_seq = true;
2106 break;
2107 case OMP_CLAUSE_AUTO:
2108 has_auto = true;
2109 break;
2110 default:
2111 break;
2115 if (checking)
2117 if (has_seq && (this_mask || has_auto))
2118 error_at (gimple_location (stmt), "%<seq%> overrides other"
2119 " OpenACC loop specifiers");
2120 else if (has_auto && this_mask)
2121 error_at (gimple_location (stmt), "%<auto%> conflicts with other"
2122 " OpenACC loop specifiers");
2124 if (this_mask & outer_mask)
2125 error_at (gimple_location (stmt), "inner loop uses same"
2126 " OpenACC parallelism as containing loop");
2129 return outer_mask | this_mask;
2132 /* Scan a GIMPLE_OMP_FOR. */
2134 static omp_context *
2135 scan_omp_for (gomp_for *stmt, omp_context *outer_ctx)
2137 omp_context *ctx;
2138 size_t i;
2139 tree clauses = gimple_omp_for_clauses (stmt);
2141 ctx = new_omp_context (stmt, outer_ctx);
2143 if (is_gimple_omp_oacc (stmt))
2145 omp_context *tgt = enclosing_target_ctx (outer_ctx);
2147 if (!tgt || is_oacc_parallel (tgt))
2148 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
2150 char const *check = NULL;
2152 switch (OMP_CLAUSE_CODE (c))
2154 case OMP_CLAUSE_GANG:
2155 check = "gang";
2156 break;
2158 case OMP_CLAUSE_WORKER:
2159 check = "worker";
2160 break;
2162 case OMP_CLAUSE_VECTOR:
2163 check = "vector";
2164 break;
2166 default:
2167 break;
2170 if (check && OMP_CLAUSE_OPERAND (c, 0))
2171 error_at (gimple_location (stmt),
2172 "argument not permitted on %qs clause in"
2173 " OpenACC %<parallel%>", check);
2176 if (tgt && is_oacc_kernels (tgt))
2178 /* Strip out reductions, as they are not handled yet. */
2179 tree *prev_ptr = &clauses;
2181 while (tree probe = *prev_ptr)
2183 tree *next_ptr = &OMP_CLAUSE_CHAIN (probe);
2185 if (OMP_CLAUSE_CODE (probe) == OMP_CLAUSE_REDUCTION)
2186 *prev_ptr = *next_ptr;
2187 else
2188 prev_ptr = next_ptr;
2191 gimple_omp_for_set_clauses (stmt, clauses);
2192 check_oacc_kernel_gwv (stmt, ctx);
2196 scan_sharing_clauses (clauses, ctx);
2198 scan_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
2199 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
2201 scan_omp_op (gimple_omp_for_index_ptr (stmt, i), ctx);
2202 scan_omp_op (gimple_omp_for_initial_ptr (stmt, i), ctx);
2203 scan_omp_op (gimple_omp_for_final_ptr (stmt, i), ctx);
2204 scan_omp_op (gimple_omp_for_incr_ptr (stmt, i), ctx);
2206 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2207 return ctx;
2210 /* Duplicate #pragma omp simd, one for SIMT, another one for SIMD. */
2212 static void
2213 scan_omp_simd (gimple_stmt_iterator *gsi, gomp_for *stmt,
2214 omp_context *outer_ctx)
2216 gbind *bind = gimple_build_bind (NULL, NULL, NULL);
2217 gsi_replace (gsi, bind, false);
2218 gimple_seq seq = NULL;
2219 gimple *g = gimple_build_call_internal (IFN_GOMP_USE_SIMT, 0);
2220 tree cond = create_tmp_var_raw (integer_type_node);
2221 DECL_CONTEXT (cond) = current_function_decl;
2222 DECL_SEEN_IN_BIND_EXPR_P (cond) = 1;
2223 gimple_bind_set_vars (bind, cond);
2224 gimple_call_set_lhs (g, cond);
2225 gimple_seq_add_stmt (&seq, g);
2226 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
2227 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
2228 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
2229 g = gimple_build_cond (NE_EXPR, cond, integer_zero_node, lab1, lab2);
2230 gimple_seq_add_stmt (&seq, g);
2231 g = gimple_build_label (lab1);
2232 gimple_seq_add_stmt (&seq, g);
2233 gimple_seq new_seq = copy_gimple_seq_and_replace_locals (stmt);
2234 gomp_for *new_stmt = as_a <gomp_for *> (new_seq);
2235 tree clause = build_omp_clause (gimple_location (stmt), OMP_CLAUSE__SIMT_);
2236 OMP_CLAUSE_CHAIN (clause) = gimple_omp_for_clauses (new_stmt);
2237 gimple_omp_for_set_clauses (new_stmt, clause);
2238 gimple_seq_add_stmt (&seq, new_stmt);
2239 g = gimple_build_goto (lab3);
2240 gimple_seq_add_stmt (&seq, g);
2241 g = gimple_build_label (lab2);
2242 gimple_seq_add_stmt (&seq, g);
2243 gimple_seq_add_stmt (&seq, stmt);
2244 g = gimple_build_label (lab3);
2245 gimple_seq_add_stmt (&seq, g);
2246 gimple_bind_set_body (bind, seq);
2247 update_stmt (bind);
2248 scan_omp_for (new_stmt, outer_ctx);
2249 scan_omp_for (stmt, outer_ctx)->simt_stmt = new_stmt;
2252 /* Scan an OpenMP sections directive. */
2254 static void
2255 scan_omp_sections (gomp_sections *stmt, omp_context *outer_ctx)
2257 omp_context *ctx;
2259 ctx = new_omp_context (stmt, outer_ctx);
2260 scan_sharing_clauses (gimple_omp_sections_clauses (stmt), ctx);
2261 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2264 /* Scan an OpenMP single directive. */
2266 static void
2267 scan_omp_single (gomp_single *stmt, omp_context *outer_ctx)
2269 omp_context *ctx;
2270 tree name;
2272 ctx = new_omp_context (stmt, outer_ctx);
2273 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2274 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2275 name = create_tmp_var_name (".omp_copy_s");
2276 name = build_decl (gimple_location (stmt),
2277 TYPE_DECL, name, ctx->record_type);
2278 TYPE_NAME (ctx->record_type) = name;
2280 scan_sharing_clauses (gimple_omp_single_clauses (stmt), ctx);
2281 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2283 if (TYPE_FIELDS (ctx->record_type) == NULL)
2284 ctx->record_type = NULL;
2285 else
2286 layout_type (ctx->record_type);
2289 /* Return true if the CLAUSES of an omp target guarantee that the base pointers
2290 used in the corresponding offloaded function are restrict. */
2292 static bool
2293 omp_target_base_pointers_restrict_p (tree clauses)
2295 /* The analysis relies on the GOMP_MAP_FORCE_* mapping kinds, which are only
2296 used by OpenACC. */
2297 if (flag_openacc == 0)
2298 return false;
2300 /* I. Basic example:
2302 void foo (void)
2304 unsigned int a[2], b[2];
2306 #pragma acc kernels \
2307 copyout (a) \
2308 copyout (b)
2310 a[0] = 0;
2311 b[0] = 1;
2315 After gimplification, we have:
2317 #pragma omp target oacc_kernels \
2318 map(force_from:a [len: 8]) \
2319 map(force_from:b [len: 8])
2321 a[0] = 0;
2322 b[0] = 1;
2325 Because both mappings have the force prefix, we know that they will be
2326 allocated when calling the corresponding offloaded function, which means we
2327 can mark the base pointers for a and b in the offloaded function as
2328 restrict. */
2330 tree c;
2331 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
2333 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP)
2334 return false;
2336 switch (OMP_CLAUSE_MAP_KIND (c))
2338 case GOMP_MAP_FORCE_ALLOC:
2339 case GOMP_MAP_FORCE_TO:
2340 case GOMP_MAP_FORCE_FROM:
2341 case GOMP_MAP_FORCE_TOFROM:
2342 break;
2343 default:
2344 return false;
2348 return true;
2351 /* Scan a GIMPLE_OMP_TARGET. */
2353 static void
2354 scan_omp_target (gomp_target *stmt, omp_context *outer_ctx)
2356 omp_context *ctx;
2357 tree name;
2358 bool offloaded = is_gimple_omp_offloaded (stmt);
2359 tree clauses = gimple_omp_target_clauses (stmt);
2361 ctx = new_omp_context (stmt, outer_ctx);
2362 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2363 ctx->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
2364 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2365 name = create_tmp_var_name (".omp_data_t");
2366 name = build_decl (gimple_location (stmt),
2367 TYPE_DECL, name, ctx->record_type);
2368 DECL_ARTIFICIAL (name) = 1;
2369 DECL_NAMELESS (name) = 1;
2370 TYPE_NAME (ctx->record_type) = name;
2371 TYPE_ARTIFICIAL (ctx->record_type) = 1;
2373 bool base_pointers_restrict = false;
2374 if (offloaded)
2376 create_omp_child_function (ctx, false);
2377 gimple_omp_target_set_child_fn (stmt, ctx->cb.dst_fn);
2379 base_pointers_restrict = omp_target_base_pointers_restrict_p (clauses);
2380 if (base_pointers_restrict
2381 && dump_file && (dump_flags & TDF_DETAILS))
2382 fprintf (dump_file,
2383 "Base pointers in offloaded function are restrict\n");
2386 scan_sharing_clauses (clauses, ctx, base_pointers_restrict);
2387 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2389 if (TYPE_FIELDS (ctx->record_type) == NULL)
2390 ctx->record_type = ctx->receiver_decl = NULL;
2391 else
2393 TYPE_FIELDS (ctx->record_type)
2394 = nreverse (TYPE_FIELDS (ctx->record_type));
2395 if (flag_checking)
2397 unsigned int align = DECL_ALIGN (TYPE_FIELDS (ctx->record_type));
2398 for (tree field = TYPE_FIELDS (ctx->record_type);
2399 field;
2400 field = DECL_CHAIN (field))
2401 gcc_assert (DECL_ALIGN (field) == align);
2403 layout_type (ctx->record_type);
2404 if (offloaded)
2405 fixup_child_record_type (ctx);
2409 /* Scan an OpenMP teams directive. */
2411 static void
2412 scan_omp_teams (gomp_teams *stmt, omp_context *outer_ctx)
2414 omp_context *ctx = new_omp_context (stmt, outer_ctx);
2415 scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
2416 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2419 /* Check nesting restrictions. */
2420 static bool
2421 check_omp_nesting_restrictions (gimple *stmt, omp_context *ctx)
2423 tree c;
2425 if (ctx && gimple_code (ctx->stmt) == GIMPLE_OMP_GRID_BODY)
2426 /* GRID_BODY is an artificial construct, nesting rules will be checked in
2427 the original copy of its contents. */
2428 return true;
2430 /* No nesting of non-OpenACC STMT (that is, an OpenMP one, or a GOMP builtin)
2431 inside an OpenACC CTX. */
2432 if (!(is_gimple_omp (stmt)
2433 && is_gimple_omp_oacc (stmt))
2434 /* Except for atomic codes that we share with OpenMP. */
2435 && !(gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
2436 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE))
2438 if (oacc_get_fn_attrib (cfun->decl) != NULL)
2440 error_at (gimple_location (stmt),
2441 "non-OpenACC construct inside of OpenACC routine");
2442 return false;
2444 else
2445 for (omp_context *octx = ctx; octx != NULL; octx = octx->outer)
2446 if (is_gimple_omp (octx->stmt)
2447 && is_gimple_omp_oacc (octx->stmt))
2449 error_at (gimple_location (stmt),
2450 "non-OpenACC construct inside of OpenACC region");
2451 return false;
2455 if (ctx != NULL)
2457 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
2458 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
2460 c = NULL_TREE;
2461 if (gimple_code (stmt) == GIMPLE_OMP_ORDERED)
2463 c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
2464 if (omp_find_clause (c, OMP_CLAUSE_SIMD))
2466 if (omp_find_clause (c, OMP_CLAUSE_THREADS)
2467 && (ctx->outer == NULL
2468 || !gimple_omp_for_combined_into_p (ctx->stmt)
2469 || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR
2470 || (gimple_omp_for_kind (ctx->outer->stmt)
2471 != GF_OMP_FOR_KIND_FOR)
2472 || !gimple_omp_for_combined_p (ctx->outer->stmt)))
2474 error_at (gimple_location (stmt),
2475 "%<ordered simd threads%> must be closely "
2476 "nested inside of %<for simd%> region");
2477 return false;
2479 return true;
2482 error_at (gimple_location (stmt),
2483 "OpenMP constructs other than %<#pragma omp ordered simd%>"
2484 " may not be nested inside %<simd%> region");
2485 return false;
2487 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
2489 if ((gimple_code (stmt) != GIMPLE_OMP_FOR
2490 || ((gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_DISTRIBUTE)
2491 && (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_GRID_LOOP)))
2492 && gimple_code (stmt) != GIMPLE_OMP_PARALLEL)
2494 error_at (gimple_location (stmt),
2495 "only %<distribute%> or %<parallel%> regions are "
2496 "allowed to be strictly nested inside %<teams%> "
2497 "region");
2498 return false;
2502 switch (gimple_code (stmt))
2504 case GIMPLE_OMP_FOR:
2505 if (gimple_omp_for_kind (stmt) & GF_OMP_FOR_SIMD)
2506 return true;
2507 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_DISTRIBUTE)
2509 if (ctx != NULL && gimple_code (ctx->stmt) != GIMPLE_OMP_TEAMS)
2511 error_at (gimple_location (stmt),
2512 "%<distribute%> region must be strictly nested "
2513 "inside %<teams%> construct");
2514 return false;
2516 return true;
2518 /* We split taskloop into task and nested taskloop in it. */
2519 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP)
2520 return true;
2521 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_OACC_LOOP)
2523 bool ok = false;
2525 if (ctx)
2526 switch (gimple_code (ctx->stmt))
2528 case GIMPLE_OMP_FOR:
2529 ok = (gimple_omp_for_kind (ctx->stmt)
2530 == GF_OMP_FOR_KIND_OACC_LOOP);
2531 break;
2533 case GIMPLE_OMP_TARGET:
2534 switch (gimple_omp_target_kind (ctx->stmt))
2536 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
2537 case GF_OMP_TARGET_KIND_OACC_KERNELS:
2538 ok = true;
2539 break;
2541 default:
2542 break;
2545 default:
2546 break;
2548 else if (oacc_get_fn_attrib (current_function_decl))
2549 ok = true;
2550 if (!ok)
2552 error_at (gimple_location (stmt),
2553 "OpenACC loop directive must be associated with"
2554 " an OpenACC compute region");
2555 return false;
2558 /* FALLTHRU */
2559 case GIMPLE_CALL:
2560 if (is_gimple_call (stmt)
2561 && (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2562 == BUILT_IN_GOMP_CANCEL
2563 || DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2564 == BUILT_IN_GOMP_CANCELLATION_POINT))
2566 const char *bad = NULL;
2567 const char *kind = NULL;
2568 const char *construct
2569 = (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2570 == BUILT_IN_GOMP_CANCEL)
2571 ? "#pragma omp cancel"
2572 : "#pragma omp cancellation point";
2573 if (ctx == NULL)
2575 error_at (gimple_location (stmt), "orphaned %qs construct",
2576 construct);
2577 return false;
2579 switch (tree_fits_shwi_p (gimple_call_arg (stmt, 0))
2580 ? tree_to_shwi (gimple_call_arg (stmt, 0))
2581 : 0)
2583 case 1:
2584 if (gimple_code (ctx->stmt) != GIMPLE_OMP_PARALLEL)
2585 bad = "#pragma omp parallel";
2586 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2587 == BUILT_IN_GOMP_CANCEL
2588 && !integer_zerop (gimple_call_arg (stmt, 1)))
2589 ctx->cancellable = true;
2590 kind = "parallel";
2591 break;
2592 case 2:
2593 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
2594 || gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR)
2595 bad = "#pragma omp for";
2596 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2597 == BUILT_IN_GOMP_CANCEL
2598 && !integer_zerop (gimple_call_arg (stmt, 1)))
2600 ctx->cancellable = true;
2601 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2602 OMP_CLAUSE_NOWAIT))
2603 warning_at (gimple_location (stmt), 0,
2604 "%<#pragma omp cancel for%> inside "
2605 "%<nowait%> for construct");
2606 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2607 OMP_CLAUSE_ORDERED))
2608 warning_at (gimple_location (stmt), 0,
2609 "%<#pragma omp cancel for%> inside "
2610 "%<ordered%> for construct");
2612 kind = "for";
2613 break;
2614 case 4:
2615 if (gimple_code (ctx->stmt) != GIMPLE_OMP_SECTIONS
2616 && gimple_code (ctx->stmt) != GIMPLE_OMP_SECTION)
2617 bad = "#pragma omp sections";
2618 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2619 == BUILT_IN_GOMP_CANCEL
2620 && !integer_zerop (gimple_call_arg (stmt, 1)))
2622 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
2624 ctx->cancellable = true;
2625 if (omp_find_clause (gimple_omp_sections_clauses
2626 (ctx->stmt),
2627 OMP_CLAUSE_NOWAIT))
2628 warning_at (gimple_location (stmt), 0,
2629 "%<#pragma omp cancel sections%> inside "
2630 "%<nowait%> sections construct");
2632 else
2634 gcc_assert (ctx->outer
2635 && gimple_code (ctx->outer->stmt)
2636 == GIMPLE_OMP_SECTIONS);
2637 ctx->outer->cancellable = true;
2638 if (omp_find_clause (gimple_omp_sections_clauses
2639 (ctx->outer->stmt),
2640 OMP_CLAUSE_NOWAIT))
2641 warning_at (gimple_location (stmt), 0,
2642 "%<#pragma omp cancel sections%> inside "
2643 "%<nowait%> sections construct");
2646 kind = "sections";
2647 break;
2648 case 8:
2649 if (gimple_code (ctx->stmt) != GIMPLE_OMP_TASK)
2650 bad = "#pragma omp task";
2651 else
2653 for (omp_context *octx = ctx->outer;
2654 octx; octx = octx->outer)
2656 switch (gimple_code (octx->stmt))
2658 case GIMPLE_OMP_TASKGROUP:
2659 break;
2660 case GIMPLE_OMP_TARGET:
2661 if (gimple_omp_target_kind (octx->stmt)
2662 != GF_OMP_TARGET_KIND_REGION)
2663 continue;
2664 /* FALLTHRU */
2665 case GIMPLE_OMP_PARALLEL:
2666 case GIMPLE_OMP_TEAMS:
2667 error_at (gimple_location (stmt),
2668 "%<%s taskgroup%> construct not closely "
2669 "nested inside of %<taskgroup%> region",
2670 construct);
2671 return false;
2672 default:
2673 continue;
2675 break;
2677 ctx->cancellable = true;
2679 kind = "taskgroup";
2680 break;
2681 default:
2682 error_at (gimple_location (stmt), "invalid arguments");
2683 return false;
2685 if (bad)
2687 error_at (gimple_location (stmt),
2688 "%<%s %s%> construct not closely nested inside of %qs",
2689 construct, kind, bad);
2690 return false;
2693 /* FALLTHRU */
2694 case GIMPLE_OMP_SECTIONS:
2695 case GIMPLE_OMP_SINGLE:
2696 for (; ctx != NULL; ctx = ctx->outer)
2697 switch (gimple_code (ctx->stmt))
2699 case GIMPLE_OMP_FOR:
2700 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
2701 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
2702 break;
2703 /* FALLTHRU */
2704 case GIMPLE_OMP_SECTIONS:
2705 case GIMPLE_OMP_SINGLE:
2706 case GIMPLE_OMP_ORDERED:
2707 case GIMPLE_OMP_MASTER:
2708 case GIMPLE_OMP_TASK:
2709 case GIMPLE_OMP_CRITICAL:
2710 if (is_gimple_call (stmt))
2712 if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2713 != BUILT_IN_GOMP_BARRIER)
2714 return true;
2715 error_at (gimple_location (stmt),
2716 "barrier region may not be closely nested inside "
2717 "of work-sharing, %<critical%>, %<ordered%>, "
2718 "%<master%>, explicit %<task%> or %<taskloop%> "
2719 "region");
2720 return false;
2722 error_at (gimple_location (stmt),
2723 "work-sharing region may not be closely nested inside "
2724 "of work-sharing, %<critical%>, %<ordered%>, "
2725 "%<master%>, explicit %<task%> or %<taskloop%> region");
2726 return false;
2727 case GIMPLE_OMP_PARALLEL:
2728 case GIMPLE_OMP_TEAMS:
2729 return true;
2730 case GIMPLE_OMP_TARGET:
2731 if (gimple_omp_target_kind (ctx->stmt)
2732 == GF_OMP_TARGET_KIND_REGION)
2733 return true;
2734 break;
2735 default:
2736 break;
2738 break;
2739 case GIMPLE_OMP_MASTER:
2740 for (; ctx != NULL; ctx = ctx->outer)
2741 switch (gimple_code (ctx->stmt))
2743 case GIMPLE_OMP_FOR:
2744 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
2745 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
2746 break;
2747 /* FALLTHRU */
2748 case GIMPLE_OMP_SECTIONS:
2749 case GIMPLE_OMP_SINGLE:
2750 case GIMPLE_OMP_TASK:
2751 error_at (gimple_location (stmt),
2752 "%<master%> region may not be closely nested inside "
2753 "of work-sharing, explicit %<task%> or %<taskloop%> "
2754 "region");
2755 return false;
2756 case GIMPLE_OMP_PARALLEL:
2757 case GIMPLE_OMP_TEAMS:
2758 return true;
2759 case GIMPLE_OMP_TARGET:
2760 if (gimple_omp_target_kind (ctx->stmt)
2761 == GF_OMP_TARGET_KIND_REGION)
2762 return true;
2763 break;
2764 default:
2765 break;
2767 break;
2768 case GIMPLE_OMP_TASK:
2769 for (c = gimple_omp_task_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2770 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
2771 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
2772 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
2774 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
2775 error_at (OMP_CLAUSE_LOCATION (c),
2776 "%<depend(%s)%> is only allowed in %<omp ordered%>",
2777 kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
2778 return false;
2780 break;
2781 case GIMPLE_OMP_ORDERED:
2782 for (c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
2783 c; c = OMP_CLAUSE_CHAIN (c))
2785 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
2787 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREADS
2788 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SIMD);
2789 continue;
2791 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
2792 if (kind == OMP_CLAUSE_DEPEND_SOURCE
2793 || kind == OMP_CLAUSE_DEPEND_SINK)
2795 tree oclause;
2796 /* Look for containing ordered(N) loop. */
2797 if (ctx == NULL
2798 || gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
2799 || (oclause
2800 = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2801 OMP_CLAUSE_ORDERED)) == NULL_TREE)
2803 error_at (OMP_CLAUSE_LOCATION (c),
2804 "%<ordered%> construct with %<depend%> clause "
2805 "must be closely nested inside an %<ordered%> "
2806 "loop");
2807 return false;
2809 else if (OMP_CLAUSE_ORDERED_EXPR (oclause) == NULL_TREE)
2811 error_at (OMP_CLAUSE_LOCATION (c),
2812 "%<ordered%> construct with %<depend%> clause "
2813 "must be closely nested inside a loop with "
2814 "%<ordered%> clause with a parameter");
2815 return false;
2818 else
2820 error_at (OMP_CLAUSE_LOCATION (c),
2821 "invalid depend kind in omp %<ordered%> %<depend%>");
2822 return false;
2825 c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
2826 if (omp_find_clause (c, OMP_CLAUSE_SIMD))
2828 /* ordered simd must be closely nested inside of simd region,
2829 and simd region must not encounter constructs other than
2830 ordered simd, therefore ordered simd may be either orphaned,
2831 or ctx->stmt must be simd. The latter case is handled already
2832 earlier. */
2833 if (ctx != NULL)
2835 error_at (gimple_location (stmt),
2836 "%<ordered%> %<simd%> must be closely nested inside "
2837 "%<simd%> region");
2838 return false;
2841 for (; ctx != NULL; ctx = ctx->outer)
2842 switch (gimple_code (ctx->stmt))
2844 case GIMPLE_OMP_CRITICAL:
2845 case GIMPLE_OMP_TASK:
2846 case GIMPLE_OMP_ORDERED:
2847 ordered_in_taskloop:
2848 error_at (gimple_location (stmt),
2849 "%<ordered%> region may not be closely nested inside "
2850 "of %<critical%>, %<ordered%>, explicit %<task%> or "
2851 "%<taskloop%> region");
2852 return false;
2853 case GIMPLE_OMP_FOR:
2854 if (gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP)
2855 goto ordered_in_taskloop;
2856 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2857 OMP_CLAUSE_ORDERED) == NULL)
2859 error_at (gimple_location (stmt),
2860 "%<ordered%> region must be closely nested inside "
2861 "a loop region with an %<ordered%> clause");
2862 return false;
2864 return true;
2865 case GIMPLE_OMP_TARGET:
2866 if (gimple_omp_target_kind (ctx->stmt)
2867 != GF_OMP_TARGET_KIND_REGION)
2868 break;
2869 /* FALLTHRU */
2870 case GIMPLE_OMP_PARALLEL:
2871 case GIMPLE_OMP_TEAMS:
2872 error_at (gimple_location (stmt),
2873 "%<ordered%> region must be closely nested inside "
2874 "a loop region with an %<ordered%> clause");
2875 return false;
2876 default:
2877 break;
2879 break;
2880 case GIMPLE_OMP_CRITICAL:
2882 tree this_stmt_name
2883 = gimple_omp_critical_name (as_a <gomp_critical *> (stmt));
2884 for (; ctx != NULL; ctx = ctx->outer)
2885 if (gomp_critical *other_crit
2886 = dyn_cast <gomp_critical *> (ctx->stmt))
2887 if (this_stmt_name == gimple_omp_critical_name (other_crit))
2889 error_at (gimple_location (stmt),
2890 "%<critical%> region may not be nested inside "
2891 "a %<critical%> region with the same name");
2892 return false;
2895 break;
2896 case GIMPLE_OMP_TEAMS:
2897 if (ctx == NULL
2898 || gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET
2899 || gimple_omp_target_kind (ctx->stmt) != GF_OMP_TARGET_KIND_REGION)
2901 error_at (gimple_location (stmt),
2902 "%<teams%> construct not closely nested inside of "
2903 "%<target%> construct");
2904 return false;
2906 break;
2907 case GIMPLE_OMP_TARGET:
2908 for (c = gimple_omp_target_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2909 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
2910 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
2911 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
2913 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
2914 error_at (OMP_CLAUSE_LOCATION (c),
2915 "%<depend(%s)%> is only allowed in %<omp ordered%>",
2916 kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
2917 return false;
2919 if (is_gimple_omp_offloaded (stmt)
2920 && oacc_get_fn_attrib (cfun->decl) != NULL)
2922 error_at (gimple_location (stmt),
2923 "OpenACC region inside of OpenACC routine, nested "
2924 "parallelism not supported yet");
2925 return false;
2927 for (; ctx != NULL; ctx = ctx->outer)
2929 if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET)
2931 if (is_gimple_omp (stmt)
2932 && is_gimple_omp_oacc (stmt)
2933 && is_gimple_omp (ctx->stmt))
2935 error_at (gimple_location (stmt),
2936 "OpenACC construct inside of non-OpenACC region");
2937 return false;
2939 continue;
2942 const char *stmt_name, *ctx_stmt_name;
2943 switch (gimple_omp_target_kind (stmt))
2945 case GF_OMP_TARGET_KIND_REGION: stmt_name = "target"; break;
2946 case GF_OMP_TARGET_KIND_DATA: stmt_name = "target data"; break;
2947 case GF_OMP_TARGET_KIND_UPDATE: stmt_name = "target update"; break;
2948 case GF_OMP_TARGET_KIND_ENTER_DATA:
2949 stmt_name = "target enter data"; break;
2950 case GF_OMP_TARGET_KIND_EXIT_DATA:
2951 stmt_name = "target exit data"; break;
2952 case GF_OMP_TARGET_KIND_OACC_PARALLEL: stmt_name = "parallel"; break;
2953 case GF_OMP_TARGET_KIND_OACC_KERNELS: stmt_name = "kernels"; break;
2954 case GF_OMP_TARGET_KIND_OACC_DATA: stmt_name = "data"; break;
2955 case GF_OMP_TARGET_KIND_OACC_UPDATE: stmt_name = "update"; break;
2956 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
2957 stmt_name = "enter/exit data"; break;
2958 case GF_OMP_TARGET_KIND_OACC_HOST_DATA: stmt_name = "host_data";
2959 break;
2960 default: gcc_unreachable ();
2962 switch (gimple_omp_target_kind (ctx->stmt))
2964 case GF_OMP_TARGET_KIND_REGION: ctx_stmt_name = "target"; break;
2965 case GF_OMP_TARGET_KIND_DATA: ctx_stmt_name = "target data"; break;
2966 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
2967 ctx_stmt_name = "parallel"; break;
2968 case GF_OMP_TARGET_KIND_OACC_KERNELS:
2969 ctx_stmt_name = "kernels"; break;
2970 case GF_OMP_TARGET_KIND_OACC_DATA: ctx_stmt_name = "data"; break;
2971 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
2972 ctx_stmt_name = "host_data"; break;
2973 default: gcc_unreachable ();
2976 /* OpenACC/OpenMP mismatch? */
2977 if (is_gimple_omp_oacc (stmt)
2978 != is_gimple_omp_oacc (ctx->stmt))
2980 error_at (gimple_location (stmt),
2981 "%s %qs construct inside of %s %qs region",
2982 (is_gimple_omp_oacc (stmt)
2983 ? "OpenACC" : "OpenMP"), stmt_name,
2984 (is_gimple_omp_oacc (ctx->stmt)
2985 ? "OpenACC" : "OpenMP"), ctx_stmt_name);
2986 return false;
2988 if (is_gimple_omp_offloaded (ctx->stmt))
2990 /* No GIMPLE_OMP_TARGET inside offloaded OpenACC CTX. */
2991 if (is_gimple_omp_oacc (ctx->stmt))
2993 error_at (gimple_location (stmt),
2994 "%qs construct inside of %qs region",
2995 stmt_name, ctx_stmt_name);
2996 return false;
2998 else
3000 warning_at (gimple_location (stmt), 0,
3001 "%qs construct inside of %qs region",
3002 stmt_name, ctx_stmt_name);
3006 break;
3007 default:
3008 break;
3010 return true;
3014 /* Helper function scan_omp.
3016 Callback for walk_tree or operators in walk_gimple_stmt used to
3017 scan for OMP directives in TP. */
3019 static tree
3020 scan_omp_1_op (tree *tp, int *walk_subtrees, void *data)
3022 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
3023 omp_context *ctx = (omp_context *) wi->info;
3024 tree t = *tp;
3026 switch (TREE_CODE (t))
3028 case VAR_DECL:
3029 case PARM_DECL:
3030 case LABEL_DECL:
3031 case RESULT_DECL:
3032 if (ctx)
3034 tree repl = remap_decl (t, &ctx->cb);
3035 gcc_checking_assert (TREE_CODE (repl) != ERROR_MARK);
3036 *tp = repl;
3038 break;
3040 default:
3041 if (ctx && TYPE_P (t))
3042 *tp = remap_type (t, &ctx->cb);
3043 else if (!DECL_P (t))
3045 *walk_subtrees = 1;
3046 if (ctx)
3048 tree tem = remap_type (TREE_TYPE (t), &ctx->cb);
3049 if (tem != TREE_TYPE (t))
3051 if (TREE_CODE (t) == INTEGER_CST)
3052 *tp = wide_int_to_tree (tem, t);
3053 else
3054 TREE_TYPE (t) = tem;
3058 break;
3061 return NULL_TREE;
3064 /* Return true if FNDECL is a setjmp or a longjmp. */
3066 static bool
3067 setjmp_or_longjmp_p (const_tree fndecl)
3069 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
3070 && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_SETJMP
3071 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_LONGJMP))
3072 return true;
3074 tree declname = DECL_NAME (fndecl);
3075 if (!declname)
3076 return false;
3077 const char *name = IDENTIFIER_POINTER (declname);
3078 return !strcmp (name, "setjmp") || !strcmp (name, "longjmp");
3082 /* Helper function for scan_omp.
3084 Callback for walk_gimple_stmt used to scan for OMP directives in
3085 the current statement in GSI. */
3087 static tree
3088 scan_omp_1_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
3089 struct walk_stmt_info *wi)
3091 gimple *stmt = gsi_stmt (*gsi);
3092 omp_context *ctx = (omp_context *) wi->info;
3094 if (gimple_has_location (stmt))
3095 input_location = gimple_location (stmt);
3097 /* Check the nesting restrictions. */
3098 bool remove = false;
3099 if (is_gimple_omp (stmt))
3100 remove = !check_omp_nesting_restrictions (stmt, ctx);
3101 else if (is_gimple_call (stmt))
3103 tree fndecl = gimple_call_fndecl (stmt);
3104 if (fndecl)
3106 if (setjmp_or_longjmp_p (fndecl)
3107 && ctx
3108 && gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
3109 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
3111 remove = true;
3112 error_at (gimple_location (stmt),
3113 "setjmp/longjmp inside simd construct");
3115 else if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3116 switch (DECL_FUNCTION_CODE (fndecl))
3118 case BUILT_IN_GOMP_BARRIER:
3119 case BUILT_IN_GOMP_CANCEL:
3120 case BUILT_IN_GOMP_CANCELLATION_POINT:
3121 case BUILT_IN_GOMP_TASKYIELD:
3122 case BUILT_IN_GOMP_TASKWAIT:
3123 case BUILT_IN_GOMP_TASKGROUP_START:
3124 case BUILT_IN_GOMP_TASKGROUP_END:
3125 remove = !check_omp_nesting_restrictions (stmt, ctx);
3126 break;
3127 default:
3128 break;
3132 if (remove)
3134 stmt = gimple_build_nop ();
3135 gsi_replace (gsi, stmt, false);
3138 *handled_ops_p = true;
3140 switch (gimple_code (stmt))
3142 case GIMPLE_OMP_PARALLEL:
3143 taskreg_nesting_level++;
3144 scan_omp_parallel (gsi, ctx);
3145 taskreg_nesting_level--;
3146 break;
3148 case GIMPLE_OMP_TASK:
3149 taskreg_nesting_level++;
3150 scan_omp_task (gsi, ctx);
3151 taskreg_nesting_level--;
3152 break;
3154 case GIMPLE_OMP_FOR:
3155 if (((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
3156 & GF_OMP_FOR_KIND_MASK) == GF_OMP_FOR_KIND_SIMD)
3157 && omp_maybe_offloaded_ctx (ctx)
3158 && omp_max_simt_vf ())
3159 scan_omp_simd (gsi, as_a <gomp_for *> (stmt), ctx);
3160 else
3161 scan_omp_for (as_a <gomp_for *> (stmt), ctx);
3162 break;
3164 case GIMPLE_OMP_SECTIONS:
3165 scan_omp_sections (as_a <gomp_sections *> (stmt), ctx);
3166 break;
3168 case GIMPLE_OMP_SINGLE:
3169 scan_omp_single (as_a <gomp_single *> (stmt), ctx);
3170 break;
3172 case GIMPLE_OMP_SECTION:
3173 case GIMPLE_OMP_MASTER:
3174 case GIMPLE_OMP_TASKGROUP:
3175 case GIMPLE_OMP_ORDERED:
3176 case GIMPLE_OMP_CRITICAL:
3177 case GIMPLE_OMP_GRID_BODY:
3178 ctx = new_omp_context (stmt, ctx);
3179 scan_omp (gimple_omp_body_ptr (stmt), ctx);
3180 break;
3182 case GIMPLE_OMP_TARGET:
3183 scan_omp_target (as_a <gomp_target *> (stmt), ctx);
3184 break;
3186 case GIMPLE_OMP_TEAMS:
3187 scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
3188 break;
3190 case GIMPLE_BIND:
3192 tree var;
3194 *handled_ops_p = false;
3195 if (ctx)
3196 for (var = gimple_bind_vars (as_a <gbind *> (stmt));
3197 var ;
3198 var = DECL_CHAIN (var))
3199 insert_decl_map (&ctx->cb, var, var);
3201 break;
3202 default:
3203 *handled_ops_p = false;
3204 break;
3207 return NULL_TREE;
3211 /* Scan all the statements starting at the current statement. CTX
3212 contains context information about the OMP directives and
3213 clauses found during the scan. */
3215 static void
3216 scan_omp (gimple_seq *body_p, omp_context *ctx)
3218 location_t saved_location;
3219 struct walk_stmt_info wi;
3221 memset (&wi, 0, sizeof (wi));
3222 wi.info = ctx;
3223 wi.want_locations = true;
3225 saved_location = input_location;
3226 walk_gimple_seq_mod (body_p, scan_omp_1_stmt, scan_omp_1_op, &wi);
3227 input_location = saved_location;
3230 /* Re-gimplification and code generation routines. */
3232 /* If a context was created for STMT when it was scanned, return it. */
3234 static omp_context *
3235 maybe_lookup_ctx (gimple *stmt)
3237 splay_tree_node n;
3238 n = splay_tree_lookup (all_contexts, (splay_tree_key) stmt);
3239 return n ? (omp_context *) n->value : NULL;
3243 /* Find the mapping for DECL in CTX or the immediately enclosing
3244 context that has a mapping for DECL.
3246 If CTX is a nested parallel directive, we may have to use the decl
3247 mappings created in CTX's parent context. Suppose that we have the
3248 following parallel nesting (variable UIDs showed for clarity):
3250 iD.1562 = 0;
3251 #omp parallel shared(iD.1562) -> outer parallel
3252 iD.1562 = iD.1562 + 1;
3254 #omp parallel shared (iD.1562) -> inner parallel
3255 iD.1562 = iD.1562 - 1;
3257 Each parallel structure will create a distinct .omp_data_s structure
3258 for copying iD.1562 in/out of the directive:
3260 outer parallel .omp_data_s.1.i -> iD.1562
3261 inner parallel .omp_data_s.2.i -> iD.1562
3263 A shared variable mapping will produce a copy-out operation before
3264 the parallel directive and a copy-in operation after it. So, in
3265 this case we would have:
3267 iD.1562 = 0;
3268 .omp_data_o.1.i = iD.1562;
3269 #omp parallel shared(iD.1562) -> outer parallel
3270 .omp_data_i.1 = &.omp_data_o.1
3271 .omp_data_i.1->i = .omp_data_i.1->i + 1;
3273 .omp_data_o.2.i = iD.1562; -> **
3274 #omp parallel shared(iD.1562) -> inner parallel
3275 .omp_data_i.2 = &.omp_data_o.2
3276 .omp_data_i.2->i = .omp_data_i.2->i - 1;
3279 ** This is a problem. The symbol iD.1562 cannot be referenced
3280 inside the body of the outer parallel region. But since we are
3281 emitting this copy operation while expanding the inner parallel
3282 directive, we need to access the CTX structure of the outer
3283 parallel directive to get the correct mapping:
3285 .omp_data_o.2.i = .omp_data_i.1->i
3287 Since there may be other workshare or parallel directives enclosing
3288 the parallel directive, it may be necessary to walk up the context
3289 parent chain. This is not a problem in general because nested
3290 parallelism happens only rarely. */
3292 static tree
3293 lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
3295 tree t;
3296 omp_context *up;
3298 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
3299 t = maybe_lookup_decl (decl, up);
3301 gcc_assert (!ctx->is_nested || t || is_global_var (decl));
3303 return t ? t : decl;
3307 /* Similar to lookup_decl_in_outer_ctx, but return DECL if not found
3308 in outer contexts. */
3310 static tree
3311 maybe_lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
3313 tree t = NULL;
3314 omp_context *up;
3316 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
3317 t = maybe_lookup_decl (decl, up);
3319 return t ? t : decl;
3323 /* Construct the initialization value for reduction operation OP. */
3325 tree
3326 omp_reduction_init_op (location_t loc, enum tree_code op, tree type)
3328 switch (op)
3330 case PLUS_EXPR:
3331 case MINUS_EXPR:
3332 case BIT_IOR_EXPR:
3333 case BIT_XOR_EXPR:
3334 case TRUTH_OR_EXPR:
3335 case TRUTH_ORIF_EXPR:
3336 case TRUTH_XOR_EXPR:
3337 case NE_EXPR:
3338 return build_zero_cst (type);
3340 case MULT_EXPR:
3341 case TRUTH_AND_EXPR:
3342 case TRUTH_ANDIF_EXPR:
3343 case EQ_EXPR:
3344 return fold_convert_loc (loc, type, integer_one_node);
3346 case BIT_AND_EXPR:
3347 return fold_convert_loc (loc, type, integer_minus_one_node);
3349 case MAX_EXPR:
3350 if (SCALAR_FLOAT_TYPE_P (type))
3352 REAL_VALUE_TYPE max, min;
3353 if (HONOR_INFINITIES (type))
3355 real_inf (&max);
3356 real_arithmetic (&min, NEGATE_EXPR, &max, NULL);
3358 else
3359 real_maxval (&min, 1, TYPE_MODE (type));
3360 return build_real (type, min);
3362 else if (POINTER_TYPE_P (type))
3364 wide_int min
3365 = wi::min_value (TYPE_PRECISION (type), TYPE_SIGN (type));
3366 return wide_int_to_tree (type, min);
3368 else
3370 gcc_assert (INTEGRAL_TYPE_P (type));
3371 return TYPE_MIN_VALUE (type);
3374 case MIN_EXPR:
3375 if (SCALAR_FLOAT_TYPE_P (type))
3377 REAL_VALUE_TYPE max;
3378 if (HONOR_INFINITIES (type))
3379 real_inf (&max);
3380 else
3381 real_maxval (&max, 0, TYPE_MODE (type));
3382 return build_real (type, max);
3384 else if (POINTER_TYPE_P (type))
3386 wide_int max
3387 = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
3388 return wide_int_to_tree (type, max);
3390 else
3392 gcc_assert (INTEGRAL_TYPE_P (type));
3393 return TYPE_MAX_VALUE (type);
3396 default:
3397 gcc_unreachable ();
3401 /* Construct the initialization value for reduction CLAUSE. */
3403 tree
3404 omp_reduction_init (tree clause, tree type)
3406 return omp_reduction_init_op (OMP_CLAUSE_LOCATION (clause),
3407 OMP_CLAUSE_REDUCTION_CODE (clause), type);
3410 /* Return alignment to be assumed for var in CLAUSE, which should be
3411 OMP_CLAUSE_ALIGNED. */
3413 static tree
3414 omp_clause_aligned_alignment (tree clause)
3416 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
3417 return OMP_CLAUSE_ALIGNED_ALIGNMENT (clause);
3419 /* Otherwise return implementation defined alignment. */
3420 unsigned int al = 1;
3421 machine_mode mode, vmode;
3422 int vs = targetm.vectorize.autovectorize_vector_sizes ();
3423 if (vs)
3424 vs = 1 << floor_log2 (vs);
3425 static enum mode_class classes[]
3426 = { MODE_INT, MODE_VECTOR_INT, MODE_FLOAT, MODE_VECTOR_FLOAT };
3427 for (int i = 0; i < 4; i += 2)
3428 for (mode = GET_CLASS_NARROWEST_MODE (classes[i]);
3429 mode != VOIDmode;
3430 mode = GET_MODE_WIDER_MODE (mode))
3432 vmode = targetm.vectorize.preferred_simd_mode (mode);
3433 if (GET_MODE_CLASS (vmode) != classes[i + 1])
3434 continue;
3435 while (vs
3436 && GET_MODE_SIZE (vmode) < vs
3437 && GET_MODE_2XWIDER_MODE (vmode) != VOIDmode)
3438 vmode = GET_MODE_2XWIDER_MODE (vmode);
3440 tree type = lang_hooks.types.type_for_mode (mode, 1);
3441 if (type == NULL_TREE || TYPE_MODE (type) != mode)
3442 continue;
3443 type = build_vector_type (type, GET_MODE_SIZE (vmode)
3444 / GET_MODE_SIZE (mode));
3445 if (TYPE_MODE (type) != vmode)
3446 continue;
3447 if (TYPE_ALIGN_UNIT (type) > al)
3448 al = TYPE_ALIGN_UNIT (type);
3450 return build_int_cst (integer_type_node, al);
3454 /* This structure is part of the interface between lower_rec_simd_input_clauses
3455 and lower_rec_input_clauses. */
3457 struct omplow_simd_context {
3458 tree idx;
3459 tree lane;
3460 int max_vf;
3461 bool is_simt;
3464 /* Helper function of lower_rec_input_clauses, used for #pragma omp simd
3465 privatization. */
3467 static bool
3468 lower_rec_simd_input_clauses (tree new_var, omp_context *ctx,
3469 omplow_simd_context *sctx, tree &ivar, tree &lvar)
3471 if (sctx->max_vf == 0)
3473 sctx->max_vf = sctx->is_simt ? omp_max_simt_vf () : omp_max_vf ();
3474 if (sctx->max_vf > 1)
3476 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3477 OMP_CLAUSE_SAFELEN);
3478 if (c
3479 && (TREE_CODE (OMP_CLAUSE_SAFELEN_EXPR (c)) != INTEGER_CST
3480 || tree_int_cst_sgn (OMP_CLAUSE_SAFELEN_EXPR (c)) != 1))
3481 sctx->max_vf = 1;
3482 else if (c && compare_tree_int (OMP_CLAUSE_SAFELEN_EXPR (c),
3483 sctx->max_vf) == -1)
3484 sctx->max_vf = tree_to_shwi (OMP_CLAUSE_SAFELEN_EXPR (c));
3486 if (sctx->max_vf > 1)
3488 sctx->idx = create_tmp_var (unsigned_type_node);
3489 sctx->lane = create_tmp_var (unsigned_type_node);
3492 if (sctx->max_vf == 1)
3493 return false;
3495 tree atype = build_array_type_nelts (TREE_TYPE (new_var), sctx->max_vf);
3496 tree avar = create_tmp_var_raw (atype);
3497 if (TREE_ADDRESSABLE (new_var))
3498 TREE_ADDRESSABLE (avar) = 1;
3499 DECL_ATTRIBUTES (avar)
3500 = tree_cons (get_identifier ("omp simd array"), NULL,
3501 DECL_ATTRIBUTES (avar));
3502 gimple_add_tmp_var (avar);
3503 ivar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, sctx->idx,
3504 NULL_TREE, NULL_TREE);
3505 lvar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, sctx->lane,
3506 NULL_TREE, NULL_TREE);
3507 if (DECL_P (new_var))
3509 SET_DECL_VALUE_EXPR (new_var, lvar);
3510 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
3512 return true;
3515 /* Helper function of lower_rec_input_clauses. For a reference
3516 in simd reduction, add an underlying variable it will reference. */
3518 static void
3519 handle_simd_reference (location_t loc, tree new_vard, gimple_seq *ilist)
3521 tree z = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_vard)));
3522 if (TREE_CONSTANT (z))
3524 z = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_vard)),
3525 get_name (new_vard));
3526 gimple_add_tmp_var (z);
3527 TREE_ADDRESSABLE (z) = 1;
3528 z = build_fold_addr_expr_loc (loc, z);
3529 gimplify_assign (new_vard, z, ilist);
3533 /* Generate code to implement the input clauses, FIRSTPRIVATE and COPYIN,
3534 from the receiver (aka child) side and initializers for REFERENCE_TYPE
3535 private variables. Initialization statements go in ILIST, while calls
3536 to destructors go in DLIST. */
3538 static void
3539 lower_rec_input_clauses (tree clauses, gimple_seq *ilist, gimple_seq *dlist,
3540 omp_context *ctx, struct omp_for_data *fd)
3542 tree c, dtor, copyin_seq, x, ptr;
3543 bool copyin_by_ref = false;
3544 bool lastprivate_firstprivate = false;
3545 bool reduction_omp_orig_ref = false;
3546 int pass;
3547 bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
3548 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD);
3549 omplow_simd_context sctx = omplow_simd_context ();
3550 tree simt_lane = NULL_TREE;
3551 tree ivar = NULL_TREE, lvar = NULL_TREE;
3552 gimple_seq llist[3] = { };
3554 copyin_seq = NULL;
3555 sctx.is_simt = is_simd && omp_find_clause (clauses, OMP_CLAUSE__SIMT_);
3557 /* Set max_vf=1 (which will later enforce safelen=1) in simd loops
3558 with data sharing clauses referencing variable sized vars. That
3559 is unnecessarily hard to support and very unlikely to result in
3560 vectorized code anyway. */
3561 if (is_simd)
3562 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
3563 switch (OMP_CLAUSE_CODE (c))
3565 case OMP_CLAUSE_LINEAR:
3566 if (OMP_CLAUSE_LINEAR_ARRAY (c))
3567 sctx.max_vf = 1;
3568 /* FALLTHRU */
3569 case OMP_CLAUSE_PRIVATE:
3570 case OMP_CLAUSE_FIRSTPRIVATE:
3571 case OMP_CLAUSE_LASTPRIVATE:
3572 if (is_variable_sized (OMP_CLAUSE_DECL (c)))
3573 sctx.max_vf = 1;
3574 break;
3575 case OMP_CLAUSE_REDUCTION:
3576 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
3577 || is_variable_sized (OMP_CLAUSE_DECL (c)))
3578 sctx.max_vf = 1;
3579 break;
3580 default:
3581 continue;
3584 /* Do all the fixed sized types in the first pass, and the variable sized
3585 types in the second pass. This makes sure that the scalar arguments to
3586 the variable sized types are processed before we use them in the
3587 variable sized operations. */
3588 for (pass = 0; pass < 2; ++pass)
3590 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
3592 enum omp_clause_code c_kind = OMP_CLAUSE_CODE (c);
3593 tree var, new_var;
3594 bool by_ref;
3595 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
3597 switch (c_kind)
3599 case OMP_CLAUSE_PRIVATE:
3600 if (OMP_CLAUSE_PRIVATE_DEBUG (c))
3601 continue;
3602 break;
3603 case OMP_CLAUSE_SHARED:
3604 /* Ignore shared directives in teams construct. */
3605 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
3606 continue;
3607 if (maybe_lookup_decl (OMP_CLAUSE_DECL (c), ctx) == NULL)
3609 gcc_assert (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c)
3610 || is_global_var (OMP_CLAUSE_DECL (c)));
3611 continue;
3613 case OMP_CLAUSE_FIRSTPRIVATE:
3614 case OMP_CLAUSE_COPYIN:
3615 break;
3616 case OMP_CLAUSE_LINEAR:
3617 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
3618 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
3619 lastprivate_firstprivate = true;
3620 break;
3621 case OMP_CLAUSE_REDUCTION:
3622 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
3623 reduction_omp_orig_ref = true;
3624 break;
3625 case OMP_CLAUSE__LOOPTEMP_:
3626 /* Handle _looptemp_ clauses only on parallel/task. */
3627 if (fd)
3628 continue;
3629 break;
3630 case OMP_CLAUSE_LASTPRIVATE:
3631 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
3633 lastprivate_firstprivate = true;
3634 if (pass != 0 || is_taskloop_ctx (ctx))
3635 continue;
3637 /* Even without corresponding firstprivate, if
3638 decl is Fortran allocatable, it needs outer var
3639 reference. */
3640 else if (pass == 0
3641 && lang_hooks.decls.omp_private_outer_ref
3642 (OMP_CLAUSE_DECL (c)))
3643 lastprivate_firstprivate = true;
3644 break;
3645 case OMP_CLAUSE_ALIGNED:
3646 if (pass == 0)
3647 continue;
3648 var = OMP_CLAUSE_DECL (c);
3649 if (TREE_CODE (TREE_TYPE (var)) == POINTER_TYPE
3650 && !is_global_var (var))
3652 new_var = maybe_lookup_decl (var, ctx);
3653 if (new_var == NULL_TREE)
3654 new_var = maybe_lookup_decl_in_outer_ctx (var, ctx);
3655 x = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
3656 tree alarg = omp_clause_aligned_alignment (c);
3657 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
3658 x = build_call_expr_loc (clause_loc, x, 2, new_var, alarg);
3659 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
3660 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
3661 gimplify_and_add (x, ilist);
3663 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
3664 && is_global_var (var))
3666 tree ptype = build_pointer_type (TREE_TYPE (var)), t, t2;
3667 new_var = lookup_decl (var, ctx);
3668 t = maybe_lookup_decl_in_outer_ctx (var, ctx);
3669 t = build_fold_addr_expr_loc (clause_loc, t);
3670 t2 = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
3671 tree alarg = omp_clause_aligned_alignment (c);
3672 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
3673 t = build_call_expr_loc (clause_loc, t2, 2, t, alarg);
3674 t = fold_convert_loc (clause_loc, ptype, t);
3675 x = create_tmp_var (ptype);
3676 t = build2 (MODIFY_EXPR, ptype, x, t);
3677 gimplify_and_add (t, ilist);
3678 t = build_simple_mem_ref_loc (clause_loc, x);
3679 SET_DECL_VALUE_EXPR (new_var, t);
3680 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
3682 continue;
3683 default:
3684 continue;
3687 new_var = var = OMP_CLAUSE_DECL (c);
3688 if (c_kind == OMP_CLAUSE_REDUCTION && TREE_CODE (var) == MEM_REF)
3690 var = TREE_OPERAND (var, 0);
3691 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
3692 var = TREE_OPERAND (var, 0);
3693 if (TREE_CODE (var) == INDIRECT_REF
3694 || TREE_CODE (var) == ADDR_EXPR)
3695 var = TREE_OPERAND (var, 0);
3696 if (is_variable_sized (var))
3698 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
3699 var = DECL_VALUE_EXPR (var);
3700 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
3701 var = TREE_OPERAND (var, 0);
3702 gcc_assert (DECL_P (var));
3704 new_var = var;
3706 if (c_kind != OMP_CLAUSE_COPYIN)
3707 new_var = lookup_decl (var, ctx);
3709 if (c_kind == OMP_CLAUSE_SHARED || c_kind == OMP_CLAUSE_COPYIN)
3711 if (pass != 0)
3712 continue;
3714 /* C/C++ array section reductions. */
3715 else if (c_kind == OMP_CLAUSE_REDUCTION
3716 && var != OMP_CLAUSE_DECL (c))
3718 if (pass == 0)
3719 continue;
3721 tree bias = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
3722 tree orig_var = TREE_OPERAND (OMP_CLAUSE_DECL (c), 0);
3723 if (TREE_CODE (orig_var) == POINTER_PLUS_EXPR)
3725 tree b = TREE_OPERAND (orig_var, 1);
3726 b = maybe_lookup_decl (b, ctx);
3727 if (b == NULL)
3729 b = TREE_OPERAND (orig_var, 1);
3730 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
3732 if (integer_zerop (bias))
3733 bias = b;
3734 else
3736 bias = fold_convert_loc (clause_loc,
3737 TREE_TYPE (b), bias);
3738 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
3739 TREE_TYPE (b), b, bias);
3741 orig_var = TREE_OPERAND (orig_var, 0);
3743 if (TREE_CODE (orig_var) == INDIRECT_REF
3744 || TREE_CODE (orig_var) == ADDR_EXPR)
3745 orig_var = TREE_OPERAND (orig_var, 0);
3746 tree d = OMP_CLAUSE_DECL (c);
3747 tree type = TREE_TYPE (d);
3748 gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
3749 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
3750 const char *name = get_name (orig_var);
3751 if (TREE_CONSTANT (v))
3753 x = create_tmp_var_raw (type, name);
3754 gimple_add_tmp_var (x);
3755 TREE_ADDRESSABLE (x) = 1;
3756 x = build_fold_addr_expr_loc (clause_loc, x);
3758 else
3760 tree atmp
3761 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
3762 tree t = maybe_lookup_decl (v, ctx);
3763 if (t)
3764 v = t;
3765 else
3766 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
3767 gimplify_expr (&v, ilist, NULL, is_gimple_val, fb_rvalue);
3768 t = fold_build2_loc (clause_loc, PLUS_EXPR,
3769 TREE_TYPE (v), v,
3770 build_int_cst (TREE_TYPE (v), 1));
3771 t = fold_build2_loc (clause_loc, MULT_EXPR,
3772 TREE_TYPE (v), t,
3773 TYPE_SIZE_UNIT (TREE_TYPE (type)));
3774 tree al = size_int (TYPE_ALIGN (TREE_TYPE (type)));
3775 x = build_call_expr_loc (clause_loc, atmp, 2, t, al);
3778 tree ptype = build_pointer_type (TREE_TYPE (type));
3779 x = fold_convert_loc (clause_loc, ptype, x);
3780 tree y = create_tmp_var (ptype, name);
3781 gimplify_assign (y, x, ilist);
3782 x = y;
3783 tree yb = y;
3785 if (!integer_zerop (bias))
3787 bias = fold_convert_loc (clause_loc, pointer_sized_int_node,
3788 bias);
3789 yb = fold_convert_loc (clause_loc, pointer_sized_int_node,
3791 yb = fold_build2_loc (clause_loc, MINUS_EXPR,
3792 pointer_sized_int_node, yb, bias);
3793 x = fold_convert_loc (clause_loc, TREE_TYPE (x), yb);
3794 yb = create_tmp_var (ptype, name);
3795 gimplify_assign (yb, x, ilist);
3796 x = yb;
3799 d = TREE_OPERAND (d, 0);
3800 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
3801 d = TREE_OPERAND (d, 0);
3802 if (TREE_CODE (d) == ADDR_EXPR)
3804 if (orig_var != var)
3806 gcc_assert (is_variable_sized (orig_var));
3807 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var),
3809 gimplify_assign (new_var, x, ilist);
3810 tree new_orig_var = lookup_decl (orig_var, ctx);
3811 tree t = build_fold_indirect_ref (new_var);
3812 DECL_IGNORED_P (new_var) = 0;
3813 TREE_THIS_NOTRAP (t);
3814 SET_DECL_VALUE_EXPR (new_orig_var, t);
3815 DECL_HAS_VALUE_EXPR_P (new_orig_var) = 1;
3817 else
3819 x = build2 (MEM_REF, TREE_TYPE (new_var), x,
3820 build_int_cst (ptype, 0));
3821 SET_DECL_VALUE_EXPR (new_var, x);
3822 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
3825 else
3827 gcc_assert (orig_var == var);
3828 if (TREE_CODE (d) == INDIRECT_REF)
3830 x = create_tmp_var (ptype, name);
3831 TREE_ADDRESSABLE (x) = 1;
3832 gimplify_assign (x, yb, ilist);
3833 x = build_fold_addr_expr_loc (clause_loc, x);
3835 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
3836 gimplify_assign (new_var, x, ilist);
3838 tree y1 = create_tmp_var (ptype, NULL);
3839 gimplify_assign (y1, y, ilist);
3840 tree i2 = NULL_TREE, y2 = NULL_TREE;
3841 tree body2 = NULL_TREE, end2 = NULL_TREE;
3842 tree y3 = NULL_TREE, y4 = NULL_TREE;
3843 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) || is_simd)
3845 y2 = create_tmp_var (ptype, NULL);
3846 gimplify_assign (y2, y, ilist);
3847 tree ref = build_outer_var_ref (var, ctx);
3848 /* For ref build_outer_var_ref already performs this. */
3849 if (TREE_CODE (d) == INDIRECT_REF)
3850 gcc_assert (omp_is_reference (var));
3851 else if (TREE_CODE (d) == ADDR_EXPR)
3852 ref = build_fold_addr_expr (ref);
3853 else if (omp_is_reference (var))
3854 ref = build_fold_addr_expr (ref);
3855 ref = fold_convert_loc (clause_loc, ptype, ref);
3856 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
3857 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
3859 y3 = create_tmp_var (ptype, NULL);
3860 gimplify_assign (y3, unshare_expr (ref), ilist);
3862 if (is_simd)
3864 y4 = create_tmp_var (ptype, NULL);
3865 gimplify_assign (y4, ref, dlist);
3868 tree i = create_tmp_var (TREE_TYPE (v), NULL);
3869 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), ilist);
3870 tree body = create_artificial_label (UNKNOWN_LOCATION);
3871 tree end = create_artificial_label (UNKNOWN_LOCATION);
3872 gimple_seq_add_stmt (ilist, gimple_build_label (body));
3873 if (y2)
3875 i2 = create_tmp_var (TREE_TYPE (v), NULL);
3876 gimplify_assign (i2, build_int_cst (TREE_TYPE (v), 0), dlist);
3877 body2 = create_artificial_label (UNKNOWN_LOCATION);
3878 end2 = create_artificial_label (UNKNOWN_LOCATION);
3879 gimple_seq_add_stmt (dlist, gimple_build_label (body2));
3881 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
3883 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
3884 tree decl_placeholder
3885 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
3886 SET_DECL_VALUE_EXPR (decl_placeholder,
3887 build_simple_mem_ref (y1));
3888 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
3889 SET_DECL_VALUE_EXPR (placeholder,
3890 y3 ? build_simple_mem_ref (y3)
3891 : error_mark_node);
3892 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
3893 x = lang_hooks.decls.omp_clause_default_ctor
3894 (c, build_simple_mem_ref (y1),
3895 y3 ? build_simple_mem_ref (y3) : NULL_TREE);
3896 if (x)
3897 gimplify_and_add (x, ilist);
3898 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
3900 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
3901 lower_omp (&tseq, ctx);
3902 gimple_seq_add_seq (ilist, tseq);
3904 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
3905 if (is_simd)
3907 SET_DECL_VALUE_EXPR (decl_placeholder,
3908 build_simple_mem_ref (y2));
3909 SET_DECL_VALUE_EXPR (placeholder,
3910 build_simple_mem_ref (y4));
3911 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
3912 lower_omp (&tseq, ctx);
3913 gimple_seq_add_seq (dlist, tseq);
3914 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
3916 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
3917 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 0;
3918 x = lang_hooks.decls.omp_clause_dtor
3919 (c, build_simple_mem_ref (y2));
3920 if (x)
3922 gimple_seq tseq = NULL;
3923 dtor = x;
3924 gimplify_stmt (&dtor, &tseq);
3925 gimple_seq_add_seq (dlist, tseq);
3928 else
3930 x = omp_reduction_init (c, TREE_TYPE (type));
3931 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
3933 /* reduction(-:var) sums up the partial results, so it
3934 acts identically to reduction(+:var). */
3935 if (code == MINUS_EXPR)
3936 code = PLUS_EXPR;
3938 gimplify_assign (build_simple_mem_ref (y1), x, ilist);
3939 if (is_simd)
3941 x = build2 (code, TREE_TYPE (type),
3942 build_simple_mem_ref (y4),
3943 build_simple_mem_ref (y2));
3944 gimplify_assign (build_simple_mem_ref (y4), x, dlist);
3947 gimple *g
3948 = gimple_build_assign (y1, POINTER_PLUS_EXPR, y1,
3949 TYPE_SIZE_UNIT (TREE_TYPE (type)));
3950 gimple_seq_add_stmt (ilist, g);
3951 if (y3)
3953 g = gimple_build_assign (y3, POINTER_PLUS_EXPR, y3,
3954 TYPE_SIZE_UNIT (TREE_TYPE (type)));
3955 gimple_seq_add_stmt (ilist, g);
3957 g = gimple_build_assign (i, PLUS_EXPR, i,
3958 build_int_cst (TREE_TYPE (i), 1));
3959 gimple_seq_add_stmt (ilist, g);
3960 g = gimple_build_cond (LE_EXPR, i, v, body, end);
3961 gimple_seq_add_stmt (ilist, g);
3962 gimple_seq_add_stmt (ilist, gimple_build_label (end));
3963 if (y2)
3965 g = gimple_build_assign (y2, POINTER_PLUS_EXPR, y2,
3966 TYPE_SIZE_UNIT (TREE_TYPE (type)));
3967 gimple_seq_add_stmt (dlist, g);
3968 if (y4)
3970 g = gimple_build_assign
3971 (y4, POINTER_PLUS_EXPR, y4,
3972 TYPE_SIZE_UNIT (TREE_TYPE (type)));
3973 gimple_seq_add_stmt (dlist, g);
3975 g = gimple_build_assign (i2, PLUS_EXPR, i2,
3976 build_int_cst (TREE_TYPE (i2), 1));
3977 gimple_seq_add_stmt (dlist, g);
3978 g = gimple_build_cond (LE_EXPR, i2, v, body2, end2);
3979 gimple_seq_add_stmt (dlist, g);
3980 gimple_seq_add_stmt (dlist, gimple_build_label (end2));
3982 continue;
3984 else if (is_variable_sized (var))
3986 /* For variable sized types, we need to allocate the
3987 actual storage here. Call alloca and store the
3988 result in the pointer decl that we created elsewhere. */
3989 if (pass == 0)
3990 continue;
3992 if (c_kind != OMP_CLAUSE_FIRSTPRIVATE || !is_task_ctx (ctx))
3994 gcall *stmt;
3995 tree tmp, atmp;
3997 ptr = DECL_VALUE_EXPR (new_var);
3998 gcc_assert (TREE_CODE (ptr) == INDIRECT_REF);
3999 ptr = TREE_OPERAND (ptr, 0);
4000 gcc_assert (DECL_P (ptr));
4001 x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
4003 /* void *tmp = __builtin_alloca */
4004 atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
4005 stmt = gimple_build_call (atmp, 2, x,
4006 size_int (DECL_ALIGN (var)));
4007 tmp = create_tmp_var_raw (ptr_type_node);
4008 gimple_add_tmp_var (tmp);
4009 gimple_call_set_lhs (stmt, tmp);
4011 gimple_seq_add_stmt (ilist, stmt);
4013 x = fold_convert_loc (clause_loc, TREE_TYPE (ptr), tmp);
4014 gimplify_assign (ptr, x, ilist);
4017 else if (omp_is_reference (var))
4019 /* For references that are being privatized for Fortran,
4020 allocate new backing storage for the new pointer
4021 variable. This allows us to avoid changing all the
4022 code that expects a pointer to something that expects
4023 a direct variable. */
4024 if (pass == 0)
4025 continue;
4027 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
4028 if (c_kind == OMP_CLAUSE_FIRSTPRIVATE && is_task_ctx (ctx))
4030 x = build_receiver_ref (var, false, ctx);
4031 x = build_fold_addr_expr_loc (clause_loc, x);
4033 else if (TREE_CONSTANT (x))
4035 /* For reduction in SIMD loop, defer adding the
4036 initialization of the reference, because if we decide
4037 to use SIMD array for it, the initilization could cause
4038 expansion ICE. */
4039 if (c_kind == OMP_CLAUSE_REDUCTION && is_simd)
4040 x = NULL_TREE;
4041 else
4043 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
4044 get_name (var));
4045 gimple_add_tmp_var (x);
4046 TREE_ADDRESSABLE (x) = 1;
4047 x = build_fold_addr_expr_loc (clause_loc, x);
4050 else
4052 tree atmp
4053 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
4054 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
4055 tree al = size_int (TYPE_ALIGN (rtype));
4056 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
4059 if (x)
4061 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
4062 gimplify_assign (new_var, x, ilist);
4065 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
4067 else if (c_kind == OMP_CLAUSE_REDUCTION
4068 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4070 if (pass == 0)
4071 continue;
4073 else if (pass != 0)
4074 continue;
4076 switch (OMP_CLAUSE_CODE (c))
4078 case OMP_CLAUSE_SHARED:
4079 /* Ignore shared directives in teams construct. */
4080 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
4081 continue;
4082 /* Shared global vars are just accessed directly. */
4083 if (is_global_var (new_var))
4084 break;
4085 /* For taskloop firstprivate/lastprivate, represented
4086 as firstprivate and shared clause on the task, new_var
4087 is the firstprivate var. */
4088 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
4089 break;
4090 /* Set up the DECL_VALUE_EXPR for shared variables now. This
4091 needs to be delayed until after fixup_child_record_type so
4092 that we get the correct type during the dereference. */
4093 by_ref = use_pointer_for_field (var, ctx);
4094 x = build_receiver_ref (var, by_ref, ctx);
4095 SET_DECL_VALUE_EXPR (new_var, x);
4096 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4098 /* ??? If VAR is not passed by reference, and the variable
4099 hasn't been initialized yet, then we'll get a warning for
4100 the store into the omp_data_s structure. Ideally, we'd be
4101 able to notice this and not store anything at all, but
4102 we're generating code too early. Suppress the warning. */
4103 if (!by_ref)
4104 TREE_NO_WARNING (var) = 1;
4105 break;
4107 case OMP_CLAUSE_LASTPRIVATE:
4108 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
4109 break;
4110 /* FALLTHRU */
4112 case OMP_CLAUSE_PRIVATE:
4113 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE)
4114 x = build_outer_var_ref (var, ctx);
4115 else if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
4117 if (is_task_ctx (ctx))
4118 x = build_receiver_ref (var, false, ctx);
4119 else
4120 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_PRIVATE);
4122 else
4123 x = NULL;
4124 do_private:
4125 tree nx;
4126 nx = lang_hooks.decls.omp_clause_default_ctor
4127 (c, unshare_expr (new_var), x);
4128 if (is_simd)
4130 tree y = lang_hooks.decls.omp_clause_dtor (c, new_var);
4131 if ((TREE_ADDRESSABLE (new_var) || nx || y
4132 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
4133 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4134 ivar, lvar))
4136 if (nx)
4137 x = lang_hooks.decls.omp_clause_default_ctor
4138 (c, unshare_expr (ivar), x);
4139 if (nx && x)
4140 gimplify_and_add (x, &llist[0]);
4141 if (y)
4143 y = lang_hooks.decls.omp_clause_dtor (c, ivar);
4144 if (y)
4146 gimple_seq tseq = NULL;
4148 dtor = y;
4149 gimplify_stmt (&dtor, &tseq);
4150 gimple_seq_add_seq (&llist[1], tseq);
4153 break;
4156 if (nx)
4157 gimplify_and_add (nx, ilist);
4158 /* FALLTHRU */
4160 do_dtor:
4161 x = lang_hooks.decls.omp_clause_dtor (c, new_var);
4162 if (x)
4164 gimple_seq tseq = NULL;
4166 dtor = x;
4167 gimplify_stmt (&dtor, &tseq);
4168 gimple_seq_add_seq (dlist, tseq);
4170 break;
4172 case OMP_CLAUSE_LINEAR:
4173 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
4174 goto do_firstprivate;
4175 if (OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
4176 x = NULL;
4177 else
4178 x = build_outer_var_ref (var, ctx);
4179 goto do_private;
4181 case OMP_CLAUSE_FIRSTPRIVATE:
4182 if (is_task_ctx (ctx))
4184 if (omp_is_reference (var) || is_variable_sized (var))
4185 goto do_dtor;
4186 else if (is_global_var (maybe_lookup_decl_in_outer_ctx (var,
4187 ctx))
4188 || use_pointer_for_field (var, NULL))
4190 x = build_receiver_ref (var, false, ctx);
4191 SET_DECL_VALUE_EXPR (new_var, x);
4192 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4193 goto do_dtor;
4196 do_firstprivate:
4197 x = build_outer_var_ref (var, ctx);
4198 if (is_simd)
4200 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
4201 && gimple_omp_for_combined_into_p (ctx->stmt))
4203 tree t = OMP_CLAUSE_LINEAR_STEP (c);
4204 tree stept = TREE_TYPE (t);
4205 tree ct = omp_find_clause (clauses,
4206 OMP_CLAUSE__LOOPTEMP_);
4207 gcc_assert (ct);
4208 tree l = OMP_CLAUSE_DECL (ct);
4209 tree n1 = fd->loop.n1;
4210 tree step = fd->loop.step;
4211 tree itype = TREE_TYPE (l);
4212 if (POINTER_TYPE_P (itype))
4213 itype = signed_type_for (itype);
4214 l = fold_build2 (MINUS_EXPR, itype, l, n1);
4215 if (TYPE_UNSIGNED (itype)
4216 && fd->loop.cond_code == GT_EXPR)
4217 l = fold_build2 (TRUNC_DIV_EXPR, itype,
4218 fold_build1 (NEGATE_EXPR, itype, l),
4219 fold_build1 (NEGATE_EXPR,
4220 itype, step));
4221 else
4222 l = fold_build2 (TRUNC_DIV_EXPR, itype, l, step);
4223 t = fold_build2 (MULT_EXPR, stept,
4224 fold_convert (stept, l), t);
4226 if (OMP_CLAUSE_LINEAR_ARRAY (c))
4228 x = lang_hooks.decls.omp_clause_linear_ctor
4229 (c, new_var, x, t);
4230 gimplify_and_add (x, ilist);
4231 goto do_dtor;
4234 if (POINTER_TYPE_P (TREE_TYPE (x)))
4235 x = fold_build2 (POINTER_PLUS_EXPR,
4236 TREE_TYPE (x), x, t);
4237 else
4238 x = fold_build2 (PLUS_EXPR, TREE_TYPE (x), x, t);
4241 if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LINEAR
4242 || TREE_ADDRESSABLE (new_var))
4243 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4244 ivar, lvar))
4246 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR)
4248 tree iv = create_tmp_var (TREE_TYPE (new_var));
4249 x = lang_hooks.decls.omp_clause_copy_ctor (c, iv, x);
4250 gimplify_and_add (x, ilist);
4251 gimple_stmt_iterator gsi
4252 = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
4253 gassign *g
4254 = gimple_build_assign (unshare_expr (lvar), iv);
4255 gsi_insert_before_without_update (&gsi, g,
4256 GSI_SAME_STMT);
4257 tree t = OMP_CLAUSE_LINEAR_STEP (c);
4258 enum tree_code code = PLUS_EXPR;
4259 if (POINTER_TYPE_P (TREE_TYPE (new_var)))
4260 code = POINTER_PLUS_EXPR;
4261 g = gimple_build_assign (iv, code, iv, t);
4262 gsi_insert_before_without_update (&gsi, g,
4263 GSI_SAME_STMT);
4264 break;
4266 x = lang_hooks.decls.omp_clause_copy_ctor
4267 (c, unshare_expr (ivar), x);
4268 gimplify_and_add (x, &llist[0]);
4269 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
4270 if (x)
4272 gimple_seq tseq = NULL;
4274 dtor = x;
4275 gimplify_stmt (&dtor, &tseq);
4276 gimple_seq_add_seq (&llist[1], tseq);
4278 break;
4281 x = lang_hooks.decls.omp_clause_copy_ctor
4282 (c, unshare_expr (new_var), x);
4283 gimplify_and_add (x, ilist);
4284 goto do_dtor;
4286 case OMP_CLAUSE__LOOPTEMP_:
4287 gcc_assert (is_taskreg_ctx (ctx));
4288 x = build_outer_var_ref (var, ctx);
4289 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
4290 gimplify_and_add (x, ilist);
4291 break;
4293 case OMP_CLAUSE_COPYIN:
4294 by_ref = use_pointer_for_field (var, NULL);
4295 x = build_receiver_ref (var, by_ref, ctx);
4296 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, x);
4297 append_to_statement_list (x, &copyin_seq);
4298 copyin_by_ref |= by_ref;
4299 break;
4301 case OMP_CLAUSE_REDUCTION:
4302 /* OpenACC reductions are initialized using the
4303 GOACC_REDUCTION internal function. */
4304 if (is_gimple_omp_oacc (ctx->stmt))
4305 break;
4306 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4308 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
4309 gimple *tseq;
4310 x = build_outer_var_ref (var, ctx);
4312 if (omp_is_reference (var)
4313 && !useless_type_conversion_p (TREE_TYPE (placeholder),
4314 TREE_TYPE (x)))
4315 x = build_fold_addr_expr_loc (clause_loc, x);
4316 SET_DECL_VALUE_EXPR (placeholder, x);
4317 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
4318 tree new_vard = new_var;
4319 if (omp_is_reference (var))
4321 gcc_assert (TREE_CODE (new_var) == MEM_REF);
4322 new_vard = TREE_OPERAND (new_var, 0);
4323 gcc_assert (DECL_P (new_vard));
4325 if (is_simd
4326 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4327 ivar, lvar))
4329 if (new_vard == new_var)
4331 gcc_assert (DECL_VALUE_EXPR (new_var) == lvar);
4332 SET_DECL_VALUE_EXPR (new_var, ivar);
4334 else
4336 SET_DECL_VALUE_EXPR (new_vard,
4337 build_fold_addr_expr (ivar));
4338 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
4340 x = lang_hooks.decls.omp_clause_default_ctor
4341 (c, unshare_expr (ivar),
4342 build_outer_var_ref (var, ctx));
4343 if (x)
4344 gimplify_and_add (x, &llist[0]);
4345 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
4347 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
4348 lower_omp (&tseq, ctx);
4349 gimple_seq_add_seq (&llist[0], tseq);
4351 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
4352 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
4353 lower_omp (&tseq, ctx);
4354 gimple_seq_add_seq (&llist[1], tseq);
4355 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
4356 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
4357 if (new_vard == new_var)
4358 SET_DECL_VALUE_EXPR (new_var, lvar);
4359 else
4360 SET_DECL_VALUE_EXPR (new_vard,
4361 build_fold_addr_expr (lvar));
4362 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
4363 if (x)
4365 tseq = NULL;
4366 dtor = x;
4367 gimplify_stmt (&dtor, &tseq);
4368 gimple_seq_add_seq (&llist[1], tseq);
4370 break;
4372 /* If this is a reference to constant size reduction var
4373 with placeholder, we haven't emitted the initializer
4374 for it because it is undesirable if SIMD arrays are used.
4375 But if they aren't used, we need to emit the deferred
4376 initialization now. */
4377 else if (omp_is_reference (var) && is_simd)
4378 handle_simd_reference (clause_loc, new_vard, ilist);
4379 x = lang_hooks.decls.omp_clause_default_ctor
4380 (c, unshare_expr (new_var),
4381 build_outer_var_ref (var, ctx));
4382 if (x)
4383 gimplify_and_add (x, ilist);
4384 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
4386 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
4387 lower_omp (&tseq, ctx);
4388 gimple_seq_add_seq (ilist, tseq);
4390 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
4391 if (is_simd)
4393 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
4394 lower_omp (&tseq, ctx);
4395 gimple_seq_add_seq (dlist, tseq);
4396 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
4398 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
4399 goto do_dtor;
4401 else
4403 x = omp_reduction_init (c, TREE_TYPE (new_var));
4404 gcc_assert (TREE_CODE (TREE_TYPE (new_var)) != ARRAY_TYPE);
4405 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
4407 /* reduction(-:var) sums up the partial results, so it
4408 acts identically to reduction(+:var). */
4409 if (code == MINUS_EXPR)
4410 code = PLUS_EXPR;
4412 tree new_vard = new_var;
4413 if (is_simd && omp_is_reference (var))
4415 gcc_assert (TREE_CODE (new_var) == MEM_REF);
4416 new_vard = TREE_OPERAND (new_var, 0);
4417 gcc_assert (DECL_P (new_vard));
4419 if (is_simd
4420 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4421 ivar, lvar))
4423 tree ref = build_outer_var_ref (var, ctx);
4425 gimplify_assign (unshare_expr (ivar), x, &llist[0]);
4427 if (sctx.is_simt)
4429 if (!simt_lane)
4430 simt_lane = create_tmp_var (unsigned_type_node);
4431 x = build_call_expr_internal_loc
4432 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_BFLY,
4433 TREE_TYPE (ivar), 2, ivar, simt_lane);
4434 x = build2 (code, TREE_TYPE (ivar), ivar, x);
4435 gimplify_assign (ivar, x, &llist[2]);
4437 x = build2 (code, TREE_TYPE (ref), ref, ivar);
4438 ref = build_outer_var_ref (var, ctx);
4439 gimplify_assign (ref, x, &llist[1]);
4441 if (new_vard != new_var)
4443 SET_DECL_VALUE_EXPR (new_vard,
4444 build_fold_addr_expr (lvar));
4445 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
4448 else
4450 if (omp_is_reference (var) && is_simd)
4451 handle_simd_reference (clause_loc, new_vard, ilist);
4452 gimplify_assign (new_var, x, ilist);
4453 if (is_simd)
4455 tree ref = build_outer_var_ref (var, ctx);
4457 x = build2 (code, TREE_TYPE (ref), ref, new_var);
4458 ref = build_outer_var_ref (var, ctx);
4459 gimplify_assign (ref, x, dlist);
4463 break;
4465 default:
4466 gcc_unreachable ();
4471 if (sctx.lane)
4473 tree uid = create_tmp_var (ptr_type_node, "simduid");
4474 /* Don't want uninit warnings on simduid, it is always uninitialized,
4475 but we use it not for the value, but for the DECL_UID only. */
4476 TREE_NO_WARNING (uid) = 1;
4477 gimple *g
4478 = gimple_build_call_internal (IFN_GOMP_SIMD_LANE, 1, uid);
4479 gimple_call_set_lhs (g, sctx.lane);
4480 gimple_stmt_iterator gsi = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
4481 gsi_insert_before_without_update (&gsi, g, GSI_SAME_STMT);
4482 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SIMDUID_);
4483 OMP_CLAUSE__SIMDUID__DECL (c) = uid;
4484 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
4485 gimple_omp_for_set_clauses (ctx->stmt, c);
4486 g = gimple_build_assign (sctx.lane, INTEGER_CST,
4487 build_int_cst (unsigned_type_node, 0));
4488 gimple_seq_add_stmt (ilist, g);
4489 /* Emit reductions across SIMT lanes in log_2(simt_vf) steps. */
4490 if (llist[2])
4492 tree simt_vf = create_tmp_var (unsigned_type_node);
4493 g = gimple_build_call_internal (IFN_GOMP_SIMT_VF, 0);
4494 gimple_call_set_lhs (g, simt_vf);
4495 gimple_seq_add_stmt (dlist, g);
4497 tree t = build_int_cst (unsigned_type_node, 1);
4498 g = gimple_build_assign (simt_lane, INTEGER_CST, t);
4499 gimple_seq_add_stmt (dlist, g);
4501 t = build_int_cst (unsigned_type_node, 0);
4502 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
4503 gimple_seq_add_stmt (dlist, g);
4505 tree body = create_artificial_label (UNKNOWN_LOCATION);
4506 tree header = create_artificial_label (UNKNOWN_LOCATION);
4507 tree end = create_artificial_label (UNKNOWN_LOCATION);
4508 gimple_seq_add_stmt (dlist, gimple_build_goto (header));
4509 gimple_seq_add_stmt (dlist, gimple_build_label (body));
4511 gimple_seq_add_seq (dlist, llist[2]);
4513 g = gimple_build_assign (simt_lane, LSHIFT_EXPR, simt_lane, integer_one_node);
4514 gimple_seq_add_stmt (dlist, g);
4516 gimple_seq_add_stmt (dlist, gimple_build_label (header));
4517 g = gimple_build_cond (LT_EXPR, simt_lane, simt_vf, body, end);
4518 gimple_seq_add_stmt (dlist, g);
4520 gimple_seq_add_stmt (dlist, gimple_build_label (end));
4522 for (int i = 0; i < 2; i++)
4523 if (llist[i])
4525 tree vf = create_tmp_var (unsigned_type_node);
4526 g = gimple_build_call_internal (IFN_GOMP_SIMD_VF, 1, uid);
4527 gimple_call_set_lhs (g, vf);
4528 gimple_seq *seq = i == 0 ? ilist : dlist;
4529 gimple_seq_add_stmt (seq, g);
4530 tree t = build_int_cst (unsigned_type_node, 0);
4531 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
4532 gimple_seq_add_stmt (seq, g);
4533 tree body = create_artificial_label (UNKNOWN_LOCATION);
4534 tree header = create_artificial_label (UNKNOWN_LOCATION);
4535 tree end = create_artificial_label (UNKNOWN_LOCATION);
4536 gimple_seq_add_stmt (seq, gimple_build_goto (header));
4537 gimple_seq_add_stmt (seq, gimple_build_label (body));
4538 gimple_seq_add_seq (seq, llist[i]);
4539 t = build_int_cst (unsigned_type_node, 1);
4540 g = gimple_build_assign (sctx.idx, PLUS_EXPR, sctx.idx, t);
4541 gimple_seq_add_stmt (seq, g);
4542 gimple_seq_add_stmt (seq, gimple_build_label (header));
4543 g = gimple_build_cond (LT_EXPR, sctx.idx, vf, body, end);
4544 gimple_seq_add_stmt (seq, g);
4545 gimple_seq_add_stmt (seq, gimple_build_label (end));
4549 /* The copyin sequence is not to be executed by the main thread, since
4550 that would result in self-copies. Perhaps not visible to scalars,
4551 but it certainly is to C++ operator=. */
4552 if (copyin_seq)
4554 x = build_call_expr (builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM),
4556 x = build2 (NE_EXPR, boolean_type_node, x,
4557 build_int_cst (TREE_TYPE (x), 0));
4558 x = build3 (COND_EXPR, void_type_node, x, copyin_seq, NULL);
4559 gimplify_and_add (x, ilist);
4562 /* If any copyin variable is passed by reference, we must ensure the
4563 master thread doesn't modify it before it is copied over in all
4564 threads. Similarly for variables in both firstprivate and
4565 lastprivate clauses we need to ensure the lastprivate copying
4566 happens after firstprivate copying in all threads. And similarly
4567 for UDRs if initializer expression refers to omp_orig. */
4568 if (copyin_by_ref || lastprivate_firstprivate || reduction_omp_orig_ref)
4570 /* Don't add any barrier for #pragma omp simd or
4571 #pragma omp distribute. */
4572 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
4573 || gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_FOR)
4574 gimple_seq_add_stmt (ilist, omp_build_barrier (NULL_TREE));
4577 /* If max_vf is non-zero, then we can use only a vectorization factor
4578 up to the max_vf we chose. So stick it into the safelen clause. */
4579 if (sctx.max_vf)
4581 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
4582 OMP_CLAUSE_SAFELEN);
4583 if (c == NULL_TREE
4584 || (TREE_CODE (OMP_CLAUSE_SAFELEN_EXPR (c)) == INTEGER_CST
4585 && compare_tree_int (OMP_CLAUSE_SAFELEN_EXPR (c),
4586 sctx.max_vf) == 1))
4588 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN);
4589 OMP_CLAUSE_SAFELEN_EXPR (c) = build_int_cst (integer_type_node,
4590 sctx.max_vf);
4591 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
4592 gimple_omp_for_set_clauses (ctx->stmt, c);
4598 /* Generate code to implement the LASTPRIVATE clauses. This is used for
4599 both parallel and workshare constructs. PREDICATE may be NULL if it's
4600 always true. */
4602 static void
4603 lower_lastprivate_clauses (tree clauses, tree predicate, gimple_seq *stmt_list,
4604 omp_context *ctx)
4606 tree x, c, label = NULL, orig_clauses = clauses;
4607 bool par_clauses = false;
4608 tree simduid = NULL, lastlane = NULL, simtcond = NULL, simtlast = NULL;
4610 /* Early exit if there are no lastprivate or linear clauses. */
4611 for (; clauses ; clauses = OMP_CLAUSE_CHAIN (clauses))
4612 if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LASTPRIVATE
4613 || (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LINEAR
4614 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (clauses)))
4615 break;
4616 if (clauses == NULL)
4618 /* If this was a workshare clause, see if it had been combined
4619 with its parallel. In that case, look for the clauses on the
4620 parallel statement itself. */
4621 if (is_parallel_ctx (ctx))
4622 return;
4624 ctx = ctx->outer;
4625 if (ctx == NULL || !is_parallel_ctx (ctx))
4626 return;
4628 clauses = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
4629 OMP_CLAUSE_LASTPRIVATE);
4630 if (clauses == NULL)
4631 return;
4632 par_clauses = true;
4635 bool maybe_simt = false;
4636 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
4637 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
4639 maybe_simt = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMT_);
4640 simduid = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMDUID_);
4641 if (simduid)
4642 simduid = OMP_CLAUSE__SIMDUID__DECL (simduid);
4645 if (predicate)
4647 gcond *stmt;
4648 tree label_true, arm1, arm2;
4649 enum tree_code pred_code = TREE_CODE (predicate);
4651 label = create_artificial_label (UNKNOWN_LOCATION);
4652 label_true = create_artificial_label (UNKNOWN_LOCATION);
4653 if (TREE_CODE_CLASS (pred_code) == tcc_comparison)
4655 arm1 = TREE_OPERAND (predicate, 0);
4656 arm2 = TREE_OPERAND (predicate, 1);
4657 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
4658 gimplify_expr (&arm2, stmt_list, NULL, is_gimple_val, fb_rvalue);
4660 else
4662 arm1 = predicate;
4663 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
4664 arm2 = boolean_false_node;
4665 pred_code = NE_EXPR;
4667 if (maybe_simt)
4669 c = build2 (pred_code, boolean_type_node, arm1, arm2);
4670 c = fold_convert (integer_type_node, c);
4671 simtcond = create_tmp_var (integer_type_node);
4672 gimplify_assign (simtcond, c, stmt_list);
4673 gcall *g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY,
4674 1, simtcond);
4675 c = create_tmp_var (integer_type_node);
4676 gimple_call_set_lhs (g, c);
4677 gimple_seq_add_stmt (stmt_list, g);
4678 stmt = gimple_build_cond (NE_EXPR, c, integer_zero_node,
4679 label_true, label);
4681 else
4682 stmt = gimple_build_cond (pred_code, arm1, arm2, label_true, label);
4683 gimple_seq_add_stmt (stmt_list, stmt);
4684 gimple_seq_add_stmt (stmt_list, gimple_build_label (label_true));
4687 for (c = clauses; c ;)
4689 tree var, new_var;
4690 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
4692 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4693 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
4694 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
4696 var = OMP_CLAUSE_DECL (c);
4697 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4698 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
4699 && is_taskloop_ctx (ctx))
4701 gcc_checking_assert (ctx->outer && is_task_ctx (ctx->outer));
4702 new_var = lookup_decl (var, ctx->outer);
4704 else
4706 new_var = lookup_decl (var, ctx);
4707 /* Avoid uninitialized warnings for lastprivate and
4708 for linear iterators. */
4709 if (predicate
4710 && (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4711 || OMP_CLAUSE_LINEAR_NO_COPYIN (c)))
4712 TREE_NO_WARNING (new_var) = 1;
4715 if (simduid && DECL_HAS_VALUE_EXPR_P (new_var))
4717 tree val = DECL_VALUE_EXPR (new_var);
4718 if (TREE_CODE (val) == ARRAY_REF
4719 && VAR_P (TREE_OPERAND (val, 0))
4720 && lookup_attribute ("omp simd array",
4721 DECL_ATTRIBUTES (TREE_OPERAND (val,
4722 0))))
4724 if (lastlane == NULL)
4726 lastlane = create_tmp_var (unsigned_type_node);
4727 gcall *g
4728 = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
4729 2, simduid,
4730 TREE_OPERAND (val, 1));
4731 gimple_call_set_lhs (g, lastlane);
4732 gimple_seq_add_stmt (stmt_list, g);
4734 new_var = build4 (ARRAY_REF, TREE_TYPE (val),
4735 TREE_OPERAND (val, 0), lastlane,
4736 NULL_TREE, NULL_TREE);
4737 if (maybe_simt)
4739 gcall *g;
4740 if (simtlast == NULL)
4742 simtlast = create_tmp_var (unsigned_type_node);
4743 g = gimple_build_call_internal
4744 (IFN_GOMP_SIMT_LAST_LANE, 1, simtcond);
4745 gimple_call_set_lhs (g, simtlast);
4746 gimple_seq_add_stmt (stmt_list, g);
4748 x = build_call_expr_internal_loc
4749 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_IDX,
4750 TREE_TYPE (new_var), 2, new_var, simtlast);
4751 new_var = unshare_expr (new_var);
4752 gimplify_assign (new_var, x, stmt_list);
4753 new_var = unshare_expr (new_var);
4758 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4759 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
4761 lower_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
4762 gimple_seq_add_seq (stmt_list,
4763 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
4764 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) = NULL;
4766 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
4767 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
4769 lower_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
4770 gimple_seq_add_seq (stmt_list,
4771 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
4772 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) = NULL;
4775 x = NULL_TREE;
4776 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4777 && OMP_CLAUSE_LASTPRIVATE_TASKLOOP_IV (c))
4779 gcc_checking_assert (is_taskloop_ctx (ctx));
4780 tree ovar = maybe_lookup_decl_in_outer_ctx (var,
4781 ctx->outer->outer);
4782 if (is_global_var (ovar))
4783 x = ovar;
4785 if (!x)
4786 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_LASTPRIVATE);
4787 if (omp_is_reference (var))
4788 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
4789 x = lang_hooks.decls.omp_clause_assign_op (c, x, new_var);
4790 gimplify_and_add (x, stmt_list);
4792 c = OMP_CLAUSE_CHAIN (c);
4793 if (c == NULL && !par_clauses)
4795 /* If this was a workshare clause, see if it had been combined
4796 with its parallel. In that case, continue looking for the
4797 clauses also on the parallel statement itself. */
4798 if (is_parallel_ctx (ctx))
4799 break;
4801 ctx = ctx->outer;
4802 if (ctx == NULL || !is_parallel_ctx (ctx))
4803 break;
4805 c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
4806 OMP_CLAUSE_LASTPRIVATE);
4807 par_clauses = true;
4811 if (label)
4812 gimple_seq_add_stmt (stmt_list, gimple_build_label (label));
4815 /* Lower the OpenACC reductions of CLAUSES for compute axis LEVEL
4816 (which might be a placeholder). INNER is true if this is an inner
4817 axis of a multi-axis loop. FORK and JOIN are (optional) fork and
4818 join markers. Generate the before-loop forking sequence in
4819 FORK_SEQ and the after-loop joining sequence to JOIN_SEQ. The
4820 general form of these sequences is
4822 GOACC_REDUCTION_SETUP
4823 GOACC_FORK
4824 GOACC_REDUCTION_INIT
4826 GOACC_REDUCTION_FINI
4827 GOACC_JOIN
4828 GOACC_REDUCTION_TEARDOWN. */
4830 static void
4831 lower_oacc_reductions (location_t loc, tree clauses, tree level, bool inner,
4832 gcall *fork, gcall *join, gimple_seq *fork_seq,
4833 gimple_seq *join_seq, omp_context *ctx)
4835 gimple_seq before_fork = NULL;
4836 gimple_seq after_fork = NULL;
4837 gimple_seq before_join = NULL;
4838 gimple_seq after_join = NULL;
4839 tree init_code = NULL_TREE, fini_code = NULL_TREE,
4840 setup_code = NULL_TREE, teardown_code = NULL_TREE;
4841 unsigned offset = 0;
4843 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
4844 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
4846 tree orig = OMP_CLAUSE_DECL (c);
4847 tree var = maybe_lookup_decl (orig, ctx);
4848 tree ref_to_res = NULL_TREE;
4849 tree incoming, outgoing, v1, v2, v3;
4850 bool is_private = false;
4852 enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
4853 if (rcode == MINUS_EXPR)
4854 rcode = PLUS_EXPR;
4855 else if (rcode == TRUTH_ANDIF_EXPR)
4856 rcode = BIT_AND_EXPR;
4857 else if (rcode == TRUTH_ORIF_EXPR)
4858 rcode = BIT_IOR_EXPR;
4859 tree op = build_int_cst (unsigned_type_node, rcode);
4861 if (!var)
4862 var = orig;
4864 incoming = outgoing = var;
4866 if (!inner)
4868 /* See if an outer construct also reduces this variable. */
4869 omp_context *outer = ctx;
4871 while (omp_context *probe = outer->outer)
4873 enum gimple_code type = gimple_code (probe->stmt);
4874 tree cls;
4876 switch (type)
4878 case GIMPLE_OMP_FOR:
4879 cls = gimple_omp_for_clauses (probe->stmt);
4880 break;
4882 case GIMPLE_OMP_TARGET:
4883 if (gimple_omp_target_kind (probe->stmt)
4884 != GF_OMP_TARGET_KIND_OACC_PARALLEL)
4885 goto do_lookup;
4887 cls = gimple_omp_target_clauses (probe->stmt);
4888 break;
4890 default:
4891 goto do_lookup;
4894 outer = probe;
4895 for (; cls; cls = OMP_CLAUSE_CHAIN (cls))
4896 if (OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_REDUCTION
4897 && orig == OMP_CLAUSE_DECL (cls))
4899 incoming = outgoing = lookup_decl (orig, probe);
4900 goto has_outer_reduction;
4902 else if ((OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_FIRSTPRIVATE
4903 || OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_PRIVATE)
4904 && orig == OMP_CLAUSE_DECL (cls))
4906 is_private = true;
4907 goto do_lookup;
4911 do_lookup:
4912 /* This is the outermost construct with this reduction,
4913 see if there's a mapping for it. */
4914 if (gimple_code (outer->stmt) == GIMPLE_OMP_TARGET
4915 && maybe_lookup_field (orig, outer) && !is_private)
4917 ref_to_res = build_receiver_ref (orig, false, outer);
4918 if (omp_is_reference (orig))
4919 ref_to_res = build_simple_mem_ref (ref_to_res);
4921 tree type = TREE_TYPE (var);
4922 if (POINTER_TYPE_P (type))
4923 type = TREE_TYPE (type);
4925 outgoing = var;
4926 incoming = omp_reduction_init_op (loc, rcode, type);
4928 else
4930 /* Try to look at enclosing contexts for reduction var,
4931 use original if no mapping found. */
4932 tree t = NULL_TREE;
4933 omp_context *c = ctx->outer;
4934 while (c && !t)
4936 t = maybe_lookup_decl (orig, c);
4937 c = c->outer;
4939 incoming = outgoing = (t ? t : orig);
4942 has_outer_reduction:;
4945 if (!ref_to_res)
4946 ref_to_res = integer_zero_node;
4948 if (omp_is_reference (orig))
4950 tree type = TREE_TYPE (var);
4951 const char *id = IDENTIFIER_POINTER (DECL_NAME (var));
4953 if (!inner)
4955 tree x = create_tmp_var (TREE_TYPE (type), id);
4956 gimplify_assign (var, build_fold_addr_expr (x), fork_seq);
4959 v1 = create_tmp_var (type, id);
4960 v2 = create_tmp_var (type, id);
4961 v3 = create_tmp_var (type, id);
4963 gimplify_assign (v1, var, fork_seq);
4964 gimplify_assign (v2, var, fork_seq);
4965 gimplify_assign (v3, var, fork_seq);
4967 var = build_simple_mem_ref (var);
4968 v1 = build_simple_mem_ref (v1);
4969 v2 = build_simple_mem_ref (v2);
4970 v3 = build_simple_mem_ref (v3);
4971 outgoing = build_simple_mem_ref (outgoing);
4973 if (!TREE_CONSTANT (incoming))
4974 incoming = build_simple_mem_ref (incoming);
4976 else
4977 v1 = v2 = v3 = var;
4979 /* Determine position in reduction buffer, which may be used
4980 by target. */
4981 enum machine_mode mode = TYPE_MODE (TREE_TYPE (var));
4982 unsigned align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
4983 offset = (offset + align - 1) & ~(align - 1);
4984 tree off = build_int_cst (sizetype, offset);
4985 offset += GET_MODE_SIZE (mode);
4987 if (!init_code)
4989 init_code = build_int_cst (integer_type_node,
4990 IFN_GOACC_REDUCTION_INIT);
4991 fini_code = build_int_cst (integer_type_node,
4992 IFN_GOACC_REDUCTION_FINI);
4993 setup_code = build_int_cst (integer_type_node,
4994 IFN_GOACC_REDUCTION_SETUP);
4995 teardown_code = build_int_cst (integer_type_node,
4996 IFN_GOACC_REDUCTION_TEARDOWN);
4999 tree setup_call
5000 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5001 TREE_TYPE (var), 6, setup_code,
5002 unshare_expr (ref_to_res),
5003 incoming, level, op, off);
5004 tree init_call
5005 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5006 TREE_TYPE (var), 6, init_code,
5007 unshare_expr (ref_to_res),
5008 v1, level, op, off);
5009 tree fini_call
5010 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5011 TREE_TYPE (var), 6, fini_code,
5012 unshare_expr (ref_to_res),
5013 v2, level, op, off);
5014 tree teardown_call
5015 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5016 TREE_TYPE (var), 6, teardown_code,
5017 ref_to_res, v3, level, op, off);
5019 gimplify_assign (v1, setup_call, &before_fork);
5020 gimplify_assign (v2, init_call, &after_fork);
5021 gimplify_assign (v3, fini_call, &before_join);
5022 gimplify_assign (outgoing, teardown_call, &after_join);
5025 /* Now stitch things together. */
5026 gimple_seq_add_seq (fork_seq, before_fork);
5027 if (fork)
5028 gimple_seq_add_stmt (fork_seq, fork);
5029 gimple_seq_add_seq (fork_seq, after_fork);
5031 gimple_seq_add_seq (join_seq, before_join);
5032 if (join)
5033 gimple_seq_add_stmt (join_seq, join);
5034 gimple_seq_add_seq (join_seq, after_join);
5037 /* Generate code to implement the REDUCTION clauses. */
5039 static void
5040 lower_reduction_clauses (tree clauses, gimple_seq *stmt_seqp, omp_context *ctx)
5042 gimple_seq sub_seq = NULL;
5043 gimple *stmt;
5044 tree x, c;
5045 int count = 0;
5047 /* OpenACC loop reductions are handled elsewhere. */
5048 if (is_gimple_omp_oacc (ctx->stmt))
5049 return;
5051 /* SIMD reductions are handled in lower_rec_input_clauses. */
5052 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
5053 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
5054 return;
5056 /* First see if there is exactly one reduction clause. Use OMP_ATOMIC
5057 update in that case, otherwise use a lock. */
5058 for (c = clauses; c && count < 2; c = OMP_CLAUSE_CHAIN (c))
5059 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
5061 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
5062 || TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
5064 /* Never use OMP_ATOMIC for array reductions or UDRs. */
5065 count = -1;
5066 break;
5068 count++;
5071 if (count == 0)
5072 return;
5074 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
5076 tree var, ref, new_var, orig_var;
5077 enum tree_code code;
5078 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
5080 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5081 continue;
5083 orig_var = var = OMP_CLAUSE_DECL (c);
5084 if (TREE_CODE (var) == MEM_REF)
5086 var = TREE_OPERAND (var, 0);
5087 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
5088 var = TREE_OPERAND (var, 0);
5089 if (TREE_CODE (var) == INDIRECT_REF
5090 || TREE_CODE (var) == ADDR_EXPR)
5091 var = TREE_OPERAND (var, 0);
5092 orig_var = var;
5093 if (is_variable_sized (var))
5095 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
5096 var = DECL_VALUE_EXPR (var);
5097 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
5098 var = TREE_OPERAND (var, 0);
5099 gcc_assert (DECL_P (var));
5102 new_var = lookup_decl (var, ctx);
5103 if (var == OMP_CLAUSE_DECL (c) && omp_is_reference (var))
5104 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
5105 ref = build_outer_var_ref (var, ctx);
5106 code = OMP_CLAUSE_REDUCTION_CODE (c);
5108 /* reduction(-:var) sums up the partial results, so it acts
5109 identically to reduction(+:var). */
5110 if (code == MINUS_EXPR)
5111 code = PLUS_EXPR;
5113 if (count == 1)
5115 tree addr = build_fold_addr_expr_loc (clause_loc, ref);
5117 addr = save_expr (addr);
5118 ref = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (addr)), addr);
5119 x = fold_build2_loc (clause_loc, code, TREE_TYPE (ref), ref, new_var);
5120 x = build2 (OMP_ATOMIC, void_type_node, addr, x);
5121 gimplify_and_add (x, stmt_seqp);
5122 return;
5124 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
5126 tree d = OMP_CLAUSE_DECL (c);
5127 tree type = TREE_TYPE (d);
5128 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
5129 tree i = create_tmp_var (TREE_TYPE (v), NULL);
5130 tree ptype = build_pointer_type (TREE_TYPE (type));
5131 tree bias = TREE_OPERAND (d, 1);
5132 d = TREE_OPERAND (d, 0);
5133 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
5135 tree b = TREE_OPERAND (d, 1);
5136 b = maybe_lookup_decl (b, ctx);
5137 if (b == NULL)
5139 b = TREE_OPERAND (d, 1);
5140 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
5142 if (integer_zerop (bias))
5143 bias = b;
5144 else
5146 bias = fold_convert_loc (clause_loc, TREE_TYPE (b), bias);
5147 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
5148 TREE_TYPE (b), b, bias);
5150 d = TREE_OPERAND (d, 0);
5152 /* For ref build_outer_var_ref already performs this, so
5153 only new_var needs a dereference. */
5154 if (TREE_CODE (d) == INDIRECT_REF)
5156 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
5157 gcc_assert (omp_is_reference (var) && var == orig_var);
5159 else if (TREE_CODE (d) == ADDR_EXPR)
5161 if (orig_var == var)
5163 new_var = build_fold_addr_expr (new_var);
5164 ref = build_fold_addr_expr (ref);
5167 else
5169 gcc_assert (orig_var == var);
5170 if (omp_is_reference (var))
5171 ref = build_fold_addr_expr (ref);
5173 if (DECL_P (v))
5175 tree t = maybe_lookup_decl (v, ctx);
5176 if (t)
5177 v = t;
5178 else
5179 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
5180 gimplify_expr (&v, stmt_seqp, NULL, is_gimple_val, fb_rvalue);
5182 if (!integer_zerop (bias))
5184 bias = fold_convert_loc (clause_loc, sizetype, bias);
5185 new_var = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
5186 TREE_TYPE (new_var), new_var,
5187 unshare_expr (bias));
5188 ref = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
5189 TREE_TYPE (ref), ref, bias);
5191 new_var = fold_convert_loc (clause_loc, ptype, new_var);
5192 ref = fold_convert_loc (clause_loc, ptype, ref);
5193 tree m = create_tmp_var (ptype, NULL);
5194 gimplify_assign (m, new_var, stmt_seqp);
5195 new_var = m;
5196 m = create_tmp_var (ptype, NULL);
5197 gimplify_assign (m, ref, stmt_seqp);
5198 ref = m;
5199 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), stmt_seqp);
5200 tree body = create_artificial_label (UNKNOWN_LOCATION);
5201 tree end = create_artificial_label (UNKNOWN_LOCATION);
5202 gimple_seq_add_stmt (&sub_seq, gimple_build_label (body));
5203 tree priv = build_simple_mem_ref_loc (clause_loc, new_var);
5204 tree out = build_simple_mem_ref_loc (clause_loc, ref);
5205 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5207 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
5208 tree decl_placeholder
5209 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
5210 SET_DECL_VALUE_EXPR (placeholder, out);
5211 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
5212 SET_DECL_VALUE_EXPR (decl_placeholder, priv);
5213 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
5214 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
5215 gimple_seq_add_seq (&sub_seq,
5216 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
5217 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5218 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
5219 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
5221 else
5223 x = build2 (code, TREE_TYPE (out), out, priv);
5224 out = unshare_expr (out);
5225 gimplify_assign (out, x, &sub_seq);
5227 gimple *g = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
5228 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5229 gimple_seq_add_stmt (&sub_seq, g);
5230 g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
5231 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5232 gimple_seq_add_stmt (&sub_seq, g);
5233 g = gimple_build_assign (i, PLUS_EXPR, i,
5234 build_int_cst (TREE_TYPE (i), 1));
5235 gimple_seq_add_stmt (&sub_seq, g);
5236 g = gimple_build_cond (LE_EXPR, i, v, body, end);
5237 gimple_seq_add_stmt (&sub_seq, g);
5238 gimple_seq_add_stmt (&sub_seq, gimple_build_label (end));
5240 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5242 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
5244 if (omp_is_reference (var)
5245 && !useless_type_conversion_p (TREE_TYPE (placeholder),
5246 TREE_TYPE (ref)))
5247 ref = build_fold_addr_expr_loc (clause_loc, ref);
5248 SET_DECL_VALUE_EXPR (placeholder, ref);
5249 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
5250 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
5251 gimple_seq_add_seq (&sub_seq, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
5252 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5253 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
5255 else
5257 x = build2 (code, TREE_TYPE (ref), ref, new_var);
5258 ref = build_outer_var_ref (var, ctx);
5259 gimplify_assign (ref, x, &sub_seq);
5263 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START),
5265 gimple_seq_add_stmt (stmt_seqp, stmt);
5267 gimple_seq_add_seq (stmt_seqp, sub_seq);
5269 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END),
5271 gimple_seq_add_stmt (stmt_seqp, stmt);
5275 /* Generate code to implement the COPYPRIVATE clauses. */
5277 static void
5278 lower_copyprivate_clauses (tree clauses, gimple_seq *slist, gimple_seq *rlist,
5279 omp_context *ctx)
5281 tree c;
5283 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
5285 tree var, new_var, ref, x;
5286 bool by_ref;
5287 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
5289 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYPRIVATE)
5290 continue;
5292 var = OMP_CLAUSE_DECL (c);
5293 by_ref = use_pointer_for_field (var, NULL);
5295 ref = build_sender_ref (var, ctx);
5296 x = new_var = lookup_decl_in_outer_ctx (var, ctx);
5297 if (by_ref)
5299 x = build_fold_addr_expr_loc (clause_loc, new_var);
5300 x = fold_convert_loc (clause_loc, TREE_TYPE (ref), x);
5302 gimplify_assign (ref, x, slist);
5304 ref = build_receiver_ref (var, false, ctx);
5305 if (by_ref)
5307 ref = fold_convert_loc (clause_loc,
5308 build_pointer_type (TREE_TYPE (new_var)),
5309 ref);
5310 ref = build_fold_indirect_ref_loc (clause_loc, ref);
5312 if (omp_is_reference (var))
5314 ref = fold_convert_loc (clause_loc, TREE_TYPE (new_var), ref);
5315 ref = build_simple_mem_ref_loc (clause_loc, ref);
5316 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
5318 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, ref);
5319 gimplify_and_add (x, rlist);
5324 /* Generate code to implement the clauses, FIRSTPRIVATE, COPYIN, LASTPRIVATE,
5325 and REDUCTION from the sender (aka parent) side. */
5327 static void
5328 lower_send_clauses (tree clauses, gimple_seq *ilist, gimple_seq *olist,
5329 omp_context *ctx)
5331 tree c, t;
5332 int ignored_looptemp = 0;
5333 bool is_taskloop = false;
5335 /* For taskloop, ignore first two _looptemp_ clauses, those are initialized
5336 by GOMP_taskloop. */
5337 if (is_task_ctx (ctx) && gimple_omp_task_taskloop_p (ctx->stmt))
5339 ignored_looptemp = 2;
5340 is_taskloop = true;
5343 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
5345 tree val, ref, x, var;
5346 bool by_ref, do_in = false, do_out = false;
5347 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
5349 switch (OMP_CLAUSE_CODE (c))
5351 case OMP_CLAUSE_PRIVATE:
5352 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
5353 break;
5354 continue;
5355 case OMP_CLAUSE_FIRSTPRIVATE:
5356 case OMP_CLAUSE_COPYIN:
5357 case OMP_CLAUSE_LASTPRIVATE:
5358 case OMP_CLAUSE_REDUCTION:
5359 break;
5360 case OMP_CLAUSE_SHARED:
5361 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
5362 break;
5363 continue;
5364 case OMP_CLAUSE__LOOPTEMP_:
5365 if (ignored_looptemp)
5367 ignored_looptemp--;
5368 continue;
5370 break;
5371 default:
5372 continue;
5375 val = OMP_CLAUSE_DECL (c);
5376 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
5377 && TREE_CODE (val) == MEM_REF)
5379 val = TREE_OPERAND (val, 0);
5380 if (TREE_CODE (val) == POINTER_PLUS_EXPR)
5381 val = TREE_OPERAND (val, 0);
5382 if (TREE_CODE (val) == INDIRECT_REF
5383 || TREE_CODE (val) == ADDR_EXPR)
5384 val = TREE_OPERAND (val, 0);
5385 if (is_variable_sized (val))
5386 continue;
5389 /* For OMP_CLAUSE_SHARED_FIRSTPRIVATE, look beyond the
5390 outer taskloop region. */
5391 omp_context *ctx_for_o = ctx;
5392 if (is_taskloop
5393 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
5394 && OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
5395 ctx_for_o = ctx->outer;
5397 var = lookup_decl_in_outer_ctx (val, ctx_for_o);
5399 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYIN
5400 && is_global_var (var))
5401 continue;
5403 t = omp_member_access_dummy_var (var);
5404 if (t)
5406 var = DECL_VALUE_EXPR (var);
5407 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx_for_o);
5408 if (o != t)
5409 var = unshare_and_remap (var, t, o);
5410 else
5411 var = unshare_expr (var);
5414 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
5416 /* Handle taskloop firstprivate/lastprivate, where the
5417 lastprivate on GIMPLE_OMP_TASK is represented as
5418 OMP_CLAUSE_SHARED_FIRSTPRIVATE. */
5419 tree f = lookup_sfield ((splay_tree_key) &DECL_UID (val), ctx);
5420 x = omp_build_component_ref (ctx->sender_decl, f);
5421 if (use_pointer_for_field (val, ctx))
5422 var = build_fold_addr_expr (var);
5423 gimplify_assign (x, var, ilist);
5424 DECL_ABSTRACT_ORIGIN (f) = NULL;
5425 continue;
5428 if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
5429 || val == OMP_CLAUSE_DECL (c))
5430 && is_variable_sized (val))
5431 continue;
5432 by_ref = use_pointer_for_field (val, NULL);
5434 switch (OMP_CLAUSE_CODE (c))
5436 case OMP_CLAUSE_FIRSTPRIVATE:
5437 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c)
5438 && !by_ref
5439 && is_task_ctx (ctx))
5440 TREE_NO_WARNING (var) = 1;
5441 do_in = true;
5442 break;
5444 case OMP_CLAUSE_PRIVATE:
5445 case OMP_CLAUSE_COPYIN:
5446 case OMP_CLAUSE__LOOPTEMP_:
5447 do_in = true;
5448 break;
5450 case OMP_CLAUSE_LASTPRIVATE:
5451 if (by_ref || omp_is_reference (val))
5453 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
5454 continue;
5455 do_in = true;
5457 else
5459 do_out = true;
5460 if (lang_hooks.decls.omp_private_outer_ref (val))
5461 do_in = true;
5463 break;
5465 case OMP_CLAUSE_REDUCTION:
5466 do_in = true;
5467 if (val == OMP_CLAUSE_DECL (c))
5468 do_out = !(by_ref || omp_is_reference (val));
5469 else
5470 by_ref = TREE_CODE (TREE_TYPE (val)) == ARRAY_TYPE;
5471 break;
5473 default:
5474 gcc_unreachable ();
5477 if (do_in)
5479 ref = build_sender_ref (val, ctx);
5480 x = by_ref ? build_fold_addr_expr_loc (clause_loc, var) : var;
5481 gimplify_assign (ref, x, ilist);
5482 if (is_task_ctx (ctx))
5483 DECL_ABSTRACT_ORIGIN (TREE_OPERAND (ref, 1)) = NULL;
5486 if (do_out)
5488 ref = build_sender_ref (val, ctx);
5489 gimplify_assign (var, ref, olist);
5494 /* Generate code to implement SHARED from the sender (aka parent)
5495 side. This is trickier, since GIMPLE_OMP_PARALLEL_CLAUSES doesn't
5496 list things that got automatically shared. */
5498 static void
5499 lower_send_shared_vars (gimple_seq *ilist, gimple_seq *olist, omp_context *ctx)
5501 tree var, ovar, nvar, t, f, x, record_type;
5503 if (ctx->record_type == NULL)
5504 return;
5506 record_type = ctx->srecord_type ? ctx->srecord_type : ctx->record_type;
5507 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
5509 ovar = DECL_ABSTRACT_ORIGIN (f);
5510 if (!ovar || TREE_CODE (ovar) == FIELD_DECL)
5511 continue;
5513 nvar = maybe_lookup_decl (ovar, ctx);
5514 if (!nvar || !DECL_HAS_VALUE_EXPR_P (nvar))
5515 continue;
5517 /* If CTX is a nested parallel directive. Find the immediately
5518 enclosing parallel or workshare construct that contains a
5519 mapping for OVAR. */
5520 var = lookup_decl_in_outer_ctx (ovar, ctx);
5522 t = omp_member_access_dummy_var (var);
5523 if (t)
5525 var = DECL_VALUE_EXPR (var);
5526 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
5527 if (o != t)
5528 var = unshare_and_remap (var, t, o);
5529 else
5530 var = unshare_expr (var);
5533 if (use_pointer_for_field (ovar, ctx))
5535 x = build_sender_ref (ovar, ctx);
5536 var = build_fold_addr_expr (var);
5537 gimplify_assign (x, var, ilist);
5539 else
5541 x = build_sender_ref (ovar, ctx);
5542 gimplify_assign (x, var, ilist);
5544 if (!TREE_READONLY (var)
5545 /* We don't need to receive a new reference to a result
5546 or parm decl. In fact we may not store to it as we will
5547 invalidate any pending RSO and generate wrong gimple
5548 during inlining. */
5549 && !((TREE_CODE (var) == RESULT_DECL
5550 || TREE_CODE (var) == PARM_DECL)
5551 && DECL_BY_REFERENCE (var)))
5553 x = build_sender_ref (ovar, ctx);
5554 gimplify_assign (var, x, olist);
5560 /* Emit an OpenACC head marker call, encapulating the partitioning and
5561 other information that must be processed by the target compiler.
5562 Return the maximum number of dimensions the associated loop might
5563 be partitioned over. */
5565 static unsigned
5566 lower_oacc_head_mark (location_t loc, tree ddvar, tree clauses,
5567 gimple_seq *seq, omp_context *ctx)
5569 unsigned levels = 0;
5570 unsigned tag = 0;
5571 tree gang_static = NULL_TREE;
5572 auto_vec<tree, 5> args;
5574 args.quick_push (build_int_cst
5575 (integer_type_node, IFN_UNIQUE_OACC_HEAD_MARK));
5576 args.quick_push (ddvar);
5577 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
5579 switch (OMP_CLAUSE_CODE (c))
5581 case OMP_CLAUSE_GANG:
5582 tag |= OLF_DIM_GANG;
5583 gang_static = OMP_CLAUSE_GANG_STATIC_EXPR (c);
5584 /* static:* is represented by -1, and we can ignore it, as
5585 scheduling is always static. */
5586 if (gang_static && integer_minus_onep (gang_static))
5587 gang_static = NULL_TREE;
5588 levels++;
5589 break;
5591 case OMP_CLAUSE_WORKER:
5592 tag |= OLF_DIM_WORKER;
5593 levels++;
5594 break;
5596 case OMP_CLAUSE_VECTOR:
5597 tag |= OLF_DIM_VECTOR;
5598 levels++;
5599 break;
5601 case OMP_CLAUSE_SEQ:
5602 tag |= OLF_SEQ;
5603 break;
5605 case OMP_CLAUSE_AUTO:
5606 tag |= OLF_AUTO;
5607 break;
5609 case OMP_CLAUSE_INDEPENDENT:
5610 tag |= OLF_INDEPENDENT;
5611 break;
5613 default:
5614 continue;
5618 if (gang_static)
5620 if (DECL_P (gang_static))
5621 gang_static = build_outer_var_ref (gang_static, ctx);
5622 tag |= OLF_GANG_STATIC;
5625 /* In a parallel region, loops are implicitly INDEPENDENT. */
5626 omp_context *tgt = enclosing_target_ctx (ctx);
5627 if (!tgt || is_oacc_parallel (tgt))
5628 tag |= OLF_INDEPENDENT;
5630 /* A loop lacking SEQ, GANG, WORKER and/or VECTOR is implicitly AUTO. */
5631 if (!(tag & (((GOMP_DIM_MASK (GOMP_DIM_MAX) - 1) << OLF_DIM_BASE)
5632 | OLF_SEQ)))
5633 tag |= OLF_AUTO;
5635 /* Ensure at least one level. */
5636 if (!levels)
5637 levels++;
5639 args.quick_push (build_int_cst (integer_type_node, levels));
5640 args.quick_push (build_int_cst (integer_type_node, tag));
5641 if (gang_static)
5642 args.quick_push (gang_static);
5644 gcall *call = gimple_build_call_internal_vec (IFN_UNIQUE, args);
5645 gimple_set_location (call, loc);
5646 gimple_set_lhs (call, ddvar);
5647 gimple_seq_add_stmt (seq, call);
5649 return levels;
5652 /* Emit an OpenACC lopp head or tail marker to SEQ. LEVEL is the
5653 partitioning level of the enclosed region. */
5655 static void
5656 lower_oacc_loop_marker (location_t loc, tree ddvar, bool head,
5657 tree tofollow, gimple_seq *seq)
5659 int marker_kind = (head ? IFN_UNIQUE_OACC_HEAD_MARK
5660 : IFN_UNIQUE_OACC_TAIL_MARK);
5661 tree marker = build_int_cst (integer_type_node, marker_kind);
5662 int nargs = 2 + (tofollow != NULL_TREE);
5663 gcall *call = gimple_build_call_internal (IFN_UNIQUE, nargs,
5664 marker, ddvar, tofollow);
5665 gimple_set_location (call, loc);
5666 gimple_set_lhs (call, ddvar);
5667 gimple_seq_add_stmt (seq, call);
5670 /* Generate the before and after OpenACC loop sequences. CLAUSES are
5671 the loop clauses, from which we extract reductions. Initialize
5672 HEAD and TAIL. */
5674 static void
5675 lower_oacc_head_tail (location_t loc, tree clauses,
5676 gimple_seq *head, gimple_seq *tail, omp_context *ctx)
5678 bool inner = false;
5679 tree ddvar = create_tmp_var (integer_type_node, ".data_dep");
5680 gimple_seq_add_stmt (head, gimple_build_assign (ddvar, integer_zero_node));
5682 unsigned count = lower_oacc_head_mark (loc, ddvar, clauses, head, ctx);
5683 tree fork_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_FORK);
5684 tree join_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_JOIN);
5686 gcc_assert (count);
5687 for (unsigned done = 1; count; count--, done++)
5689 gimple_seq fork_seq = NULL;
5690 gimple_seq join_seq = NULL;
5692 tree place = build_int_cst (integer_type_node, -1);
5693 gcall *fork = gimple_build_call_internal (IFN_UNIQUE, 3,
5694 fork_kind, ddvar, place);
5695 gimple_set_location (fork, loc);
5696 gimple_set_lhs (fork, ddvar);
5698 gcall *join = gimple_build_call_internal (IFN_UNIQUE, 3,
5699 join_kind, ddvar, place);
5700 gimple_set_location (join, loc);
5701 gimple_set_lhs (join, ddvar);
5703 /* Mark the beginning of this level sequence. */
5704 if (inner)
5705 lower_oacc_loop_marker (loc, ddvar, true,
5706 build_int_cst (integer_type_node, count),
5707 &fork_seq);
5708 lower_oacc_loop_marker (loc, ddvar, false,
5709 build_int_cst (integer_type_node, done),
5710 &join_seq);
5712 lower_oacc_reductions (loc, clauses, place, inner,
5713 fork, join, &fork_seq, &join_seq, ctx);
5715 /* Append this level to head. */
5716 gimple_seq_add_seq (head, fork_seq);
5717 /* Prepend it to tail. */
5718 gimple_seq_add_seq (&join_seq, *tail);
5719 *tail = join_seq;
5721 inner = true;
5724 /* Mark the end of the sequence. */
5725 lower_oacc_loop_marker (loc, ddvar, true, NULL_TREE, head);
5726 lower_oacc_loop_marker (loc, ddvar, false, NULL_TREE, tail);
5729 /* If exceptions are enabled, wrap the statements in BODY in a MUST_NOT_THROW
5730 catch handler and return it. This prevents programs from violating the
5731 structured block semantics with throws. */
5733 static gimple_seq
5734 maybe_catch_exception (gimple_seq body)
5736 gimple *g;
5737 tree decl;
5739 if (!flag_exceptions)
5740 return body;
5742 if (lang_hooks.eh_protect_cleanup_actions != NULL)
5743 decl = lang_hooks.eh_protect_cleanup_actions ();
5744 else
5745 decl = builtin_decl_explicit (BUILT_IN_TRAP);
5747 g = gimple_build_eh_must_not_throw (decl);
5748 g = gimple_build_try (body, gimple_seq_alloc_with_stmt (g),
5749 GIMPLE_TRY_CATCH);
5751 return gimple_seq_alloc_with_stmt (g);
5755 /* Routines to lower OMP directives into OMP-GIMPLE. */
5757 /* If ctx is a worksharing context inside of a cancellable parallel
5758 region and it isn't nowait, add lhs to its GIMPLE_OMP_RETURN
5759 and conditional branch to parallel's cancel_label to handle
5760 cancellation in the implicit barrier. */
5762 static void
5763 maybe_add_implicit_barrier_cancel (omp_context *ctx, gimple_seq *body)
5765 gimple *omp_return = gimple_seq_last_stmt (*body);
5766 gcc_assert (gimple_code (omp_return) == GIMPLE_OMP_RETURN);
5767 if (gimple_omp_return_nowait_p (omp_return))
5768 return;
5769 if (ctx->outer
5770 && gimple_code (ctx->outer->stmt) == GIMPLE_OMP_PARALLEL
5771 && ctx->outer->cancellable)
5773 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_CANCEL);
5774 tree c_bool_type = TREE_TYPE (TREE_TYPE (fndecl));
5775 tree lhs = create_tmp_var (c_bool_type);
5776 gimple_omp_return_set_lhs (omp_return, lhs);
5777 tree fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
5778 gimple *g = gimple_build_cond (NE_EXPR, lhs,
5779 fold_convert (c_bool_type,
5780 boolean_false_node),
5781 ctx->outer->cancel_label, fallthru_label);
5782 gimple_seq_add_stmt (body, g);
5783 gimple_seq_add_stmt (body, gimple_build_label (fallthru_label));
5787 /* Lower the OpenMP sections directive in the current statement in GSI_P.
5788 CTX is the enclosing OMP context for the current statement. */
5790 static void
5791 lower_omp_sections (gimple_stmt_iterator *gsi_p, omp_context *ctx)
5793 tree block, control;
5794 gimple_stmt_iterator tgsi;
5795 gomp_sections *stmt;
5796 gimple *t;
5797 gbind *new_stmt, *bind;
5798 gimple_seq ilist, dlist, olist, new_body;
5800 stmt = as_a <gomp_sections *> (gsi_stmt (*gsi_p));
5802 push_gimplify_context ();
5804 dlist = NULL;
5805 ilist = NULL;
5806 lower_rec_input_clauses (gimple_omp_sections_clauses (stmt),
5807 &ilist, &dlist, ctx, NULL);
5809 new_body = gimple_omp_body (stmt);
5810 gimple_omp_set_body (stmt, NULL);
5811 tgsi = gsi_start (new_body);
5812 for (; !gsi_end_p (tgsi); gsi_next (&tgsi))
5814 omp_context *sctx;
5815 gimple *sec_start;
5817 sec_start = gsi_stmt (tgsi);
5818 sctx = maybe_lookup_ctx (sec_start);
5819 gcc_assert (sctx);
5821 lower_omp (gimple_omp_body_ptr (sec_start), sctx);
5822 gsi_insert_seq_after (&tgsi, gimple_omp_body (sec_start),
5823 GSI_CONTINUE_LINKING);
5824 gimple_omp_set_body (sec_start, NULL);
5826 if (gsi_one_before_end_p (tgsi))
5828 gimple_seq l = NULL;
5829 lower_lastprivate_clauses (gimple_omp_sections_clauses (stmt), NULL,
5830 &l, ctx);
5831 gsi_insert_seq_after (&tgsi, l, GSI_CONTINUE_LINKING);
5832 gimple_omp_section_set_last (sec_start);
5835 gsi_insert_after (&tgsi, gimple_build_omp_return (false),
5836 GSI_CONTINUE_LINKING);
5839 block = make_node (BLOCK);
5840 bind = gimple_build_bind (NULL, new_body, block);
5842 olist = NULL;
5843 lower_reduction_clauses (gimple_omp_sections_clauses (stmt), &olist, ctx);
5845 block = make_node (BLOCK);
5846 new_stmt = gimple_build_bind (NULL, NULL, block);
5847 gsi_replace (gsi_p, new_stmt, true);
5849 pop_gimplify_context (new_stmt);
5850 gimple_bind_append_vars (new_stmt, ctx->block_vars);
5851 BLOCK_VARS (block) = gimple_bind_vars (bind);
5852 if (BLOCK_VARS (block))
5853 TREE_USED (block) = 1;
5855 new_body = NULL;
5856 gimple_seq_add_seq (&new_body, ilist);
5857 gimple_seq_add_stmt (&new_body, stmt);
5858 gimple_seq_add_stmt (&new_body, gimple_build_omp_sections_switch ());
5859 gimple_seq_add_stmt (&new_body, bind);
5861 control = create_tmp_var (unsigned_type_node, ".section");
5862 t = gimple_build_omp_continue (control, control);
5863 gimple_omp_sections_set_control (stmt, control);
5864 gimple_seq_add_stmt (&new_body, t);
5866 gimple_seq_add_seq (&new_body, olist);
5867 if (ctx->cancellable)
5868 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
5869 gimple_seq_add_seq (&new_body, dlist);
5871 new_body = maybe_catch_exception (new_body);
5873 bool nowait = omp_find_clause (gimple_omp_sections_clauses (stmt),
5874 OMP_CLAUSE_NOWAIT) != NULL_TREE;
5875 t = gimple_build_omp_return (nowait);
5876 gimple_seq_add_stmt (&new_body, t);
5877 maybe_add_implicit_barrier_cancel (ctx, &new_body);
5879 gimple_bind_set_body (new_stmt, new_body);
5883 /* A subroutine of lower_omp_single. Expand the simple form of
5884 a GIMPLE_OMP_SINGLE, without a copyprivate clause:
5886 if (GOMP_single_start ())
5887 BODY;
5888 [ GOMP_barrier (); ] -> unless 'nowait' is present.
5890 FIXME. It may be better to delay expanding the logic of this until
5891 pass_expand_omp. The expanded logic may make the job more difficult
5892 to a synchronization analysis pass. */
5894 static void
5895 lower_omp_single_simple (gomp_single *single_stmt, gimple_seq *pre_p)
5897 location_t loc = gimple_location (single_stmt);
5898 tree tlabel = create_artificial_label (loc);
5899 tree flabel = create_artificial_label (loc);
5900 gimple *call, *cond;
5901 tree lhs, decl;
5903 decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_START);
5904 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (decl)));
5905 call = gimple_build_call (decl, 0);
5906 gimple_call_set_lhs (call, lhs);
5907 gimple_seq_add_stmt (pre_p, call);
5909 cond = gimple_build_cond (EQ_EXPR, lhs,
5910 fold_convert_loc (loc, TREE_TYPE (lhs),
5911 boolean_true_node),
5912 tlabel, flabel);
5913 gimple_seq_add_stmt (pre_p, cond);
5914 gimple_seq_add_stmt (pre_p, gimple_build_label (tlabel));
5915 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
5916 gimple_seq_add_stmt (pre_p, gimple_build_label (flabel));
5920 /* A subroutine of lower_omp_single. Expand the simple form of
5921 a GIMPLE_OMP_SINGLE, with a copyprivate clause:
5923 #pragma omp single copyprivate (a, b, c)
5925 Create a new structure to hold copies of 'a', 'b' and 'c' and emit:
5928 if ((copyout_p = GOMP_single_copy_start ()) == NULL)
5930 BODY;
5931 copyout.a = a;
5932 copyout.b = b;
5933 copyout.c = c;
5934 GOMP_single_copy_end (&copyout);
5936 else
5938 a = copyout_p->a;
5939 b = copyout_p->b;
5940 c = copyout_p->c;
5942 GOMP_barrier ();
5945 FIXME. It may be better to delay expanding the logic of this until
5946 pass_expand_omp. The expanded logic may make the job more difficult
5947 to a synchronization analysis pass. */
5949 static void
5950 lower_omp_single_copy (gomp_single *single_stmt, gimple_seq *pre_p,
5951 omp_context *ctx)
5953 tree ptr_type, t, l0, l1, l2, bfn_decl;
5954 gimple_seq copyin_seq;
5955 location_t loc = gimple_location (single_stmt);
5957 ctx->sender_decl = create_tmp_var (ctx->record_type, ".omp_copy_o");
5959 ptr_type = build_pointer_type (ctx->record_type);
5960 ctx->receiver_decl = create_tmp_var (ptr_type, ".omp_copy_i");
5962 l0 = create_artificial_label (loc);
5963 l1 = create_artificial_label (loc);
5964 l2 = create_artificial_label (loc);
5966 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_START);
5967 t = build_call_expr_loc (loc, bfn_decl, 0);
5968 t = fold_convert_loc (loc, ptr_type, t);
5969 gimplify_assign (ctx->receiver_decl, t, pre_p);
5971 t = build2 (EQ_EXPR, boolean_type_node, ctx->receiver_decl,
5972 build_int_cst (ptr_type, 0));
5973 t = build3 (COND_EXPR, void_type_node, t,
5974 build_and_jump (&l0), build_and_jump (&l1));
5975 gimplify_and_add (t, pre_p);
5977 gimple_seq_add_stmt (pre_p, gimple_build_label (l0));
5979 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
5981 copyin_seq = NULL;
5982 lower_copyprivate_clauses (gimple_omp_single_clauses (single_stmt), pre_p,
5983 &copyin_seq, ctx);
5985 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
5986 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_END);
5987 t = build_call_expr_loc (loc, bfn_decl, 1, t);
5988 gimplify_and_add (t, pre_p);
5990 t = build_and_jump (&l2);
5991 gimplify_and_add (t, pre_p);
5993 gimple_seq_add_stmt (pre_p, gimple_build_label (l1));
5995 gimple_seq_add_seq (pre_p, copyin_seq);
5997 gimple_seq_add_stmt (pre_p, gimple_build_label (l2));
6001 /* Expand code for an OpenMP single directive. */
6003 static void
6004 lower_omp_single (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6006 tree block;
6007 gomp_single *single_stmt = as_a <gomp_single *> (gsi_stmt (*gsi_p));
6008 gbind *bind;
6009 gimple_seq bind_body, bind_body_tail = NULL, dlist;
6011 push_gimplify_context ();
6013 block = make_node (BLOCK);
6014 bind = gimple_build_bind (NULL, NULL, block);
6015 gsi_replace (gsi_p, bind, true);
6016 bind_body = NULL;
6017 dlist = NULL;
6018 lower_rec_input_clauses (gimple_omp_single_clauses (single_stmt),
6019 &bind_body, &dlist, ctx, NULL);
6020 lower_omp (gimple_omp_body_ptr (single_stmt), ctx);
6022 gimple_seq_add_stmt (&bind_body, single_stmt);
6024 if (ctx->record_type)
6025 lower_omp_single_copy (single_stmt, &bind_body, ctx);
6026 else
6027 lower_omp_single_simple (single_stmt, &bind_body);
6029 gimple_omp_set_body (single_stmt, NULL);
6031 gimple_seq_add_seq (&bind_body, dlist);
6033 bind_body = maybe_catch_exception (bind_body);
6035 bool nowait = omp_find_clause (gimple_omp_single_clauses (single_stmt),
6036 OMP_CLAUSE_NOWAIT) != NULL_TREE;
6037 gimple *g = gimple_build_omp_return (nowait);
6038 gimple_seq_add_stmt (&bind_body_tail, g);
6039 maybe_add_implicit_barrier_cancel (ctx, &bind_body_tail);
6040 if (ctx->record_type)
6042 gimple_stmt_iterator gsi = gsi_start (bind_body_tail);
6043 tree clobber = build_constructor (ctx->record_type, NULL);
6044 TREE_THIS_VOLATILE (clobber) = 1;
6045 gsi_insert_after (&gsi, gimple_build_assign (ctx->sender_decl,
6046 clobber), GSI_SAME_STMT);
6048 gimple_seq_add_seq (&bind_body, bind_body_tail);
6049 gimple_bind_set_body (bind, bind_body);
6051 pop_gimplify_context (bind);
6053 gimple_bind_append_vars (bind, ctx->block_vars);
6054 BLOCK_VARS (block) = ctx->block_vars;
6055 if (BLOCK_VARS (block))
6056 TREE_USED (block) = 1;
6060 /* Expand code for an OpenMP master directive. */
6062 static void
6063 lower_omp_master (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6065 tree block, lab = NULL, x, bfn_decl;
6066 gimple *stmt = gsi_stmt (*gsi_p);
6067 gbind *bind;
6068 location_t loc = gimple_location (stmt);
6069 gimple_seq tseq;
6071 push_gimplify_context ();
6073 block = make_node (BLOCK);
6074 bind = gimple_build_bind (NULL, NULL, block);
6075 gsi_replace (gsi_p, bind, true);
6076 gimple_bind_add_stmt (bind, stmt);
6078 bfn_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
6079 x = build_call_expr_loc (loc, bfn_decl, 0);
6080 x = build2 (EQ_EXPR, boolean_type_node, x, integer_zero_node);
6081 x = build3 (COND_EXPR, void_type_node, x, NULL, build_and_jump (&lab));
6082 tseq = NULL;
6083 gimplify_and_add (x, &tseq);
6084 gimple_bind_add_seq (bind, tseq);
6086 lower_omp (gimple_omp_body_ptr (stmt), ctx);
6087 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
6088 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
6089 gimple_omp_set_body (stmt, NULL);
6091 gimple_bind_add_stmt (bind, gimple_build_label (lab));
6093 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
6095 pop_gimplify_context (bind);
6097 gimple_bind_append_vars (bind, ctx->block_vars);
6098 BLOCK_VARS (block) = ctx->block_vars;
6102 /* Expand code for an OpenMP taskgroup directive. */
6104 static void
6105 lower_omp_taskgroup (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6107 gimple *stmt = gsi_stmt (*gsi_p);
6108 gcall *x;
6109 gbind *bind;
6110 tree block = make_node (BLOCK);
6112 bind = gimple_build_bind (NULL, NULL, block);
6113 gsi_replace (gsi_p, bind, true);
6114 gimple_bind_add_stmt (bind, stmt);
6116 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_START),
6118 gimple_bind_add_stmt (bind, x);
6120 lower_omp (gimple_omp_body_ptr (stmt), ctx);
6121 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
6122 gimple_omp_set_body (stmt, NULL);
6124 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
6126 gimple_bind_append_vars (bind, ctx->block_vars);
6127 BLOCK_VARS (block) = ctx->block_vars;
6131 /* Fold the OMP_ORDERED_CLAUSES for the OMP_ORDERED in STMT if possible. */
6133 static void
6134 lower_omp_ordered_clauses (gimple_stmt_iterator *gsi_p, gomp_ordered *ord_stmt,
6135 omp_context *ctx)
6137 struct omp_for_data fd;
6138 if (!ctx->outer || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR)
6139 return;
6141 unsigned int len = gimple_omp_for_collapse (ctx->outer->stmt);
6142 struct omp_for_data_loop *loops = XALLOCAVEC (struct omp_for_data_loop, len);
6143 omp_extract_for_data (as_a <gomp_for *> (ctx->outer->stmt), &fd, loops);
6144 if (!fd.ordered)
6145 return;
6147 tree *list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
6148 tree c = gimple_omp_ordered_clauses (ord_stmt);
6149 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
6150 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
6152 /* Merge depend clauses from multiple adjacent
6153 #pragma omp ordered depend(sink:...) constructs
6154 into one #pragma omp ordered depend(sink:...), so that
6155 we can optimize them together. */
6156 gimple_stmt_iterator gsi = *gsi_p;
6157 gsi_next (&gsi);
6158 while (!gsi_end_p (gsi))
6160 gimple *stmt = gsi_stmt (gsi);
6161 if (is_gimple_debug (stmt)
6162 || gimple_code (stmt) == GIMPLE_NOP)
6164 gsi_next (&gsi);
6165 continue;
6167 if (gimple_code (stmt) != GIMPLE_OMP_ORDERED)
6168 break;
6169 gomp_ordered *ord_stmt2 = as_a <gomp_ordered *> (stmt);
6170 c = gimple_omp_ordered_clauses (ord_stmt2);
6171 if (c == NULL_TREE
6172 || OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND
6173 || OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
6174 break;
6175 while (*list_p)
6176 list_p = &OMP_CLAUSE_CHAIN (*list_p);
6177 *list_p = c;
6178 gsi_remove (&gsi, true);
6182 /* Canonicalize sink dependence clauses into one folded clause if
6183 possible.
6185 The basic algorithm is to create a sink vector whose first
6186 element is the GCD of all the first elements, and whose remaining
6187 elements are the minimum of the subsequent columns.
6189 We ignore dependence vectors whose first element is zero because
6190 such dependencies are known to be executed by the same thread.
6192 We take into account the direction of the loop, so a minimum
6193 becomes a maximum if the loop is iterating forwards. We also
6194 ignore sink clauses where the loop direction is unknown, or where
6195 the offsets are clearly invalid because they are not a multiple
6196 of the loop increment.
6198 For example:
6200 #pragma omp for ordered(2)
6201 for (i=0; i < N; ++i)
6202 for (j=0; j < M; ++j)
6204 #pragma omp ordered \
6205 depend(sink:i-8,j-2) \
6206 depend(sink:i,j-1) \ // Completely ignored because i+0.
6207 depend(sink:i-4,j-3) \
6208 depend(sink:i-6,j-4)
6209 #pragma omp ordered depend(source)
6212 Folded clause is:
6214 depend(sink:-gcd(8,4,6),-min(2,3,4))
6215 -or-
6216 depend(sink:-2,-2)
6219 /* FIXME: Computing GCD's where the first element is zero is
6220 non-trivial in the presence of collapsed loops. Do this later. */
6221 if (fd.collapse > 1)
6222 return;
6224 wide_int *folded_deps = XALLOCAVEC (wide_int, 2 * len - 1);
6225 memset (folded_deps, 0, sizeof (*folded_deps) * (2 * len - 1));
6226 tree folded_dep = NULL_TREE;
6227 /* TRUE if the first dimension's offset is negative. */
6228 bool neg_offset_p = false;
6230 list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
6231 unsigned int i;
6232 while ((c = *list_p) != NULL)
6234 bool remove = false;
6236 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND);
6237 if (OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
6238 goto next_ordered_clause;
6240 tree vec;
6241 for (vec = OMP_CLAUSE_DECL (c), i = 0;
6242 vec && TREE_CODE (vec) == TREE_LIST;
6243 vec = TREE_CHAIN (vec), ++i)
6245 gcc_assert (i < len);
6247 /* omp_extract_for_data has canonicalized the condition. */
6248 gcc_assert (fd.loops[i].cond_code == LT_EXPR
6249 || fd.loops[i].cond_code == GT_EXPR);
6250 bool forward = fd.loops[i].cond_code == LT_EXPR;
6251 bool maybe_lexically_later = true;
6253 /* While the committee makes up its mind, bail if we have any
6254 non-constant steps. */
6255 if (TREE_CODE (fd.loops[i].step) != INTEGER_CST)
6256 goto lower_omp_ordered_ret;
6258 tree itype = TREE_TYPE (TREE_VALUE (vec));
6259 if (POINTER_TYPE_P (itype))
6260 itype = sizetype;
6261 wide_int offset = wide_int::from (TREE_PURPOSE (vec),
6262 TYPE_PRECISION (itype),
6263 TYPE_SIGN (itype));
6265 /* Ignore invalid offsets that are not multiples of the step. */
6266 if (!wi::multiple_of_p
6267 (wi::abs (offset), wi::abs ((wide_int) fd.loops[i].step),
6268 UNSIGNED))
6270 warning_at (OMP_CLAUSE_LOCATION (c), 0,
6271 "ignoring sink clause with offset that is not "
6272 "a multiple of the loop step");
6273 remove = true;
6274 goto next_ordered_clause;
6277 /* Calculate the first dimension. The first dimension of
6278 the folded dependency vector is the GCD of the first
6279 elements, while ignoring any first elements whose offset
6280 is 0. */
6281 if (i == 0)
6283 /* Ignore dependence vectors whose first dimension is 0. */
6284 if (offset == 0)
6286 remove = true;
6287 goto next_ordered_clause;
6289 else
6291 if (!TYPE_UNSIGNED (itype) && (forward ^ wi::neg_p (offset)))
6293 error_at (OMP_CLAUSE_LOCATION (c),
6294 "first offset must be in opposite direction "
6295 "of loop iterations");
6296 goto lower_omp_ordered_ret;
6298 if (forward)
6299 offset = -offset;
6300 neg_offset_p = forward;
6301 /* Initialize the first time around. */
6302 if (folded_dep == NULL_TREE)
6304 folded_dep = c;
6305 folded_deps[0] = offset;
6307 else
6308 folded_deps[0] = wi::gcd (folded_deps[0],
6309 offset, UNSIGNED);
6312 /* Calculate minimum for the remaining dimensions. */
6313 else
6315 folded_deps[len + i - 1] = offset;
6316 if (folded_dep == c)
6317 folded_deps[i] = offset;
6318 else if (maybe_lexically_later
6319 && !wi::eq_p (folded_deps[i], offset))
6321 if (forward ^ wi::gts_p (folded_deps[i], offset))
6323 unsigned int j;
6324 folded_dep = c;
6325 for (j = 1; j <= i; j++)
6326 folded_deps[j] = folded_deps[len + j - 1];
6328 else
6329 maybe_lexically_later = false;
6333 gcc_assert (i == len);
6335 remove = true;
6337 next_ordered_clause:
6338 if (remove)
6339 *list_p = OMP_CLAUSE_CHAIN (c);
6340 else
6341 list_p = &OMP_CLAUSE_CHAIN (c);
6344 if (folded_dep)
6346 if (neg_offset_p)
6347 folded_deps[0] = -folded_deps[0];
6349 tree itype = TREE_TYPE (TREE_VALUE (OMP_CLAUSE_DECL (folded_dep)));
6350 if (POINTER_TYPE_P (itype))
6351 itype = sizetype;
6353 TREE_PURPOSE (OMP_CLAUSE_DECL (folded_dep))
6354 = wide_int_to_tree (itype, folded_deps[0]);
6355 OMP_CLAUSE_CHAIN (folded_dep) = gimple_omp_ordered_clauses (ord_stmt);
6356 *gimple_omp_ordered_clauses_ptr (ord_stmt) = folded_dep;
6359 lower_omp_ordered_ret:
6361 /* Ordered without clauses is #pragma omp threads, while we want
6362 a nop instead if we remove all clauses. */
6363 if (gimple_omp_ordered_clauses (ord_stmt) == NULL_TREE)
6364 gsi_replace (gsi_p, gimple_build_nop (), true);
6368 /* Expand code for an OpenMP ordered directive. */
6370 static void
6371 lower_omp_ordered (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6373 tree block;
6374 gimple *stmt = gsi_stmt (*gsi_p), *g;
6375 gomp_ordered *ord_stmt = as_a <gomp_ordered *> (stmt);
6376 gcall *x;
6377 gbind *bind;
6378 bool simd = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
6379 OMP_CLAUSE_SIMD);
6380 /* FIXME: this should check presence of OMP_CLAUSE__SIMT_ on the enclosing
6381 loop. */
6382 bool maybe_simt
6383 = simd && omp_maybe_offloaded_ctx (ctx) && omp_max_simt_vf () > 1;
6384 bool threads = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
6385 OMP_CLAUSE_THREADS);
6387 if (omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
6388 OMP_CLAUSE_DEPEND))
6390 /* FIXME: This is needs to be moved to the expansion to verify various
6391 conditions only testable on cfg with dominators computed, and also
6392 all the depend clauses to be merged still might need to be available
6393 for the runtime checks. */
6394 if (0)
6395 lower_omp_ordered_clauses (gsi_p, ord_stmt, ctx);
6396 return;
6399 push_gimplify_context ();
6401 block = make_node (BLOCK);
6402 bind = gimple_build_bind (NULL, NULL, block);
6403 gsi_replace (gsi_p, bind, true);
6404 gimple_bind_add_stmt (bind, stmt);
6406 if (simd)
6408 x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_START, 1,
6409 build_int_cst (NULL_TREE, threads));
6410 cfun->has_simduid_loops = true;
6412 else
6413 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_START),
6415 gimple_bind_add_stmt (bind, x);
6417 tree counter = NULL_TREE, test = NULL_TREE, body = NULL_TREE;
6418 if (maybe_simt)
6420 counter = create_tmp_var (integer_type_node);
6421 g = gimple_build_call_internal (IFN_GOMP_SIMT_LANE, 0);
6422 gimple_call_set_lhs (g, counter);
6423 gimple_bind_add_stmt (bind, g);
6425 body = create_artificial_label (UNKNOWN_LOCATION);
6426 test = create_artificial_label (UNKNOWN_LOCATION);
6427 gimple_bind_add_stmt (bind, gimple_build_label (body));
6429 tree simt_pred = create_tmp_var (integer_type_node);
6430 g = gimple_build_call_internal (IFN_GOMP_SIMT_ORDERED_PRED, 1, counter);
6431 gimple_call_set_lhs (g, simt_pred);
6432 gimple_bind_add_stmt (bind, g);
6434 tree t = create_artificial_label (UNKNOWN_LOCATION);
6435 g = gimple_build_cond (EQ_EXPR, simt_pred, integer_zero_node, t, test);
6436 gimple_bind_add_stmt (bind, g);
6438 gimple_bind_add_stmt (bind, gimple_build_label (t));
6440 lower_omp (gimple_omp_body_ptr (stmt), ctx);
6441 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
6442 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
6443 gimple_omp_set_body (stmt, NULL);
6445 if (maybe_simt)
6447 gimple_bind_add_stmt (bind, gimple_build_label (test));
6448 g = gimple_build_assign (counter, MINUS_EXPR, counter, integer_one_node);
6449 gimple_bind_add_stmt (bind, g);
6451 tree c = build2 (GE_EXPR, boolean_type_node, counter, integer_zero_node);
6452 tree nonneg = create_tmp_var (integer_type_node);
6453 gimple_seq tseq = NULL;
6454 gimplify_assign (nonneg, fold_convert (integer_type_node, c), &tseq);
6455 gimple_bind_add_seq (bind, tseq);
6457 g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY, 1, nonneg);
6458 gimple_call_set_lhs (g, nonneg);
6459 gimple_bind_add_stmt (bind, g);
6461 tree end = create_artificial_label (UNKNOWN_LOCATION);
6462 g = gimple_build_cond (NE_EXPR, nonneg, integer_zero_node, body, end);
6463 gimple_bind_add_stmt (bind, g);
6465 gimple_bind_add_stmt (bind, gimple_build_label (end));
6467 if (simd)
6468 x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_END, 1,
6469 build_int_cst (NULL_TREE, threads));
6470 else
6471 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_END),
6473 gimple_bind_add_stmt (bind, x);
6475 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
6477 pop_gimplify_context (bind);
6479 gimple_bind_append_vars (bind, ctx->block_vars);
6480 BLOCK_VARS (block) = gimple_bind_vars (bind);
6484 /* Gimplify a GIMPLE_OMP_CRITICAL statement. This is a relatively simple
6485 substitution of a couple of function calls. But in the NAMED case,
6486 requires that languages coordinate a symbol name. It is therefore
6487 best put here in common code. */
6489 static GTY(()) hash_map<tree, tree> *critical_name_mutexes;
6491 static void
6492 lower_omp_critical (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6494 tree block;
6495 tree name, lock, unlock;
6496 gomp_critical *stmt = as_a <gomp_critical *> (gsi_stmt (*gsi_p));
6497 gbind *bind;
6498 location_t loc = gimple_location (stmt);
6499 gimple_seq tbody;
6501 name = gimple_omp_critical_name (stmt);
6502 if (name)
6504 tree decl;
6506 if (!critical_name_mutexes)
6507 critical_name_mutexes = hash_map<tree, tree>::create_ggc (10);
6509 tree *n = critical_name_mutexes->get (name);
6510 if (n == NULL)
6512 char *new_str;
6514 decl = create_tmp_var_raw (ptr_type_node);
6516 new_str = ACONCAT ((".gomp_critical_user_",
6517 IDENTIFIER_POINTER (name), NULL));
6518 DECL_NAME (decl) = get_identifier (new_str);
6519 TREE_PUBLIC (decl) = 1;
6520 TREE_STATIC (decl) = 1;
6521 DECL_COMMON (decl) = 1;
6522 DECL_ARTIFICIAL (decl) = 1;
6523 DECL_IGNORED_P (decl) = 1;
6525 varpool_node::finalize_decl (decl);
6527 critical_name_mutexes->put (name, decl);
6529 else
6530 decl = *n;
6532 /* If '#pragma omp critical' is inside offloaded region or
6533 inside function marked as offloadable, the symbol must be
6534 marked as offloadable too. */
6535 omp_context *octx;
6536 if (cgraph_node::get (current_function_decl)->offloadable)
6537 varpool_node::get_create (decl)->offloadable = 1;
6538 else
6539 for (octx = ctx->outer; octx; octx = octx->outer)
6540 if (is_gimple_omp_offloaded (octx->stmt))
6542 varpool_node::get_create (decl)->offloadable = 1;
6543 break;
6546 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_START);
6547 lock = build_call_expr_loc (loc, lock, 1,
6548 build_fold_addr_expr_loc (loc, decl));
6550 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_END);
6551 unlock = build_call_expr_loc (loc, unlock, 1,
6552 build_fold_addr_expr_loc (loc, decl));
6554 else
6556 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_START);
6557 lock = build_call_expr_loc (loc, lock, 0);
6559 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_END);
6560 unlock = build_call_expr_loc (loc, unlock, 0);
6563 push_gimplify_context ();
6565 block = make_node (BLOCK);
6566 bind = gimple_build_bind (NULL, NULL, block);
6567 gsi_replace (gsi_p, bind, true);
6568 gimple_bind_add_stmt (bind, stmt);
6570 tbody = gimple_bind_body (bind);
6571 gimplify_and_add (lock, &tbody);
6572 gimple_bind_set_body (bind, tbody);
6574 lower_omp (gimple_omp_body_ptr (stmt), ctx);
6575 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
6576 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
6577 gimple_omp_set_body (stmt, NULL);
6579 tbody = gimple_bind_body (bind);
6580 gimplify_and_add (unlock, &tbody);
6581 gimple_bind_set_body (bind, tbody);
6583 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
6585 pop_gimplify_context (bind);
6586 gimple_bind_append_vars (bind, ctx->block_vars);
6587 BLOCK_VARS (block) = gimple_bind_vars (bind);
6590 /* A subroutine of lower_omp_for. Generate code to emit the predicate
6591 for a lastprivate clause. Given a loop control predicate of (V
6592 cond N2), we gate the clause on (!(V cond N2)). The lowered form
6593 is appended to *DLIST, iterator initialization is appended to
6594 *BODY_P. */
6596 static void
6597 lower_omp_for_lastprivate (struct omp_for_data *fd, gimple_seq *body_p,
6598 gimple_seq *dlist, struct omp_context *ctx)
6600 tree clauses, cond, vinit;
6601 enum tree_code cond_code;
6602 gimple_seq stmts;
6604 cond_code = fd->loop.cond_code;
6605 cond_code = cond_code == LT_EXPR ? GE_EXPR : LE_EXPR;
6607 /* When possible, use a strict equality expression. This can let VRP
6608 type optimizations deduce the value and remove a copy. */
6609 if (tree_fits_shwi_p (fd->loop.step))
6611 HOST_WIDE_INT step = tree_to_shwi (fd->loop.step);
6612 if (step == 1 || step == -1)
6613 cond_code = EQ_EXPR;
6616 if (gimple_omp_for_kind (fd->for_stmt) == GF_OMP_FOR_KIND_GRID_LOOP
6617 || gimple_omp_for_grid_phony (fd->for_stmt))
6618 cond = omp_grid_lastprivate_predicate (fd);
6619 else
6621 tree n2 = fd->loop.n2;
6622 if (fd->collapse > 1
6623 && TREE_CODE (n2) != INTEGER_CST
6624 && gimple_omp_for_combined_into_p (fd->for_stmt))
6626 struct omp_context *taskreg_ctx = NULL;
6627 if (gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
6629 gomp_for *gfor = as_a <gomp_for *> (ctx->outer->stmt);
6630 if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_FOR
6631 || gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_DISTRIBUTE)
6633 if (gimple_omp_for_combined_into_p (gfor))
6635 gcc_assert (ctx->outer->outer
6636 && is_parallel_ctx (ctx->outer->outer));
6637 taskreg_ctx = ctx->outer->outer;
6639 else
6641 struct omp_for_data outer_fd;
6642 omp_extract_for_data (gfor, &outer_fd, NULL);
6643 n2 = fold_convert (TREE_TYPE (n2), outer_fd.loop.n2);
6646 else if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_TASKLOOP)
6647 taskreg_ctx = ctx->outer->outer;
6649 else if (is_taskreg_ctx (ctx->outer))
6650 taskreg_ctx = ctx->outer;
6651 if (taskreg_ctx)
6653 int i;
6654 tree taskreg_clauses
6655 = gimple_omp_taskreg_clauses (taskreg_ctx->stmt);
6656 tree innerc = omp_find_clause (taskreg_clauses,
6657 OMP_CLAUSE__LOOPTEMP_);
6658 gcc_assert (innerc);
6659 for (i = 0; i < fd->collapse; i++)
6661 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
6662 OMP_CLAUSE__LOOPTEMP_);
6663 gcc_assert (innerc);
6665 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
6666 OMP_CLAUSE__LOOPTEMP_);
6667 if (innerc)
6668 n2 = fold_convert (TREE_TYPE (n2),
6669 lookup_decl (OMP_CLAUSE_DECL (innerc),
6670 taskreg_ctx));
6673 cond = build2 (cond_code, boolean_type_node, fd->loop.v, n2);
6676 clauses = gimple_omp_for_clauses (fd->for_stmt);
6677 stmts = NULL;
6678 lower_lastprivate_clauses (clauses, cond, &stmts, ctx);
6679 if (!gimple_seq_empty_p (stmts))
6681 gimple_seq_add_seq (&stmts, *dlist);
6682 *dlist = stmts;
6684 /* Optimize: v = 0; is usually cheaper than v = some_other_constant. */
6685 vinit = fd->loop.n1;
6686 if (cond_code == EQ_EXPR
6687 && tree_fits_shwi_p (fd->loop.n2)
6688 && ! integer_zerop (fd->loop.n2))
6689 vinit = build_int_cst (TREE_TYPE (fd->loop.v), 0);
6690 else
6691 vinit = unshare_expr (vinit);
6693 /* Initialize the iterator variable, so that threads that don't execute
6694 any iterations don't execute the lastprivate clauses by accident. */
6695 gimplify_assign (fd->loop.v, vinit, body_p);
6700 /* Lower code for an OMP loop directive. */
6702 static void
6703 lower_omp_for (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6705 tree *rhs_p, block;
6706 struct omp_for_data fd, *fdp = NULL;
6707 gomp_for *stmt = as_a <gomp_for *> (gsi_stmt (*gsi_p));
6708 gbind *new_stmt;
6709 gimple_seq omp_for_body, body, dlist;
6710 gimple_seq oacc_head = NULL, oacc_tail = NULL;
6711 size_t i;
6713 push_gimplify_context ();
6715 lower_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
6717 block = make_node (BLOCK);
6718 new_stmt = gimple_build_bind (NULL, NULL, block);
6719 /* Replace at gsi right away, so that 'stmt' is no member
6720 of a sequence anymore as we're going to add to a different
6721 one below. */
6722 gsi_replace (gsi_p, new_stmt, true);
6724 /* Move declaration of temporaries in the loop body before we make
6725 it go away. */
6726 omp_for_body = gimple_omp_body (stmt);
6727 if (!gimple_seq_empty_p (omp_for_body)
6728 && gimple_code (gimple_seq_first_stmt (omp_for_body)) == GIMPLE_BIND)
6730 gbind *inner_bind
6731 = as_a <gbind *> (gimple_seq_first_stmt (omp_for_body));
6732 tree vars = gimple_bind_vars (inner_bind);
6733 gimple_bind_append_vars (new_stmt, vars);
6734 /* bind_vars/BLOCK_VARS are being moved to new_stmt/block, don't
6735 keep them on the inner_bind and it's block. */
6736 gimple_bind_set_vars (inner_bind, NULL_TREE);
6737 if (gimple_bind_block (inner_bind))
6738 BLOCK_VARS (gimple_bind_block (inner_bind)) = NULL_TREE;
6741 if (gimple_omp_for_combined_into_p (stmt))
6743 omp_extract_for_data (stmt, &fd, NULL);
6744 fdp = &fd;
6746 /* We need two temporaries with fd.loop.v type (istart/iend)
6747 and then (fd.collapse - 1) temporaries with the same
6748 type for count2 ... countN-1 vars if not constant. */
6749 size_t count = 2;
6750 tree type = fd.iter_type;
6751 if (fd.collapse > 1
6752 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
6753 count += fd.collapse - 1;
6754 bool taskreg_for
6755 = (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR
6756 || gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP);
6757 tree outerc = NULL, *pc = gimple_omp_for_clauses_ptr (stmt);
6758 tree simtc = NULL;
6759 tree clauses = *pc;
6760 if (taskreg_for)
6761 outerc
6762 = omp_find_clause (gimple_omp_taskreg_clauses (ctx->outer->stmt),
6763 OMP_CLAUSE__LOOPTEMP_);
6764 if (ctx->simt_stmt)
6765 simtc = omp_find_clause (gimple_omp_for_clauses (ctx->simt_stmt),
6766 OMP_CLAUSE__LOOPTEMP_);
6767 for (i = 0; i < count; i++)
6769 tree temp;
6770 if (taskreg_for)
6772 gcc_assert (outerc);
6773 temp = lookup_decl (OMP_CLAUSE_DECL (outerc), ctx->outer);
6774 outerc = omp_find_clause (OMP_CLAUSE_CHAIN (outerc),
6775 OMP_CLAUSE__LOOPTEMP_);
6777 else
6779 /* If there are 2 adjacent SIMD stmts, one with _simt_
6780 clause, another without, make sure they have the same
6781 decls in _looptemp_ clauses, because the outer stmt
6782 they are combined into will look up just one inner_stmt. */
6783 if (ctx->simt_stmt)
6784 temp = OMP_CLAUSE_DECL (simtc);
6785 else
6786 temp = create_tmp_var (type);
6787 insert_decl_map (&ctx->outer->cb, temp, temp);
6789 *pc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
6790 OMP_CLAUSE_DECL (*pc) = temp;
6791 pc = &OMP_CLAUSE_CHAIN (*pc);
6792 if (ctx->simt_stmt)
6793 simtc = omp_find_clause (OMP_CLAUSE_CHAIN (simtc),
6794 OMP_CLAUSE__LOOPTEMP_);
6796 *pc = clauses;
6799 /* The pre-body and input clauses go before the lowered GIMPLE_OMP_FOR. */
6800 dlist = NULL;
6801 body = NULL;
6802 lower_rec_input_clauses (gimple_omp_for_clauses (stmt), &body, &dlist, ctx,
6803 fdp);
6804 gimple_seq_add_seq (&body, gimple_omp_for_pre_body (stmt));
6806 lower_omp (gimple_omp_body_ptr (stmt), ctx);
6808 /* Lower the header expressions. At this point, we can assume that
6809 the header is of the form:
6811 #pragma omp for (V = VAL1; V {<|>|<=|>=} VAL2; V = V [+-] VAL3)
6813 We just need to make sure that VAL1, VAL2 and VAL3 are lowered
6814 using the .omp_data_s mapping, if needed. */
6815 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
6817 rhs_p = gimple_omp_for_initial_ptr (stmt, i);
6818 if (!is_gimple_min_invariant (*rhs_p))
6819 *rhs_p = get_formal_tmp_var (*rhs_p, &body);
6821 rhs_p = gimple_omp_for_final_ptr (stmt, i);
6822 if (!is_gimple_min_invariant (*rhs_p))
6823 *rhs_p = get_formal_tmp_var (*rhs_p, &body);
6825 rhs_p = &TREE_OPERAND (gimple_omp_for_incr (stmt, i), 1);
6826 if (!is_gimple_min_invariant (*rhs_p))
6827 *rhs_p = get_formal_tmp_var (*rhs_p, &body);
6830 /* Once lowered, extract the bounds and clauses. */
6831 omp_extract_for_data (stmt, &fd, NULL);
6833 if (is_gimple_omp_oacc (ctx->stmt)
6834 && !ctx_in_oacc_kernels_region (ctx))
6835 lower_oacc_head_tail (gimple_location (stmt),
6836 gimple_omp_for_clauses (stmt),
6837 &oacc_head, &oacc_tail, ctx);
6839 /* Add OpenACC partitioning and reduction markers just before the loop. */
6840 if (oacc_head)
6841 gimple_seq_add_seq (&body, oacc_head);
6843 lower_omp_for_lastprivate (&fd, &body, &dlist, ctx);
6845 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
6846 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
6847 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
6848 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
6850 OMP_CLAUSE_DECL (c) = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
6851 if (DECL_P (OMP_CLAUSE_LINEAR_STEP (c)))
6852 OMP_CLAUSE_LINEAR_STEP (c)
6853 = maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_LINEAR_STEP (c),
6854 ctx);
6857 bool phony_loop = (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_GRID_LOOP
6858 && gimple_omp_for_grid_phony (stmt));
6859 if (!phony_loop)
6860 gimple_seq_add_stmt (&body, stmt);
6861 gimple_seq_add_seq (&body, gimple_omp_body (stmt));
6863 if (!phony_loop)
6864 gimple_seq_add_stmt (&body, gimple_build_omp_continue (fd.loop.v,
6865 fd.loop.v));
6867 /* After the loop, add exit clauses. */
6868 lower_reduction_clauses (gimple_omp_for_clauses (stmt), &body, ctx);
6870 if (ctx->cancellable)
6871 gimple_seq_add_stmt (&body, gimple_build_label (ctx->cancel_label));
6873 gimple_seq_add_seq (&body, dlist);
6875 body = maybe_catch_exception (body);
6877 if (!phony_loop)
6879 /* Region exit marker goes at the end of the loop body. */
6880 gimple_seq_add_stmt (&body, gimple_build_omp_return (fd.have_nowait));
6881 maybe_add_implicit_barrier_cancel (ctx, &body);
6884 /* Add OpenACC joining and reduction markers just after the loop. */
6885 if (oacc_tail)
6886 gimple_seq_add_seq (&body, oacc_tail);
6888 pop_gimplify_context (new_stmt);
6890 gimple_bind_append_vars (new_stmt, ctx->block_vars);
6891 BLOCK_VARS (block) = gimple_bind_vars (new_stmt);
6892 if (BLOCK_VARS (block))
6893 TREE_USED (block) = 1;
6895 gimple_bind_set_body (new_stmt, body);
6896 gimple_omp_set_body (stmt, NULL);
6897 gimple_omp_for_set_pre_body (stmt, NULL);
6900 /* Callback for walk_stmts. Check if the current statement only contains
6901 GIMPLE_OMP_FOR or GIMPLE_OMP_SECTIONS. */
6903 static tree
6904 check_combined_parallel (gimple_stmt_iterator *gsi_p,
6905 bool *handled_ops_p,
6906 struct walk_stmt_info *wi)
6908 int *info = (int *) wi->info;
6909 gimple *stmt = gsi_stmt (*gsi_p);
6911 *handled_ops_p = true;
6912 switch (gimple_code (stmt))
6914 WALK_SUBSTMTS;
6916 case GIMPLE_OMP_FOR:
6917 case GIMPLE_OMP_SECTIONS:
6918 *info = *info == 0 ? 1 : -1;
6919 break;
6920 default:
6921 *info = -1;
6922 break;
6924 return NULL;
6927 struct omp_taskcopy_context
6929 /* This field must be at the beginning, as we do "inheritance": Some
6930 callback functions for tree-inline.c (e.g., omp_copy_decl)
6931 receive a copy_body_data pointer that is up-casted to an
6932 omp_context pointer. */
6933 copy_body_data cb;
6934 omp_context *ctx;
6937 static tree
6938 task_copyfn_copy_decl (tree var, copy_body_data *cb)
6940 struct omp_taskcopy_context *tcctx = (struct omp_taskcopy_context *) cb;
6942 if (splay_tree_lookup (tcctx->ctx->sfield_map, (splay_tree_key) var))
6943 return create_tmp_var (TREE_TYPE (var));
6945 return var;
6948 static tree
6949 task_copyfn_remap_type (struct omp_taskcopy_context *tcctx, tree orig_type)
6951 tree name, new_fields = NULL, type, f;
6953 type = lang_hooks.types.make_type (RECORD_TYPE);
6954 name = DECL_NAME (TYPE_NAME (orig_type));
6955 name = build_decl (gimple_location (tcctx->ctx->stmt),
6956 TYPE_DECL, name, type);
6957 TYPE_NAME (type) = name;
6959 for (f = TYPE_FIELDS (orig_type); f ; f = TREE_CHAIN (f))
6961 tree new_f = copy_node (f);
6962 DECL_CONTEXT (new_f) = type;
6963 TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &tcctx->cb);
6964 TREE_CHAIN (new_f) = new_fields;
6965 walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &tcctx->cb, NULL);
6966 walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r, &tcctx->cb, NULL);
6967 walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
6968 &tcctx->cb, NULL);
6969 new_fields = new_f;
6970 tcctx->cb.decl_map->put (f, new_f);
6972 TYPE_FIELDS (type) = nreverse (new_fields);
6973 layout_type (type);
6974 return type;
6977 /* Create task copyfn. */
6979 static void
6980 create_task_copyfn (gomp_task *task_stmt, omp_context *ctx)
6982 struct function *child_cfun;
6983 tree child_fn, t, c, src, dst, f, sf, arg, sarg, decl;
6984 tree record_type, srecord_type, bind, list;
6985 bool record_needs_remap = false, srecord_needs_remap = false;
6986 splay_tree_node n;
6987 struct omp_taskcopy_context tcctx;
6988 location_t loc = gimple_location (task_stmt);
6990 child_fn = gimple_omp_task_copy_fn (task_stmt);
6991 child_cfun = DECL_STRUCT_FUNCTION (child_fn);
6992 gcc_assert (child_cfun->cfg == NULL);
6993 DECL_SAVED_TREE (child_fn) = alloc_stmt_list ();
6995 /* Reset DECL_CONTEXT on function arguments. */
6996 for (t = DECL_ARGUMENTS (child_fn); t; t = DECL_CHAIN (t))
6997 DECL_CONTEXT (t) = child_fn;
6999 /* Populate the function. */
7000 push_gimplify_context ();
7001 push_cfun (child_cfun);
7003 bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
7004 TREE_SIDE_EFFECTS (bind) = 1;
7005 list = NULL;
7006 DECL_SAVED_TREE (child_fn) = bind;
7007 DECL_SOURCE_LOCATION (child_fn) = gimple_location (task_stmt);
7009 /* Remap src and dst argument types if needed. */
7010 record_type = ctx->record_type;
7011 srecord_type = ctx->srecord_type;
7012 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
7013 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
7015 record_needs_remap = true;
7016 break;
7018 for (f = TYPE_FIELDS (srecord_type); f ; f = DECL_CHAIN (f))
7019 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
7021 srecord_needs_remap = true;
7022 break;
7025 if (record_needs_remap || srecord_needs_remap)
7027 memset (&tcctx, '\0', sizeof (tcctx));
7028 tcctx.cb.src_fn = ctx->cb.src_fn;
7029 tcctx.cb.dst_fn = child_fn;
7030 tcctx.cb.src_node = cgraph_node::get (tcctx.cb.src_fn);
7031 gcc_checking_assert (tcctx.cb.src_node);
7032 tcctx.cb.dst_node = tcctx.cb.src_node;
7033 tcctx.cb.src_cfun = ctx->cb.src_cfun;
7034 tcctx.cb.copy_decl = task_copyfn_copy_decl;
7035 tcctx.cb.eh_lp_nr = 0;
7036 tcctx.cb.transform_call_graph_edges = CB_CGE_MOVE;
7037 tcctx.cb.decl_map = new hash_map<tree, tree>;
7038 tcctx.ctx = ctx;
7040 if (record_needs_remap)
7041 record_type = task_copyfn_remap_type (&tcctx, record_type);
7042 if (srecord_needs_remap)
7043 srecord_type = task_copyfn_remap_type (&tcctx, srecord_type);
7045 else
7046 tcctx.cb.decl_map = NULL;
7048 arg = DECL_ARGUMENTS (child_fn);
7049 TREE_TYPE (arg) = build_pointer_type (record_type);
7050 sarg = DECL_CHAIN (arg);
7051 TREE_TYPE (sarg) = build_pointer_type (srecord_type);
7053 /* First pass: initialize temporaries used in record_type and srecord_type
7054 sizes and field offsets. */
7055 if (tcctx.cb.decl_map)
7056 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
7057 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7059 tree *p;
7061 decl = OMP_CLAUSE_DECL (c);
7062 p = tcctx.cb.decl_map->get (decl);
7063 if (p == NULL)
7064 continue;
7065 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
7066 sf = (tree) n->value;
7067 sf = *tcctx.cb.decl_map->get (sf);
7068 src = build_simple_mem_ref_loc (loc, sarg);
7069 src = omp_build_component_ref (src, sf);
7070 t = build2 (MODIFY_EXPR, TREE_TYPE (*p), *p, src);
7071 append_to_statement_list (t, &list);
7074 /* Second pass: copy shared var pointers and copy construct non-VLA
7075 firstprivate vars. */
7076 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
7077 switch (OMP_CLAUSE_CODE (c))
7079 splay_tree_key key;
7080 case OMP_CLAUSE_SHARED:
7081 decl = OMP_CLAUSE_DECL (c);
7082 key = (splay_tree_key) decl;
7083 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
7084 key = (splay_tree_key) &DECL_UID (decl);
7085 n = splay_tree_lookup (ctx->field_map, key);
7086 if (n == NULL)
7087 break;
7088 f = (tree) n->value;
7089 if (tcctx.cb.decl_map)
7090 f = *tcctx.cb.decl_map->get (f);
7091 n = splay_tree_lookup (ctx->sfield_map, key);
7092 sf = (tree) n->value;
7093 if (tcctx.cb.decl_map)
7094 sf = *tcctx.cb.decl_map->get (sf);
7095 src = build_simple_mem_ref_loc (loc, sarg);
7096 src = omp_build_component_ref (src, sf);
7097 dst = build_simple_mem_ref_loc (loc, arg);
7098 dst = omp_build_component_ref (dst, f);
7099 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
7100 append_to_statement_list (t, &list);
7101 break;
7102 case OMP_CLAUSE_FIRSTPRIVATE:
7103 decl = OMP_CLAUSE_DECL (c);
7104 if (is_variable_sized (decl))
7105 break;
7106 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
7107 if (n == NULL)
7108 break;
7109 f = (tree) n->value;
7110 if (tcctx.cb.decl_map)
7111 f = *tcctx.cb.decl_map->get (f);
7112 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
7113 if (n != NULL)
7115 sf = (tree) n->value;
7116 if (tcctx.cb.decl_map)
7117 sf = *tcctx.cb.decl_map->get (sf);
7118 src = build_simple_mem_ref_loc (loc, sarg);
7119 src = omp_build_component_ref (src, sf);
7120 if (use_pointer_for_field (decl, NULL) || omp_is_reference (decl))
7121 src = build_simple_mem_ref_loc (loc, src);
7123 else
7124 src = decl;
7125 dst = build_simple_mem_ref_loc (loc, arg);
7126 dst = omp_build_component_ref (dst, f);
7127 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
7128 append_to_statement_list (t, &list);
7129 break;
7130 case OMP_CLAUSE_PRIVATE:
7131 if (! OMP_CLAUSE_PRIVATE_OUTER_REF (c))
7132 break;
7133 decl = OMP_CLAUSE_DECL (c);
7134 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
7135 f = (tree) n->value;
7136 if (tcctx.cb.decl_map)
7137 f = *tcctx.cb.decl_map->get (f);
7138 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
7139 if (n != NULL)
7141 sf = (tree) n->value;
7142 if (tcctx.cb.decl_map)
7143 sf = *tcctx.cb.decl_map->get (sf);
7144 src = build_simple_mem_ref_loc (loc, sarg);
7145 src = omp_build_component_ref (src, sf);
7146 if (use_pointer_for_field (decl, NULL))
7147 src = build_simple_mem_ref_loc (loc, src);
7149 else
7150 src = decl;
7151 dst = build_simple_mem_ref_loc (loc, arg);
7152 dst = omp_build_component_ref (dst, f);
7153 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
7154 append_to_statement_list (t, &list);
7155 break;
7156 default:
7157 break;
7160 /* Last pass: handle VLA firstprivates. */
7161 if (tcctx.cb.decl_map)
7162 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
7163 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7165 tree ind, ptr, df;
7167 decl = OMP_CLAUSE_DECL (c);
7168 if (!is_variable_sized (decl))
7169 continue;
7170 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
7171 if (n == NULL)
7172 continue;
7173 f = (tree) n->value;
7174 f = *tcctx.cb.decl_map->get (f);
7175 gcc_assert (DECL_HAS_VALUE_EXPR_P (decl));
7176 ind = DECL_VALUE_EXPR (decl);
7177 gcc_assert (TREE_CODE (ind) == INDIRECT_REF);
7178 gcc_assert (DECL_P (TREE_OPERAND (ind, 0)));
7179 n = splay_tree_lookup (ctx->sfield_map,
7180 (splay_tree_key) TREE_OPERAND (ind, 0));
7181 sf = (tree) n->value;
7182 sf = *tcctx.cb.decl_map->get (sf);
7183 src = build_simple_mem_ref_loc (loc, sarg);
7184 src = omp_build_component_ref (src, sf);
7185 src = build_simple_mem_ref_loc (loc, src);
7186 dst = build_simple_mem_ref_loc (loc, arg);
7187 dst = omp_build_component_ref (dst, f);
7188 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
7189 append_to_statement_list (t, &list);
7190 n = splay_tree_lookup (ctx->field_map,
7191 (splay_tree_key) TREE_OPERAND (ind, 0));
7192 df = (tree) n->value;
7193 df = *tcctx.cb.decl_map->get (df);
7194 ptr = build_simple_mem_ref_loc (loc, arg);
7195 ptr = omp_build_component_ref (ptr, df);
7196 t = build2 (MODIFY_EXPR, TREE_TYPE (ptr), ptr,
7197 build_fold_addr_expr_loc (loc, dst));
7198 append_to_statement_list (t, &list);
7201 t = build1 (RETURN_EXPR, void_type_node, NULL);
7202 append_to_statement_list (t, &list);
7204 if (tcctx.cb.decl_map)
7205 delete tcctx.cb.decl_map;
7206 pop_gimplify_context (NULL);
7207 BIND_EXPR_BODY (bind) = list;
7208 pop_cfun ();
7211 static void
7212 lower_depend_clauses (tree *pclauses, gimple_seq *iseq, gimple_seq *oseq)
7214 tree c, clauses;
7215 gimple *g;
7216 size_t n_in = 0, n_out = 0, idx = 2, i;
7218 clauses = omp_find_clause (*pclauses, OMP_CLAUSE_DEPEND);
7219 gcc_assert (clauses);
7220 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
7221 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
7222 switch (OMP_CLAUSE_DEPEND_KIND (c))
7224 case OMP_CLAUSE_DEPEND_IN:
7225 n_in++;
7226 break;
7227 case OMP_CLAUSE_DEPEND_OUT:
7228 case OMP_CLAUSE_DEPEND_INOUT:
7229 n_out++;
7230 break;
7231 case OMP_CLAUSE_DEPEND_SOURCE:
7232 case OMP_CLAUSE_DEPEND_SINK:
7233 /* FALLTHRU */
7234 default:
7235 gcc_unreachable ();
7237 tree type = build_array_type_nelts (ptr_type_node, n_in + n_out + 2);
7238 tree array = create_tmp_var (type);
7239 TREE_ADDRESSABLE (array) = 1;
7240 tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
7241 NULL_TREE);
7242 g = gimple_build_assign (r, build_int_cst (ptr_type_node, n_in + n_out));
7243 gimple_seq_add_stmt (iseq, g);
7244 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
7245 NULL_TREE);
7246 g = gimple_build_assign (r, build_int_cst (ptr_type_node, n_out));
7247 gimple_seq_add_stmt (iseq, g);
7248 for (i = 0; i < 2; i++)
7250 if ((i ? n_in : n_out) == 0)
7251 continue;
7252 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
7253 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
7254 && ((OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_IN) ^ i))
7256 tree t = OMP_CLAUSE_DECL (c);
7257 t = fold_convert (ptr_type_node, t);
7258 gimplify_expr (&t, iseq, NULL, is_gimple_val, fb_rvalue);
7259 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (idx++),
7260 NULL_TREE, NULL_TREE);
7261 g = gimple_build_assign (r, t);
7262 gimple_seq_add_stmt (iseq, g);
7265 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
7266 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
7267 OMP_CLAUSE_CHAIN (c) = *pclauses;
7268 *pclauses = c;
7269 tree clobber = build_constructor (type, NULL);
7270 TREE_THIS_VOLATILE (clobber) = 1;
7271 g = gimple_build_assign (array, clobber);
7272 gimple_seq_add_stmt (oseq, g);
7275 /* Lower the OpenMP parallel or task directive in the current statement
7276 in GSI_P. CTX holds context information for the directive. */
7278 static void
7279 lower_omp_taskreg (gimple_stmt_iterator *gsi_p, omp_context *ctx)
7281 tree clauses;
7282 tree child_fn, t;
7283 gimple *stmt = gsi_stmt (*gsi_p);
7284 gbind *par_bind, *bind, *dep_bind = NULL;
7285 gimple_seq par_body, olist, ilist, par_olist, par_rlist, par_ilist, new_body;
7286 location_t loc = gimple_location (stmt);
7288 clauses = gimple_omp_taskreg_clauses (stmt);
7289 par_bind
7290 = as_a <gbind *> (gimple_seq_first_stmt (gimple_omp_body (stmt)));
7291 par_body = gimple_bind_body (par_bind);
7292 child_fn = ctx->cb.dst_fn;
7293 if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL
7294 && !gimple_omp_parallel_combined_p (stmt))
7296 struct walk_stmt_info wi;
7297 int ws_num = 0;
7299 memset (&wi, 0, sizeof (wi));
7300 wi.info = &ws_num;
7301 wi.val_only = true;
7302 walk_gimple_seq (par_body, check_combined_parallel, NULL, &wi);
7303 if (ws_num == 1)
7304 gimple_omp_parallel_set_combined_p (stmt, true);
7306 gimple_seq dep_ilist = NULL;
7307 gimple_seq dep_olist = NULL;
7308 if (gimple_code (stmt) == GIMPLE_OMP_TASK
7309 && omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
7311 push_gimplify_context ();
7312 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
7313 lower_depend_clauses (gimple_omp_task_clauses_ptr (stmt),
7314 &dep_ilist, &dep_olist);
7317 if (ctx->srecord_type)
7318 create_task_copyfn (as_a <gomp_task *> (stmt), ctx);
7320 push_gimplify_context ();
7322 par_olist = NULL;
7323 par_ilist = NULL;
7324 par_rlist = NULL;
7325 bool phony_construct = gimple_code (stmt) == GIMPLE_OMP_PARALLEL
7326 && gimple_omp_parallel_grid_phony (as_a <gomp_parallel *> (stmt));
7327 if (phony_construct && ctx->record_type)
7329 gcc_checking_assert (!ctx->receiver_decl);
7330 ctx->receiver_decl = create_tmp_var
7331 (build_reference_type (ctx->record_type), ".omp_rec");
7333 lower_rec_input_clauses (clauses, &par_ilist, &par_olist, ctx, NULL);
7334 lower_omp (&par_body, ctx);
7335 if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL)
7336 lower_reduction_clauses (clauses, &par_rlist, ctx);
7338 /* Declare all the variables created by mapping and the variables
7339 declared in the scope of the parallel body. */
7340 record_vars_into (ctx->block_vars, child_fn);
7341 record_vars_into (gimple_bind_vars (par_bind), child_fn);
7343 if (ctx->record_type)
7345 ctx->sender_decl
7346 = create_tmp_var (ctx->srecord_type ? ctx->srecord_type
7347 : ctx->record_type, ".omp_data_o");
7348 DECL_NAMELESS (ctx->sender_decl) = 1;
7349 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
7350 gimple_omp_taskreg_set_data_arg (stmt, ctx->sender_decl);
7353 olist = NULL;
7354 ilist = NULL;
7355 lower_send_clauses (clauses, &ilist, &olist, ctx);
7356 lower_send_shared_vars (&ilist, &olist, ctx);
7358 if (ctx->record_type)
7360 tree clobber = build_constructor (TREE_TYPE (ctx->sender_decl), NULL);
7361 TREE_THIS_VOLATILE (clobber) = 1;
7362 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
7363 clobber));
7366 /* Once all the expansions are done, sequence all the different
7367 fragments inside gimple_omp_body. */
7369 new_body = NULL;
7371 if (ctx->record_type)
7373 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
7374 /* fixup_child_record_type might have changed receiver_decl's type. */
7375 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
7376 gimple_seq_add_stmt (&new_body,
7377 gimple_build_assign (ctx->receiver_decl, t));
7380 gimple_seq_add_seq (&new_body, par_ilist);
7381 gimple_seq_add_seq (&new_body, par_body);
7382 gimple_seq_add_seq (&new_body, par_rlist);
7383 if (ctx->cancellable)
7384 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
7385 gimple_seq_add_seq (&new_body, par_olist);
7386 new_body = maybe_catch_exception (new_body);
7387 if (gimple_code (stmt) == GIMPLE_OMP_TASK)
7388 gimple_seq_add_stmt (&new_body,
7389 gimple_build_omp_continue (integer_zero_node,
7390 integer_zero_node));
7391 if (!phony_construct)
7393 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
7394 gimple_omp_set_body (stmt, new_body);
7397 bind = gimple_build_bind (NULL, NULL, gimple_bind_block (par_bind));
7398 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
7399 gimple_bind_add_seq (bind, ilist);
7400 if (!phony_construct)
7401 gimple_bind_add_stmt (bind, stmt);
7402 else
7403 gimple_bind_add_seq (bind, new_body);
7404 gimple_bind_add_seq (bind, olist);
7406 pop_gimplify_context (NULL);
7408 if (dep_bind)
7410 gimple_bind_add_seq (dep_bind, dep_ilist);
7411 gimple_bind_add_stmt (dep_bind, bind);
7412 gimple_bind_add_seq (dep_bind, dep_olist);
7413 pop_gimplify_context (dep_bind);
7417 /* Lower the GIMPLE_OMP_TARGET in the current statement
7418 in GSI_P. CTX holds context information for the directive. */
7420 static void
7421 lower_omp_target (gimple_stmt_iterator *gsi_p, omp_context *ctx)
7423 tree clauses;
7424 tree child_fn, t, c;
7425 gomp_target *stmt = as_a <gomp_target *> (gsi_stmt (*gsi_p));
7426 gbind *tgt_bind, *bind, *dep_bind = NULL;
7427 gimple_seq tgt_body, olist, ilist, fplist, new_body;
7428 location_t loc = gimple_location (stmt);
7429 bool offloaded, data_region;
7430 unsigned int map_cnt = 0;
7432 offloaded = is_gimple_omp_offloaded (stmt);
7433 switch (gimple_omp_target_kind (stmt))
7435 case GF_OMP_TARGET_KIND_REGION:
7436 case GF_OMP_TARGET_KIND_UPDATE:
7437 case GF_OMP_TARGET_KIND_ENTER_DATA:
7438 case GF_OMP_TARGET_KIND_EXIT_DATA:
7439 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
7440 case GF_OMP_TARGET_KIND_OACC_KERNELS:
7441 case GF_OMP_TARGET_KIND_OACC_UPDATE:
7442 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
7443 case GF_OMP_TARGET_KIND_OACC_DECLARE:
7444 data_region = false;
7445 break;
7446 case GF_OMP_TARGET_KIND_DATA:
7447 case GF_OMP_TARGET_KIND_OACC_DATA:
7448 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
7449 data_region = true;
7450 break;
7451 default:
7452 gcc_unreachable ();
7455 clauses = gimple_omp_target_clauses (stmt);
7457 gimple_seq dep_ilist = NULL;
7458 gimple_seq dep_olist = NULL;
7459 if (omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
7461 push_gimplify_context ();
7462 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
7463 lower_depend_clauses (gimple_omp_target_clauses_ptr (stmt),
7464 &dep_ilist, &dep_olist);
7467 tgt_bind = NULL;
7468 tgt_body = NULL;
7469 if (offloaded)
7471 tgt_bind = gimple_seq_first_stmt_as_a_bind (gimple_omp_body (stmt));
7472 tgt_body = gimple_bind_body (tgt_bind);
7474 else if (data_region)
7475 tgt_body = gimple_omp_body (stmt);
7476 child_fn = ctx->cb.dst_fn;
7478 push_gimplify_context ();
7479 fplist = NULL;
7481 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7482 switch (OMP_CLAUSE_CODE (c))
7484 tree var, x;
7486 default:
7487 break;
7488 case OMP_CLAUSE_MAP:
7489 #if CHECKING_P
7490 /* First check what we're prepared to handle in the following. */
7491 switch (OMP_CLAUSE_MAP_KIND (c))
7493 case GOMP_MAP_ALLOC:
7494 case GOMP_MAP_TO:
7495 case GOMP_MAP_FROM:
7496 case GOMP_MAP_TOFROM:
7497 case GOMP_MAP_POINTER:
7498 case GOMP_MAP_TO_PSET:
7499 case GOMP_MAP_DELETE:
7500 case GOMP_MAP_RELEASE:
7501 case GOMP_MAP_ALWAYS_TO:
7502 case GOMP_MAP_ALWAYS_FROM:
7503 case GOMP_MAP_ALWAYS_TOFROM:
7504 case GOMP_MAP_FIRSTPRIVATE_POINTER:
7505 case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
7506 case GOMP_MAP_STRUCT:
7507 case GOMP_MAP_ALWAYS_POINTER:
7508 break;
7509 case GOMP_MAP_FORCE_ALLOC:
7510 case GOMP_MAP_FORCE_TO:
7511 case GOMP_MAP_FORCE_FROM:
7512 case GOMP_MAP_FORCE_TOFROM:
7513 case GOMP_MAP_FORCE_PRESENT:
7514 case GOMP_MAP_FORCE_DEVICEPTR:
7515 case GOMP_MAP_DEVICE_RESIDENT:
7516 case GOMP_MAP_LINK:
7517 gcc_assert (is_gimple_omp_oacc (stmt));
7518 break;
7519 default:
7520 gcc_unreachable ();
7522 #endif
7523 /* FALLTHRU */
7524 case OMP_CLAUSE_TO:
7525 case OMP_CLAUSE_FROM:
7526 oacc_firstprivate:
7527 var = OMP_CLAUSE_DECL (c);
7528 if (!DECL_P (var))
7530 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP
7531 || (!OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
7532 && (OMP_CLAUSE_MAP_KIND (c)
7533 != GOMP_MAP_FIRSTPRIVATE_POINTER)))
7534 map_cnt++;
7535 continue;
7538 if (DECL_SIZE (var)
7539 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
7541 tree var2 = DECL_VALUE_EXPR (var);
7542 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
7543 var2 = TREE_OPERAND (var2, 0);
7544 gcc_assert (DECL_P (var2));
7545 var = var2;
7548 if (offloaded
7549 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7550 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
7551 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
7553 if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
7555 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx))
7556 && varpool_node::get_create (var)->offloadable)
7557 continue;
7559 tree type = build_pointer_type (TREE_TYPE (var));
7560 tree new_var = lookup_decl (var, ctx);
7561 x = create_tmp_var_raw (type, get_name (new_var));
7562 gimple_add_tmp_var (x);
7563 x = build_simple_mem_ref (x);
7564 SET_DECL_VALUE_EXPR (new_var, x);
7565 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7567 continue;
7570 if (!maybe_lookup_field (var, ctx))
7571 continue;
7573 /* Don't remap oacc parallel reduction variables, because the
7574 intermediate result must be local to each gang. */
7575 if (offloaded && !(OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7576 && OMP_CLAUSE_MAP_IN_REDUCTION (c)))
7578 x = build_receiver_ref (var, true, ctx);
7579 tree new_var = lookup_decl (var, ctx);
7581 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7582 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
7583 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
7584 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
7585 x = build_simple_mem_ref (x);
7586 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7588 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
7589 if (omp_is_reference (new_var))
7591 /* Create a local object to hold the instance
7592 value. */
7593 tree type = TREE_TYPE (TREE_TYPE (new_var));
7594 const char *id = IDENTIFIER_POINTER (DECL_NAME (new_var));
7595 tree inst = create_tmp_var (type, id);
7596 gimplify_assign (inst, fold_indirect_ref (x), &fplist);
7597 x = build_fold_addr_expr (inst);
7599 gimplify_assign (new_var, x, &fplist);
7601 else if (DECL_P (new_var))
7603 SET_DECL_VALUE_EXPR (new_var, x);
7604 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7606 else
7607 gcc_unreachable ();
7609 map_cnt++;
7610 break;
7612 case OMP_CLAUSE_FIRSTPRIVATE:
7613 if (is_oacc_parallel (ctx))
7614 goto oacc_firstprivate;
7615 map_cnt++;
7616 var = OMP_CLAUSE_DECL (c);
7617 if (!omp_is_reference (var)
7618 && !is_gimple_reg_type (TREE_TYPE (var)))
7620 tree new_var = lookup_decl (var, ctx);
7621 if (is_variable_sized (var))
7623 tree pvar = DECL_VALUE_EXPR (var);
7624 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
7625 pvar = TREE_OPERAND (pvar, 0);
7626 gcc_assert (DECL_P (pvar));
7627 tree new_pvar = lookup_decl (pvar, ctx);
7628 x = build_fold_indirect_ref (new_pvar);
7629 TREE_THIS_NOTRAP (x) = 1;
7631 else
7632 x = build_receiver_ref (var, true, ctx);
7633 SET_DECL_VALUE_EXPR (new_var, x);
7634 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7636 break;
7638 case OMP_CLAUSE_PRIVATE:
7639 if (is_gimple_omp_oacc (ctx->stmt))
7640 break;
7641 var = OMP_CLAUSE_DECL (c);
7642 if (is_variable_sized (var))
7644 tree new_var = lookup_decl (var, ctx);
7645 tree pvar = DECL_VALUE_EXPR (var);
7646 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
7647 pvar = TREE_OPERAND (pvar, 0);
7648 gcc_assert (DECL_P (pvar));
7649 tree new_pvar = lookup_decl (pvar, ctx);
7650 x = build_fold_indirect_ref (new_pvar);
7651 TREE_THIS_NOTRAP (x) = 1;
7652 SET_DECL_VALUE_EXPR (new_var, x);
7653 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7655 break;
7657 case OMP_CLAUSE_USE_DEVICE_PTR:
7658 case OMP_CLAUSE_IS_DEVICE_PTR:
7659 var = OMP_CLAUSE_DECL (c);
7660 map_cnt++;
7661 if (is_variable_sized (var))
7663 tree new_var = lookup_decl (var, ctx);
7664 tree pvar = DECL_VALUE_EXPR (var);
7665 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
7666 pvar = TREE_OPERAND (pvar, 0);
7667 gcc_assert (DECL_P (pvar));
7668 tree new_pvar = lookup_decl (pvar, ctx);
7669 x = build_fold_indirect_ref (new_pvar);
7670 TREE_THIS_NOTRAP (x) = 1;
7671 SET_DECL_VALUE_EXPR (new_var, x);
7672 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7674 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
7676 tree new_var = lookup_decl (var, ctx);
7677 tree type = build_pointer_type (TREE_TYPE (var));
7678 x = create_tmp_var_raw (type, get_name (new_var));
7679 gimple_add_tmp_var (x);
7680 x = build_simple_mem_ref (x);
7681 SET_DECL_VALUE_EXPR (new_var, x);
7682 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7684 else
7686 tree new_var = lookup_decl (var, ctx);
7687 x = create_tmp_var_raw (TREE_TYPE (new_var), get_name (new_var));
7688 gimple_add_tmp_var (x);
7689 SET_DECL_VALUE_EXPR (new_var, x);
7690 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7692 break;
7695 if (offloaded)
7697 target_nesting_level++;
7698 lower_omp (&tgt_body, ctx);
7699 target_nesting_level--;
7701 else if (data_region)
7702 lower_omp (&tgt_body, ctx);
7704 if (offloaded)
7706 /* Declare all the variables created by mapping and the variables
7707 declared in the scope of the target body. */
7708 record_vars_into (ctx->block_vars, child_fn);
7709 record_vars_into (gimple_bind_vars (tgt_bind), child_fn);
7712 olist = NULL;
7713 ilist = NULL;
7714 if (ctx->record_type)
7716 ctx->sender_decl
7717 = create_tmp_var (ctx->record_type, ".omp_data_arr");
7718 DECL_NAMELESS (ctx->sender_decl) = 1;
7719 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
7720 t = make_tree_vec (3);
7721 TREE_VEC_ELT (t, 0) = ctx->sender_decl;
7722 TREE_VEC_ELT (t, 1)
7723 = create_tmp_var (build_array_type_nelts (size_type_node, map_cnt),
7724 ".omp_data_sizes");
7725 DECL_NAMELESS (TREE_VEC_ELT (t, 1)) = 1;
7726 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 1)) = 1;
7727 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 1;
7728 tree tkind_type = short_unsigned_type_node;
7729 int talign_shift = 8;
7730 TREE_VEC_ELT (t, 2)
7731 = create_tmp_var (build_array_type_nelts (tkind_type, map_cnt),
7732 ".omp_data_kinds");
7733 DECL_NAMELESS (TREE_VEC_ELT (t, 2)) = 1;
7734 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 2)) = 1;
7735 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 1;
7736 gimple_omp_target_set_data_arg (stmt, t);
7738 vec<constructor_elt, va_gc> *vsize;
7739 vec<constructor_elt, va_gc> *vkind;
7740 vec_alloc (vsize, map_cnt);
7741 vec_alloc (vkind, map_cnt);
7742 unsigned int map_idx = 0;
7744 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7745 switch (OMP_CLAUSE_CODE (c))
7747 tree ovar, nc, s, purpose, var, x, type;
7748 unsigned int talign;
7750 default:
7751 break;
7753 case OMP_CLAUSE_MAP:
7754 case OMP_CLAUSE_TO:
7755 case OMP_CLAUSE_FROM:
7756 oacc_firstprivate_map:
7757 nc = c;
7758 ovar = OMP_CLAUSE_DECL (c);
7759 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7760 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
7761 || (OMP_CLAUSE_MAP_KIND (c)
7762 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
7763 break;
7764 if (!DECL_P (ovar))
7766 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7767 && OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
7769 gcc_checking_assert (OMP_CLAUSE_DECL (OMP_CLAUSE_CHAIN (c))
7770 == get_base_address (ovar));
7771 nc = OMP_CLAUSE_CHAIN (c);
7772 ovar = OMP_CLAUSE_DECL (nc);
7774 else
7776 tree x = build_sender_ref (ovar, ctx);
7777 tree v
7778 = build_fold_addr_expr_with_type (ovar, ptr_type_node);
7779 gimplify_assign (x, v, &ilist);
7780 nc = NULL_TREE;
7783 else
7785 if (DECL_SIZE (ovar)
7786 && TREE_CODE (DECL_SIZE (ovar)) != INTEGER_CST)
7788 tree ovar2 = DECL_VALUE_EXPR (ovar);
7789 gcc_assert (TREE_CODE (ovar2) == INDIRECT_REF);
7790 ovar2 = TREE_OPERAND (ovar2, 0);
7791 gcc_assert (DECL_P (ovar2));
7792 ovar = ovar2;
7794 if (!maybe_lookup_field (ovar, ctx))
7795 continue;
7798 talign = TYPE_ALIGN_UNIT (TREE_TYPE (ovar));
7799 if (DECL_P (ovar) && DECL_ALIGN_UNIT (ovar) > talign)
7800 talign = DECL_ALIGN_UNIT (ovar);
7801 if (nc)
7803 var = lookup_decl_in_outer_ctx (ovar, ctx);
7804 x = build_sender_ref (ovar, ctx);
7806 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7807 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
7808 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
7809 && TREE_CODE (TREE_TYPE (ovar)) == ARRAY_TYPE)
7811 gcc_assert (offloaded);
7812 tree avar
7813 = create_tmp_var (TREE_TYPE (TREE_TYPE (x)));
7814 mark_addressable (avar);
7815 gimplify_assign (avar, build_fold_addr_expr (var), &ilist);
7816 talign = DECL_ALIGN_UNIT (avar);
7817 avar = build_fold_addr_expr (avar);
7818 gimplify_assign (x, avar, &ilist);
7820 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7822 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
7823 if (!omp_is_reference (var))
7825 if (is_gimple_reg (var)
7826 && OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
7827 TREE_NO_WARNING (var) = 1;
7828 var = build_fold_addr_expr (var);
7830 else
7831 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
7832 gimplify_assign (x, var, &ilist);
7834 else if (is_gimple_reg (var))
7836 gcc_assert (offloaded);
7837 tree avar = create_tmp_var (TREE_TYPE (var));
7838 mark_addressable (avar);
7839 enum gomp_map_kind map_kind = OMP_CLAUSE_MAP_KIND (c);
7840 if (GOMP_MAP_COPY_TO_P (map_kind)
7841 || map_kind == GOMP_MAP_POINTER
7842 || map_kind == GOMP_MAP_TO_PSET
7843 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
7845 /* If we need to initialize a temporary
7846 with VAR because it is not addressable, and
7847 the variable hasn't been initialized yet, then
7848 we'll get a warning for the store to avar.
7849 Don't warn in that case, the mapping might
7850 be implicit. */
7851 TREE_NO_WARNING (var) = 1;
7852 gimplify_assign (avar, var, &ilist);
7854 avar = build_fold_addr_expr (avar);
7855 gimplify_assign (x, avar, &ilist);
7856 if ((GOMP_MAP_COPY_FROM_P (map_kind)
7857 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
7858 && !TYPE_READONLY (TREE_TYPE (var)))
7860 x = unshare_expr (x);
7861 x = build_simple_mem_ref (x);
7862 gimplify_assign (var, x, &olist);
7865 else
7867 var = build_fold_addr_expr (var);
7868 gimplify_assign (x, var, &ilist);
7871 s = NULL_TREE;
7872 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7874 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
7875 s = TREE_TYPE (ovar);
7876 if (TREE_CODE (s) == REFERENCE_TYPE)
7877 s = TREE_TYPE (s);
7878 s = TYPE_SIZE_UNIT (s);
7880 else
7881 s = OMP_CLAUSE_SIZE (c);
7882 if (s == NULL_TREE)
7883 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
7884 s = fold_convert (size_type_node, s);
7885 purpose = size_int (map_idx++);
7886 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
7887 if (TREE_CODE (s) != INTEGER_CST)
7888 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
7890 unsigned HOST_WIDE_INT tkind, tkind_zero;
7891 switch (OMP_CLAUSE_CODE (c))
7893 case OMP_CLAUSE_MAP:
7894 tkind = OMP_CLAUSE_MAP_KIND (c);
7895 tkind_zero = tkind;
7896 if (OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c))
7897 switch (tkind)
7899 case GOMP_MAP_ALLOC:
7900 case GOMP_MAP_TO:
7901 case GOMP_MAP_FROM:
7902 case GOMP_MAP_TOFROM:
7903 case GOMP_MAP_ALWAYS_TO:
7904 case GOMP_MAP_ALWAYS_FROM:
7905 case GOMP_MAP_ALWAYS_TOFROM:
7906 case GOMP_MAP_RELEASE:
7907 case GOMP_MAP_FORCE_TO:
7908 case GOMP_MAP_FORCE_FROM:
7909 case GOMP_MAP_FORCE_TOFROM:
7910 case GOMP_MAP_FORCE_PRESENT:
7911 tkind_zero = GOMP_MAP_ZERO_LEN_ARRAY_SECTION;
7912 break;
7913 case GOMP_MAP_DELETE:
7914 tkind_zero = GOMP_MAP_DELETE_ZERO_LEN_ARRAY_SECTION;
7915 default:
7916 break;
7918 if (tkind_zero != tkind)
7920 if (integer_zerop (s))
7921 tkind = tkind_zero;
7922 else if (integer_nonzerop (s))
7923 tkind_zero = tkind;
7925 break;
7926 case OMP_CLAUSE_FIRSTPRIVATE:
7927 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
7928 tkind = GOMP_MAP_TO;
7929 tkind_zero = tkind;
7930 break;
7931 case OMP_CLAUSE_TO:
7932 tkind = GOMP_MAP_TO;
7933 tkind_zero = tkind;
7934 break;
7935 case OMP_CLAUSE_FROM:
7936 tkind = GOMP_MAP_FROM;
7937 tkind_zero = tkind;
7938 break;
7939 default:
7940 gcc_unreachable ();
7942 gcc_checking_assert (tkind
7943 < (HOST_WIDE_INT_C (1U) << talign_shift));
7944 gcc_checking_assert (tkind_zero
7945 < (HOST_WIDE_INT_C (1U) << talign_shift));
7946 talign = ceil_log2 (talign);
7947 tkind |= talign << talign_shift;
7948 tkind_zero |= talign << talign_shift;
7949 gcc_checking_assert (tkind
7950 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
7951 gcc_checking_assert (tkind_zero
7952 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
7953 if (tkind == tkind_zero)
7954 x = build_int_cstu (tkind_type, tkind);
7955 else
7957 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 0;
7958 x = build3 (COND_EXPR, tkind_type,
7959 fold_build2 (EQ_EXPR, boolean_type_node,
7960 unshare_expr (s), size_zero_node),
7961 build_int_cstu (tkind_type, tkind_zero),
7962 build_int_cstu (tkind_type, tkind));
7964 CONSTRUCTOR_APPEND_ELT (vkind, purpose, x);
7965 if (nc && nc != c)
7966 c = nc;
7967 break;
7969 case OMP_CLAUSE_FIRSTPRIVATE:
7970 if (is_oacc_parallel (ctx))
7971 goto oacc_firstprivate_map;
7972 ovar = OMP_CLAUSE_DECL (c);
7973 if (omp_is_reference (ovar))
7974 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
7975 else
7976 talign = DECL_ALIGN_UNIT (ovar);
7977 var = lookup_decl_in_outer_ctx (ovar, ctx);
7978 x = build_sender_ref (ovar, ctx);
7979 tkind = GOMP_MAP_FIRSTPRIVATE;
7980 type = TREE_TYPE (ovar);
7981 if (omp_is_reference (ovar))
7982 type = TREE_TYPE (type);
7983 if ((INTEGRAL_TYPE_P (type)
7984 && TYPE_PRECISION (type) <= POINTER_SIZE)
7985 || TREE_CODE (type) == POINTER_TYPE)
7987 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
7988 tree t = var;
7989 if (omp_is_reference (var))
7990 t = build_simple_mem_ref (var);
7991 else if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
7992 TREE_NO_WARNING (var) = 1;
7993 if (TREE_CODE (type) != POINTER_TYPE)
7994 t = fold_convert (pointer_sized_int_node, t);
7995 t = fold_convert (TREE_TYPE (x), t);
7996 gimplify_assign (x, t, &ilist);
7998 else if (omp_is_reference (var))
7999 gimplify_assign (x, var, &ilist);
8000 else if (is_gimple_reg (var))
8002 tree avar = create_tmp_var (TREE_TYPE (var));
8003 mark_addressable (avar);
8004 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
8005 TREE_NO_WARNING (var) = 1;
8006 gimplify_assign (avar, var, &ilist);
8007 avar = build_fold_addr_expr (avar);
8008 gimplify_assign (x, avar, &ilist);
8010 else
8012 var = build_fold_addr_expr (var);
8013 gimplify_assign (x, var, &ilist);
8015 if (tkind == GOMP_MAP_FIRSTPRIVATE_INT)
8016 s = size_int (0);
8017 else if (omp_is_reference (ovar))
8018 s = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
8019 else
8020 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
8021 s = fold_convert (size_type_node, s);
8022 purpose = size_int (map_idx++);
8023 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
8024 if (TREE_CODE (s) != INTEGER_CST)
8025 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
8027 gcc_checking_assert (tkind
8028 < (HOST_WIDE_INT_C (1U) << talign_shift));
8029 talign = ceil_log2 (talign);
8030 tkind |= talign << talign_shift;
8031 gcc_checking_assert (tkind
8032 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
8033 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
8034 build_int_cstu (tkind_type, tkind));
8035 break;
8037 case OMP_CLAUSE_USE_DEVICE_PTR:
8038 case OMP_CLAUSE_IS_DEVICE_PTR:
8039 ovar = OMP_CLAUSE_DECL (c);
8040 var = lookup_decl_in_outer_ctx (ovar, ctx);
8041 x = build_sender_ref (ovar, ctx);
8042 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR)
8043 tkind = GOMP_MAP_USE_DEVICE_PTR;
8044 else
8045 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
8046 type = TREE_TYPE (ovar);
8047 if (TREE_CODE (type) == ARRAY_TYPE)
8048 var = build_fold_addr_expr (var);
8049 else
8051 if (omp_is_reference (ovar))
8053 type = TREE_TYPE (type);
8054 if (TREE_CODE (type) != ARRAY_TYPE)
8055 var = build_simple_mem_ref (var);
8056 var = fold_convert (TREE_TYPE (x), var);
8059 gimplify_assign (x, var, &ilist);
8060 s = size_int (0);
8061 purpose = size_int (map_idx++);
8062 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
8063 gcc_checking_assert (tkind
8064 < (HOST_WIDE_INT_C (1U) << talign_shift));
8065 gcc_checking_assert (tkind
8066 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
8067 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
8068 build_int_cstu (tkind_type, tkind));
8069 break;
8072 gcc_assert (map_idx == map_cnt);
8074 DECL_INITIAL (TREE_VEC_ELT (t, 1))
8075 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 1)), vsize);
8076 DECL_INITIAL (TREE_VEC_ELT (t, 2))
8077 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 2)), vkind);
8078 for (int i = 1; i <= 2; i++)
8079 if (!TREE_STATIC (TREE_VEC_ELT (t, i)))
8081 gimple_seq initlist = NULL;
8082 force_gimple_operand (build1 (DECL_EXPR, void_type_node,
8083 TREE_VEC_ELT (t, i)),
8084 &initlist, true, NULL_TREE);
8085 gimple_seq_add_seq (&ilist, initlist);
8087 tree clobber = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, i)),
8088 NULL);
8089 TREE_THIS_VOLATILE (clobber) = 1;
8090 gimple_seq_add_stmt (&olist,
8091 gimple_build_assign (TREE_VEC_ELT (t, i),
8092 clobber));
8095 tree clobber = build_constructor (ctx->record_type, NULL);
8096 TREE_THIS_VOLATILE (clobber) = 1;
8097 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
8098 clobber));
8101 /* Once all the expansions are done, sequence all the different
8102 fragments inside gimple_omp_body. */
8104 new_body = NULL;
8106 if (offloaded
8107 && ctx->record_type)
8109 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
8110 /* fixup_child_record_type might have changed receiver_decl's type. */
8111 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
8112 gimple_seq_add_stmt (&new_body,
8113 gimple_build_assign (ctx->receiver_decl, t));
8115 gimple_seq_add_seq (&new_body, fplist);
8117 if (offloaded || data_region)
8119 tree prev = NULL_TREE;
8120 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
8121 switch (OMP_CLAUSE_CODE (c))
8123 tree var, x;
8124 default:
8125 break;
8126 case OMP_CLAUSE_FIRSTPRIVATE:
8127 if (is_gimple_omp_oacc (ctx->stmt))
8128 break;
8129 var = OMP_CLAUSE_DECL (c);
8130 if (omp_is_reference (var)
8131 || is_gimple_reg_type (TREE_TYPE (var)))
8133 tree new_var = lookup_decl (var, ctx);
8134 tree type;
8135 type = TREE_TYPE (var);
8136 if (omp_is_reference (var))
8137 type = TREE_TYPE (type);
8138 if ((INTEGRAL_TYPE_P (type)
8139 && TYPE_PRECISION (type) <= POINTER_SIZE)
8140 || TREE_CODE (type) == POINTER_TYPE)
8142 x = build_receiver_ref (var, false, ctx);
8143 if (TREE_CODE (type) != POINTER_TYPE)
8144 x = fold_convert (pointer_sized_int_node, x);
8145 x = fold_convert (type, x);
8146 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
8147 fb_rvalue);
8148 if (omp_is_reference (var))
8150 tree v = create_tmp_var_raw (type, get_name (var));
8151 gimple_add_tmp_var (v);
8152 TREE_ADDRESSABLE (v) = 1;
8153 gimple_seq_add_stmt (&new_body,
8154 gimple_build_assign (v, x));
8155 x = build_fold_addr_expr (v);
8157 gimple_seq_add_stmt (&new_body,
8158 gimple_build_assign (new_var, x));
8160 else
8162 x = build_receiver_ref (var, !omp_is_reference (var), ctx);
8163 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
8164 fb_rvalue);
8165 gimple_seq_add_stmt (&new_body,
8166 gimple_build_assign (new_var, x));
8169 else if (is_variable_sized (var))
8171 tree pvar = DECL_VALUE_EXPR (var);
8172 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
8173 pvar = TREE_OPERAND (pvar, 0);
8174 gcc_assert (DECL_P (pvar));
8175 tree new_var = lookup_decl (pvar, ctx);
8176 x = build_receiver_ref (var, false, ctx);
8177 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8178 gimple_seq_add_stmt (&new_body,
8179 gimple_build_assign (new_var, x));
8181 break;
8182 case OMP_CLAUSE_PRIVATE:
8183 if (is_gimple_omp_oacc (ctx->stmt))
8184 break;
8185 var = OMP_CLAUSE_DECL (c);
8186 if (omp_is_reference (var))
8188 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
8189 tree new_var = lookup_decl (var, ctx);
8190 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
8191 if (TREE_CONSTANT (x))
8193 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
8194 get_name (var));
8195 gimple_add_tmp_var (x);
8196 TREE_ADDRESSABLE (x) = 1;
8197 x = build_fold_addr_expr_loc (clause_loc, x);
8199 else
8200 break;
8202 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
8203 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8204 gimple_seq_add_stmt (&new_body,
8205 gimple_build_assign (new_var, x));
8207 break;
8208 case OMP_CLAUSE_USE_DEVICE_PTR:
8209 case OMP_CLAUSE_IS_DEVICE_PTR:
8210 var = OMP_CLAUSE_DECL (c);
8211 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR)
8212 x = build_sender_ref (var, ctx);
8213 else
8214 x = build_receiver_ref (var, false, ctx);
8215 if (is_variable_sized (var))
8217 tree pvar = DECL_VALUE_EXPR (var);
8218 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
8219 pvar = TREE_OPERAND (pvar, 0);
8220 gcc_assert (DECL_P (pvar));
8221 tree new_var = lookup_decl (pvar, ctx);
8222 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8223 gimple_seq_add_stmt (&new_body,
8224 gimple_build_assign (new_var, x));
8226 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
8228 tree new_var = lookup_decl (var, ctx);
8229 new_var = DECL_VALUE_EXPR (new_var);
8230 gcc_assert (TREE_CODE (new_var) == MEM_REF);
8231 new_var = TREE_OPERAND (new_var, 0);
8232 gcc_assert (DECL_P (new_var));
8233 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8234 gimple_seq_add_stmt (&new_body,
8235 gimple_build_assign (new_var, x));
8237 else
8239 tree type = TREE_TYPE (var);
8240 tree new_var = lookup_decl (var, ctx);
8241 if (omp_is_reference (var))
8243 type = TREE_TYPE (type);
8244 if (TREE_CODE (type) != ARRAY_TYPE)
8246 tree v = create_tmp_var_raw (type, get_name (var));
8247 gimple_add_tmp_var (v);
8248 TREE_ADDRESSABLE (v) = 1;
8249 x = fold_convert (type, x);
8250 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
8251 fb_rvalue);
8252 gimple_seq_add_stmt (&new_body,
8253 gimple_build_assign (v, x));
8254 x = build_fold_addr_expr (v);
8257 new_var = DECL_VALUE_EXPR (new_var);
8258 x = fold_convert (TREE_TYPE (new_var), x);
8259 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8260 gimple_seq_add_stmt (&new_body,
8261 gimple_build_assign (new_var, x));
8263 break;
8265 /* Handle GOMP_MAP_FIRSTPRIVATE_{POINTER,REFERENCE} in second pass,
8266 so that firstprivate vars holding OMP_CLAUSE_SIZE if needed
8267 are already handled. Similarly OMP_CLAUSE_PRIVATE for VLAs
8268 or references to VLAs. */
8269 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
8270 switch (OMP_CLAUSE_CODE (c))
8272 tree var;
8273 default:
8274 break;
8275 case OMP_CLAUSE_MAP:
8276 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
8277 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
8279 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
8280 HOST_WIDE_INT offset = 0;
8281 gcc_assert (prev);
8282 var = OMP_CLAUSE_DECL (c);
8283 if (DECL_P (var)
8284 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
8285 && is_global_var (maybe_lookup_decl_in_outer_ctx (var,
8286 ctx))
8287 && varpool_node::get_create (var)->offloadable)
8288 break;
8289 if (TREE_CODE (var) == INDIRECT_REF
8290 && TREE_CODE (TREE_OPERAND (var, 0)) == COMPONENT_REF)
8291 var = TREE_OPERAND (var, 0);
8292 if (TREE_CODE (var) == COMPONENT_REF)
8294 var = get_addr_base_and_unit_offset (var, &offset);
8295 gcc_assert (var != NULL_TREE && DECL_P (var));
8297 else if (DECL_SIZE (var)
8298 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
8300 tree var2 = DECL_VALUE_EXPR (var);
8301 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
8302 var2 = TREE_OPERAND (var2, 0);
8303 gcc_assert (DECL_P (var2));
8304 var = var2;
8306 tree new_var = lookup_decl (var, ctx), x;
8307 tree type = TREE_TYPE (new_var);
8308 bool is_ref;
8309 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == INDIRECT_REF
8310 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
8311 == COMPONENT_REF))
8313 type = TREE_TYPE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0));
8314 is_ref = true;
8315 new_var = build2 (MEM_REF, type,
8316 build_fold_addr_expr (new_var),
8317 build_int_cst (build_pointer_type (type),
8318 offset));
8320 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPONENT_REF)
8322 type = TREE_TYPE (OMP_CLAUSE_DECL (c));
8323 is_ref = TREE_CODE (type) == REFERENCE_TYPE;
8324 new_var = build2 (MEM_REF, type,
8325 build_fold_addr_expr (new_var),
8326 build_int_cst (build_pointer_type (type),
8327 offset));
8329 else
8330 is_ref = omp_is_reference (var);
8331 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
8332 is_ref = false;
8333 bool ref_to_array = false;
8334 if (is_ref)
8336 type = TREE_TYPE (type);
8337 if (TREE_CODE (type) == ARRAY_TYPE)
8339 type = build_pointer_type (type);
8340 ref_to_array = true;
8343 else if (TREE_CODE (type) == ARRAY_TYPE)
8345 tree decl2 = DECL_VALUE_EXPR (new_var);
8346 gcc_assert (TREE_CODE (decl2) == MEM_REF);
8347 decl2 = TREE_OPERAND (decl2, 0);
8348 gcc_assert (DECL_P (decl2));
8349 new_var = decl2;
8350 type = TREE_TYPE (new_var);
8352 x = build_receiver_ref (OMP_CLAUSE_DECL (prev), false, ctx);
8353 x = fold_convert_loc (clause_loc, type, x);
8354 if (!integer_zerop (OMP_CLAUSE_SIZE (c)))
8356 tree bias = OMP_CLAUSE_SIZE (c);
8357 if (DECL_P (bias))
8358 bias = lookup_decl (bias, ctx);
8359 bias = fold_convert_loc (clause_loc, sizetype, bias);
8360 bias = fold_build1_loc (clause_loc, NEGATE_EXPR, sizetype,
8361 bias);
8362 x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
8363 TREE_TYPE (x), x, bias);
8365 if (ref_to_array)
8366 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
8367 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8368 if (is_ref && !ref_to_array)
8370 tree t = create_tmp_var_raw (type, get_name (var));
8371 gimple_add_tmp_var (t);
8372 TREE_ADDRESSABLE (t) = 1;
8373 gimple_seq_add_stmt (&new_body,
8374 gimple_build_assign (t, x));
8375 x = build_fold_addr_expr_loc (clause_loc, t);
8377 gimple_seq_add_stmt (&new_body,
8378 gimple_build_assign (new_var, x));
8379 prev = NULL_TREE;
8381 else if (OMP_CLAUSE_CHAIN (c)
8382 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c))
8383 == OMP_CLAUSE_MAP
8384 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
8385 == GOMP_MAP_FIRSTPRIVATE_POINTER
8386 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
8387 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
8388 prev = c;
8389 break;
8390 case OMP_CLAUSE_PRIVATE:
8391 var = OMP_CLAUSE_DECL (c);
8392 if (is_variable_sized (var))
8394 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
8395 tree new_var = lookup_decl (var, ctx);
8396 tree pvar = DECL_VALUE_EXPR (var);
8397 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
8398 pvar = TREE_OPERAND (pvar, 0);
8399 gcc_assert (DECL_P (pvar));
8400 tree new_pvar = lookup_decl (pvar, ctx);
8401 tree atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
8402 tree al = size_int (DECL_ALIGN (var));
8403 tree x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
8404 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
8405 x = fold_convert_loc (clause_loc, TREE_TYPE (new_pvar), x);
8406 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8407 gimple_seq_add_stmt (&new_body,
8408 gimple_build_assign (new_pvar, x));
8410 else if (omp_is_reference (var) && !is_gimple_omp_oacc (ctx->stmt))
8412 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
8413 tree new_var = lookup_decl (var, ctx);
8414 tree x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
8415 if (TREE_CONSTANT (x))
8416 break;
8417 else
8419 tree atmp
8420 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
8421 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
8422 tree al = size_int (TYPE_ALIGN (rtype));
8423 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
8426 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
8427 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8428 gimple_seq_add_stmt (&new_body,
8429 gimple_build_assign (new_var, x));
8431 break;
8434 gimple_seq fork_seq = NULL;
8435 gimple_seq join_seq = NULL;
8437 if (is_oacc_parallel (ctx))
8439 /* If there are reductions on the offloaded region itself, treat
8440 them as a dummy GANG loop. */
8441 tree level = build_int_cst (integer_type_node, GOMP_DIM_GANG);
8443 lower_oacc_reductions (gimple_location (ctx->stmt), clauses, level,
8444 false, NULL, NULL, &fork_seq, &join_seq, ctx);
8447 gimple_seq_add_seq (&new_body, fork_seq);
8448 gimple_seq_add_seq (&new_body, tgt_body);
8449 gimple_seq_add_seq (&new_body, join_seq);
8451 if (offloaded)
8452 new_body = maybe_catch_exception (new_body);
8454 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
8455 gimple_omp_set_body (stmt, new_body);
8458 bind = gimple_build_bind (NULL, NULL,
8459 tgt_bind ? gimple_bind_block (tgt_bind)
8460 : NULL_TREE);
8461 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
8462 gimple_bind_add_seq (bind, ilist);
8463 gimple_bind_add_stmt (bind, stmt);
8464 gimple_bind_add_seq (bind, olist);
8466 pop_gimplify_context (NULL);
8468 if (dep_bind)
8470 gimple_bind_add_seq (dep_bind, dep_ilist);
8471 gimple_bind_add_stmt (dep_bind, bind);
8472 gimple_bind_add_seq (dep_bind, dep_olist);
8473 pop_gimplify_context (dep_bind);
8477 /* Expand code for an OpenMP teams directive. */
8479 static void
8480 lower_omp_teams (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8482 gomp_teams *teams_stmt = as_a <gomp_teams *> (gsi_stmt (*gsi_p));
8483 push_gimplify_context ();
8485 tree block = make_node (BLOCK);
8486 gbind *bind = gimple_build_bind (NULL, NULL, block);
8487 gsi_replace (gsi_p, bind, true);
8488 gimple_seq bind_body = NULL;
8489 gimple_seq dlist = NULL;
8490 gimple_seq olist = NULL;
8492 tree num_teams = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
8493 OMP_CLAUSE_NUM_TEAMS);
8494 if (num_teams == NULL_TREE)
8495 num_teams = build_int_cst (unsigned_type_node, 0);
8496 else
8498 num_teams = OMP_CLAUSE_NUM_TEAMS_EXPR (num_teams);
8499 num_teams = fold_convert (unsigned_type_node, num_teams);
8500 gimplify_expr (&num_teams, &bind_body, NULL, is_gimple_val, fb_rvalue);
8502 tree thread_limit = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
8503 OMP_CLAUSE_THREAD_LIMIT);
8504 if (thread_limit == NULL_TREE)
8505 thread_limit = build_int_cst (unsigned_type_node, 0);
8506 else
8508 thread_limit = OMP_CLAUSE_THREAD_LIMIT_EXPR (thread_limit);
8509 thread_limit = fold_convert (unsigned_type_node, thread_limit);
8510 gimplify_expr (&thread_limit, &bind_body, NULL, is_gimple_val,
8511 fb_rvalue);
8514 lower_rec_input_clauses (gimple_omp_teams_clauses (teams_stmt),
8515 &bind_body, &dlist, ctx, NULL);
8516 lower_omp (gimple_omp_body_ptr (teams_stmt), ctx);
8517 lower_reduction_clauses (gimple_omp_teams_clauses (teams_stmt), &olist, ctx);
8518 if (!gimple_omp_teams_grid_phony (teams_stmt))
8520 gimple_seq_add_stmt (&bind_body, teams_stmt);
8521 location_t loc = gimple_location (teams_stmt);
8522 tree decl = builtin_decl_explicit (BUILT_IN_GOMP_TEAMS);
8523 gimple *call = gimple_build_call (decl, 2, num_teams, thread_limit);
8524 gimple_set_location (call, loc);
8525 gimple_seq_add_stmt (&bind_body, call);
8528 gimple_seq_add_seq (&bind_body, gimple_omp_body (teams_stmt));
8529 gimple_omp_set_body (teams_stmt, NULL);
8530 gimple_seq_add_seq (&bind_body, olist);
8531 gimple_seq_add_seq (&bind_body, dlist);
8532 if (!gimple_omp_teams_grid_phony (teams_stmt))
8533 gimple_seq_add_stmt (&bind_body, gimple_build_omp_return (true));
8534 gimple_bind_set_body (bind, bind_body);
8536 pop_gimplify_context (bind);
8538 gimple_bind_append_vars (bind, ctx->block_vars);
8539 BLOCK_VARS (block) = ctx->block_vars;
8540 if (BLOCK_VARS (block))
8541 TREE_USED (block) = 1;
8544 /* Expand code within an artificial GIMPLE_OMP_GRID_BODY OMP construct. */
8546 static void
8547 lower_omp_grid_body (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8549 gimple *stmt = gsi_stmt (*gsi_p);
8550 lower_omp (gimple_omp_body_ptr (stmt), ctx);
8551 gimple_seq_add_stmt (gimple_omp_body_ptr (stmt),
8552 gimple_build_omp_return (false));
8556 /* Callback for lower_omp_1. Return non-NULL if *tp needs to be
8557 regimplified. If DATA is non-NULL, lower_omp_1 is outside
8558 of OMP context, but with task_shared_vars set. */
8560 static tree
8561 lower_omp_regimplify_p (tree *tp, int *walk_subtrees,
8562 void *data)
8564 tree t = *tp;
8566 /* Any variable with DECL_VALUE_EXPR needs to be regimplified. */
8567 if (VAR_P (t) && data == NULL && DECL_HAS_VALUE_EXPR_P (t))
8568 return t;
8570 if (task_shared_vars
8571 && DECL_P (t)
8572 && bitmap_bit_p (task_shared_vars, DECL_UID (t)))
8573 return t;
8575 /* If a global variable has been privatized, TREE_CONSTANT on
8576 ADDR_EXPR might be wrong. */
8577 if (data == NULL && TREE_CODE (t) == ADDR_EXPR)
8578 recompute_tree_invariant_for_addr_expr (t);
8580 *walk_subtrees = !IS_TYPE_OR_DECL_P (t);
8581 return NULL_TREE;
8584 /* Data to be communicated between lower_omp_regimplify_operands and
8585 lower_omp_regimplify_operands_p. */
8587 struct lower_omp_regimplify_operands_data
8589 omp_context *ctx;
8590 vec<tree> *decls;
8593 /* Helper function for lower_omp_regimplify_operands. Find
8594 omp_member_access_dummy_var vars and adjust temporarily their
8595 DECL_VALUE_EXPRs if needed. */
8597 static tree
8598 lower_omp_regimplify_operands_p (tree *tp, int *walk_subtrees,
8599 void *data)
8601 tree t = omp_member_access_dummy_var (*tp);
8602 if (t)
8604 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
8605 lower_omp_regimplify_operands_data *ldata
8606 = (lower_omp_regimplify_operands_data *) wi->info;
8607 tree o = maybe_lookup_decl (t, ldata->ctx);
8608 if (o != t)
8610 ldata->decls->safe_push (DECL_VALUE_EXPR (*tp));
8611 ldata->decls->safe_push (*tp);
8612 tree v = unshare_and_remap (DECL_VALUE_EXPR (*tp), t, o);
8613 SET_DECL_VALUE_EXPR (*tp, v);
8616 *walk_subtrees = !IS_TYPE_OR_DECL_P (*tp);
8617 return NULL_TREE;
8620 /* Wrapper around gimple_regimplify_operands that adjusts DECL_VALUE_EXPRs
8621 of omp_member_access_dummy_var vars during regimplification. */
8623 static void
8624 lower_omp_regimplify_operands (omp_context *ctx, gimple *stmt,
8625 gimple_stmt_iterator *gsi_p)
8627 auto_vec<tree, 10> decls;
8628 if (ctx)
8630 struct walk_stmt_info wi;
8631 memset (&wi, '\0', sizeof (wi));
8632 struct lower_omp_regimplify_operands_data data;
8633 data.ctx = ctx;
8634 data.decls = &decls;
8635 wi.info = &data;
8636 walk_gimple_op (stmt, lower_omp_regimplify_operands_p, &wi);
8638 gimple_regimplify_operands (stmt, gsi_p);
8639 while (!decls.is_empty ())
8641 tree t = decls.pop ();
8642 tree v = decls.pop ();
8643 SET_DECL_VALUE_EXPR (t, v);
8647 static void
8648 lower_omp_1 (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8650 gimple *stmt = gsi_stmt (*gsi_p);
8651 struct walk_stmt_info wi;
8652 gcall *call_stmt;
8654 if (gimple_has_location (stmt))
8655 input_location = gimple_location (stmt);
8657 if (task_shared_vars)
8658 memset (&wi, '\0', sizeof (wi));
8660 /* If we have issued syntax errors, avoid doing any heavy lifting.
8661 Just replace the OMP directives with a NOP to avoid
8662 confusing RTL expansion. */
8663 if (seen_error () && is_gimple_omp (stmt))
8665 gsi_replace (gsi_p, gimple_build_nop (), true);
8666 return;
8669 switch (gimple_code (stmt))
8671 case GIMPLE_COND:
8673 gcond *cond_stmt = as_a <gcond *> (stmt);
8674 if ((ctx || task_shared_vars)
8675 && (walk_tree (gimple_cond_lhs_ptr (cond_stmt),
8676 lower_omp_regimplify_p,
8677 ctx ? NULL : &wi, NULL)
8678 || walk_tree (gimple_cond_rhs_ptr (cond_stmt),
8679 lower_omp_regimplify_p,
8680 ctx ? NULL : &wi, NULL)))
8681 lower_omp_regimplify_operands (ctx, cond_stmt, gsi_p);
8683 break;
8684 case GIMPLE_CATCH:
8685 lower_omp (gimple_catch_handler_ptr (as_a <gcatch *> (stmt)), ctx);
8686 break;
8687 case GIMPLE_EH_FILTER:
8688 lower_omp (gimple_eh_filter_failure_ptr (stmt), ctx);
8689 break;
8690 case GIMPLE_TRY:
8691 lower_omp (gimple_try_eval_ptr (stmt), ctx);
8692 lower_omp (gimple_try_cleanup_ptr (stmt), ctx);
8693 break;
8694 case GIMPLE_TRANSACTION:
8695 lower_omp (gimple_transaction_body_ptr (as_a <gtransaction *> (stmt)),
8696 ctx);
8697 break;
8698 case GIMPLE_BIND:
8699 lower_omp (gimple_bind_body_ptr (as_a <gbind *> (stmt)), ctx);
8700 break;
8701 case GIMPLE_OMP_PARALLEL:
8702 case GIMPLE_OMP_TASK:
8703 ctx = maybe_lookup_ctx (stmt);
8704 gcc_assert (ctx);
8705 if (ctx->cancellable)
8706 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
8707 lower_omp_taskreg (gsi_p, ctx);
8708 break;
8709 case GIMPLE_OMP_FOR:
8710 ctx = maybe_lookup_ctx (stmt);
8711 gcc_assert (ctx);
8712 if (ctx->cancellable)
8713 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
8714 lower_omp_for (gsi_p, ctx);
8715 break;
8716 case GIMPLE_OMP_SECTIONS:
8717 ctx = maybe_lookup_ctx (stmt);
8718 gcc_assert (ctx);
8719 if (ctx->cancellable)
8720 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
8721 lower_omp_sections (gsi_p, ctx);
8722 break;
8723 case GIMPLE_OMP_SINGLE:
8724 ctx = maybe_lookup_ctx (stmt);
8725 gcc_assert (ctx);
8726 lower_omp_single (gsi_p, ctx);
8727 break;
8728 case GIMPLE_OMP_MASTER:
8729 ctx = maybe_lookup_ctx (stmt);
8730 gcc_assert (ctx);
8731 lower_omp_master (gsi_p, ctx);
8732 break;
8733 case GIMPLE_OMP_TASKGROUP:
8734 ctx = maybe_lookup_ctx (stmt);
8735 gcc_assert (ctx);
8736 lower_omp_taskgroup (gsi_p, ctx);
8737 break;
8738 case GIMPLE_OMP_ORDERED:
8739 ctx = maybe_lookup_ctx (stmt);
8740 gcc_assert (ctx);
8741 lower_omp_ordered (gsi_p, ctx);
8742 break;
8743 case GIMPLE_OMP_CRITICAL:
8744 ctx = maybe_lookup_ctx (stmt);
8745 gcc_assert (ctx);
8746 lower_omp_critical (gsi_p, ctx);
8747 break;
8748 case GIMPLE_OMP_ATOMIC_LOAD:
8749 if ((ctx || task_shared_vars)
8750 && walk_tree (gimple_omp_atomic_load_rhs_ptr (
8751 as_a <gomp_atomic_load *> (stmt)),
8752 lower_omp_regimplify_p, ctx ? NULL : &wi, NULL))
8753 lower_omp_regimplify_operands (ctx, stmt, gsi_p);
8754 break;
8755 case GIMPLE_OMP_TARGET:
8756 ctx = maybe_lookup_ctx (stmt);
8757 gcc_assert (ctx);
8758 lower_omp_target (gsi_p, ctx);
8759 break;
8760 case GIMPLE_OMP_TEAMS:
8761 ctx = maybe_lookup_ctx (stmt);
8762 gcc_assert (ctx);
8763 lower_omp_teams (gsi_p, ctx);
8764 break;
8765 case GIMPLE_OMP_GRID_BODY:
8766 ctx = maybe_lookup_ctx (stmt);
8767 gcc_assert (ctx);
8768 lower_omp_grid_body (gsi_p, ctx);
8769 break;
8770 case GIMPLE_CALL:
8771 tree fndecl;
8772 call_stmt = as_a <gcall *> (stmt);
8773 fndecl = gimple_call_fndecl (call_stmt);
8774 if (fndecl
8775 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
8776 switch (DECL_FUNCTION_CODE (fndecl))
8778 case BUILT_IN_GOMP_BARRIER:
8779 if (ctx == NULL)
8780 break;
8781 /* FALLTHRU */
8782 case BUILT_IN_GOMP_CANCEL:
8783 case BUILT_IN_GOMP_CANCELLATION_POINT:
8784 omp_context *cctx;
8785 cctx = ctx;
8786 if (gimple_code (cctx->stmt) == GIMPLE_OMP_SECTION)
8787 cctx = cctx->outer;
8788 gcc_assert (gimple_call_lhs (call_stmt) == NULL_TREE);
8789 if (!cctx->cancellable)
8791 if (DECL_FUNCTION_CODE (fndecl)
8792 == BUILT_IN_GOMP_CANCELLATION_POINT)
8794 stmt = gimple_build_nop ();
8795 gsi_replace (gsi_p, stmt, false);
8797 break;
8799 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_GOMP_BARRIER)
8801 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_BARRIER_CANCEL);
8802 gimple_call_set_fndecl (call_stmt, fndecl);
8803 gimple_call_set_fntype (call_stmt, TREE_TYPE (fndecl));
8805 tree lhs;
8806 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (fndecl)));
8807 gimple_call_set_lhs (call_stmt, lhs);
8808 tree fallthru_label;
8809 fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
8810 gimple *g;
8811 g = gimple_build_label (fallthru_label);
8812 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
8813 g = gimple_build_cond (NE_EXPR, lhs,
8814 fold_convert (TREE_TYPE (lhs),
8815 boolean_false_node),
8816 cctx->cancel_label, fallthru_label);
8817 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
8818 break;
8819 default:
8820 break;
8822 /* FALLTHRU */
8823 default:
8824 if ((ctx || task_shared_vars)
8825 && walk_gimple_op (stmt, lower_omp_regimplify_p,
8826 ctx ? NULL : &wi))
8828 /* Just remove clobbers, this should happen only if we have
8829 "privatized" local addressable variables in SIMD regions,
8830 the clobber isn't needed in that case and gimplifying address
8831 of the ARRAY_REF into a pointer and creating MEM_REF based
8832 clobber would create worse code than we get with the clobber
8833 dropped. */
8834 if (gimple_clobber_p (stmt))
8836 gsi_replace (gsi_p, gimple_build_nop (), true);
8837 break;
8839 lower_omp_regimplify_operands (ctx, stmt, gsi_p);
8841 break;
8845 static void
8846 lower_omp (gimple_seq *body, omp_context *ctx)
8848 location_t saved_location = input_location;
8849 gimple_stmt_iterator gsi;
8850 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
8851 lower_omp_1 (&gsi, ctx);
8852 /* During gimplification, we haven't folded statments inside offloading
8853 or taskreg regions (gimplify.c:maybe_fold_stmt); do that now. */
8854 if (target_nesting_level || taskreg_nesting_level)
8855 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
8856 fold_stmt (&gsi);
8857 input_location = saved_location;
8860 /* Main entry point. */
8862 static unsigned int
8863 execute_lower_omp (void)
8865 gimple_seq body;
8866 int i;
8867 omp_context *ctx;
8869 /* This pass always runs, to provide PROP_gimple_lomp.
8870 But often, there is nothing to do. */
8871 if (flag_cilkplus == 0 && flag_openacc == 0 && flag_openmp == 0
8872 && flag_openmp_simd == 0)
8873 return 0;
8875 all_contexts = splay_tree_new (splay_tree_compare_pointers, 0,
8876 delete_omp_context);
8878 body = gimple_body (current_function_decl);
8880 if (hsa_gen_requested_p ())
8881 omp_grid_gridify_all_targets (&body);
8883 scan_omp (&body, NULL);
8884 gcc_assert (taskreg_nesting_level == 0);
8885 FOR_EACH_VEC_ELT (taskreg_contexts, i, ctx)
8886 finish_taskreg_scan (ctx);
8887 taskreg_contexts.release ();
8889 if (all_contexts->root)
8891 if (task_shared_vars)
8892 push_gimplify_context ();
8893 lower_omp (&body, NULL);
8894 if (task_shared_vars)
8895 pop_gimplify_context (NULL);
8898 if (all_contexts)
8900 splay_tree_delete (all_contexts);
8901 all_contexts = NULL;
8903 BITMAP_FREE (task_shared_vars);
8904 return 0;
8907 namespace {
8909 const pass_data pass_data_lower_omp =
8911 GIMPLE_PASS, /* type */
8912 "omplower", /* name */
8913 OPTGROUP_OPENMP, /* optinfo_flags */
8914 TV_NONE, /* tv_id */
8915 PROP_gimple_any, /* properties_required */
8916 PROP_gimple_lomp | PROP_gimple_lomp_dev, /* properties_provided */
8917 0, /* properties_destroyed */
8918 0, /* todo_flags_start */
8919 0, /* todo_flags_finish */
8922 class pass_lower_omp : public gimple_opt_pass
8924 public:
8925 pass_lower_omp (gcc::context *ctxt)
8926 : gimple_opt_pass (pass_data_lower_omp, ctxt)
8929 /* opt_pass methods: */
8930 virtual unsigned int execute (function *) { return execute_lower_omp (); }
8932 }; // class pass_lower_omp
8934 } // anon namespace
8936 gimple_opt_pass *
8937 make_pass_lower_omp (gcc::context *ctxt)
8939 return new pass_lower_omp (ctxt);
8942 /* The following is a utility to diagnose structured block violations.
8943 It is not part of the "omplower" pass, as that's invoked too late. It
8944 should be invoked by the respective front ends after gimplification. */
8946 static splay_tree all_labels;
8948 /* Check for mismatched contexts and generate an error if needed. Return
8949 true if an error is detected. */
8951 static bool
8952 diagnose_sb_0 (gimple_stmt_iterator *gsi_p,
8953 gimple *branch_ctx, gimple *label_ctx)
8955 gcc_checking_assert (!branch_ctx || is_gimple_omp (branch_ctx));
8956 gcc_checking_assert (!label_ctx || is_gimple_omp (label_ctx));
8958 if (label_ctx == branch_ctx)
8959 return false;
8961 const char* kind = NULL;
8963 if (flag_cilkplus)
8965 if ((branch_ctx
8966 && gimple_code (branch_ctx) == GIMPLE_OMP_FOR
8967 && gimple_omp_for_kind (branch_ctx) == GF_OMP_FOR_KIND_CILKSIMD)
8968 || (label_ctx
8969 && gimple_code (label_ctx) == GIMPLE_OMP_FOR
8970 && gimple_omp_for_kind (label_ctx) == GF_OMP_FOR_KIND_CILKSIMD))
8971 kind = "Cilk Plus";
8973 if (flag_openacc)
8975 if ((branch_ctx && is_gimple_omp_oacc (branch_ctx))
8976 || (label_ctx && is_gimple_omp_oacc (label_ctx)))
8978 gcc_checking_assert (kind == NULL);
8979 kind = "OpenACC";
8982 if (kind == NULL)
8984 gcc_checking_assert (flag_openmp);
8985 kind = "OpenMP";
8988 /* Previously we kept track of the label's entire context in diagnose_sb_[12]
8989 so we could traverse it and issue a correct "exit" or "enter" error
8990 message upon a structured block violation.
8992 We built the context by building a list with tree_cons'ing, but there is
8993 no easy counterpart in gimple tuples. It seems like far too much work
8994 for issuing exit/enter error messages. If someone really misses the
8995 distinct error message... patches welcome. */
8997 #if 0
8998 /* Try to avoid confusing the user by producing and error message
8999 with correct "exit" or "enter" verbiage. We prefer "exit"
9000 unless we can show that LABEL_CTX is nested within BRANCH_CTX. */
9001 if (branch_ctx == NULL)
9002 exit_p = false;
9003 else
9005 while (label_ctx)
9007 if (TREE_VALUE (label_ctx) == branch_ctx)
9009 exit_p = false;
9010 break;
9012 label_ctx = TREE_CHAIN (label_ctx);
9016 if (exit_p)
9017 error ("invalid exit from %s structured block", kind);
9018 else
9019 error ("invalid entry to %s structured block", kind);
9020 #endif
9022 /* If it's obvious we have an invalid entry, be specific about the error. */
9023 if (branch_ctx == NULL)
9024 error ("invalid entry to %s structured block", kind);
9025 else
9027 /* Otherwise, be vague and lazy, but efficient. */
9028 error ("invalid branch to/from %s structured block", kind);
9031 gsi_replace (gsi_p, gimple_build_nop (), false);
9032 return true;
9035 /* Pass 1: Create a minimal tree of structured blocks, and record
9036 where each label is found. */
9038 static tree
9039 diagnose_sb_1 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
9040 struct walk_stmt_info *wi)
9042 gimple *context = (gimple *) wi->info;
9043 gimple *inner_context;
9044 gimple *stmt = gsi_stmt (*gsi_p);
9046 *handled_ops_p = true;
9048 switch (gimple_code (stmt))
9050 WALK_SUBSTMTS;
9052 case GIMPLE_OMP_PARALLEL:
9053 case GIMPLE_OMP_TASK:
9054 case GIMPLE_OMP_SECTIONS:
9055 case GIMPLE_OMP_SINGLE:
9056 case GIMPLE_OMP_SECTION:
9057 case GIMPLE_OMP_MASTER:
9058 case GIMPLE_OMP_ORDERED:
9059 case GIMPLE_OMP_CRITICAL:
9060 case GIMPLE_OMP_TARGET:
9061 case GIMPLE_OMP_TEAMS:
9062 case GIMPLE_OMP_TASKGROUP:
9063 /* The minimal context here is just the current OMP construct. */
9064 inner_context = stmt;
9065 wi->info = inner_context;
9066 walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
9067 wi->info = context;
9068 break;
9070 case GIMPLE_OMP_FOR:
9071 inner_context = stmt;
9072 wi->info = inner_context;
9073 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
9074 walk them. */
9075 walk_gimple_seq (gimple_omp_for_pre_body (stmt),
9076 diagnose_sb_1, NULL, wi);
9077 walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
9078 wi->info = context;
9079 break;
9081 case GIMPLE_LABEL:
9082 splay_tree_insert (all_labels,
9083 (splay_tree_key) gimple_label_label (
9084 as_a <glabel *> (stmt)),
9085 (splay_tree_value) context);
9086 break;
9088 default:
9089 break;
9092 return NULL_TREE;
9095 /* Pass 2: Check each branch and see if its context differs from that of
9096 the destination label's context. */
9098 static tree
9099 diagnose_sb_2 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
9100 struct walk_stmt_info *wi)
9102 gimple *context = (gimple *) wi->info;
9103 splay_tree_node n;
9104 gimple *stmt = gsi_stmt (*gsi_p);
9106 *handled_ops_p = true;
9108 switch (gimple_code (stmt))
9110 WALK_SUBSTMTS;
9112 case GIMPLE_OMP_PARALLEL:
9113 case GIMPLE_OMP_TASK:
9114 case GIMPLE_OMP_SECTIONS:
9115 case GIMPLE_OMP_SINGLE:
9116 case GIMPLE_OMP_SECTION:
9117 case GIMPLE_OMP_MASTER:
9118 case GIMPLE_OMP_ORDERED:
9119 case GIMPLE_OMP_CRITICAL:
9120 case GIMPLE_OMP_TARGET:
9121 case GIMPLE_OMP_TEAMS:
9122 case GIMPLE_OMP_TASKGROUP:
9123 wi->info = stmt;
9124 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
9125 wi->info = context;
9126 break;
9128 case GIMPLE_OMP_FOR:
9129 wi->info = stmt;
9130 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
9131 walk them. */
9132 walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt),
9133 diagnose_sb_2, NULL, wi);
9134 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
9135 wi->info = context;
9136 break;
9138 case GIMPLE_COND:
9140 gcond *cond_stmt = as_a <gcond *> (stmt);
9141 tree lab = gimple_cond_true_label (cond_stmt);
9142 if (lab)
9144 n = splay_tree_lookup (all_labels,
9145 (splay_tree_key) lab);
9146 diagnose_sb_0 (gsi_p, context,
9147 n ? (gimple *) n->value : NULL);
9149 lab = gimple_cond_false_label (cond_stmt);
9150 if (lab)
9152 n = splay_tree_lookup (all_labels,
9153 (splay_tree_key) lab);
9154 diagnose_sb_0 (gsi_p, context,
9155 n ? (gimple *) n->value : NULL);
9158 break;
9160 case GIMPLE_GOTO:
9162 tree lab = gimple_goto_dest (stmt);
9163 if (TREE_CODE (lab) != LABEL_DECL)
9164 break;
9166 n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
9167 diagnose_sb_0 (gsi_p, context, n ? (gimple *) n->value : NULL);
9169 break;
9171 case GIMPLE_SWITCH:
9173 gswitch *switch_stmt = as_a <gswitch *> (stmt);
9174 unsigned int i;
9175 for (i = 0; i < gimple_switch_num_labels (switch_stmt); ++i)
9177 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
9178 n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
9179 if (n && diagnose_sb_0 (gsi_p, context, (gimple *) n->value))
9180 break;
9183 break;
9185 case GIMPLE_RETURN:
9186 diagnose_sb_0 (gsi_p, context, NULL);
9187 break;
9189 default:
9190 break;
9193 return NULL_TREE;
9196 static unsigned int
9197 diagnose_omp_structured_block_errors (void)
9199 struct walk_stmt_info wi;
9200 gimple_seq body = gimple_body (current_function_decl);
9202 all_labels = splay_tree_new (splay_tree_compare_pointers, 0, 0);
9204 memset (&wi, 0, sizeof (wi));
9205 walk_gimple_seq (body, diagnose_sb_1, NULL, &wi);
9207 memset (&wi, 0, sizeof (wi));
9208 wi.want_locations = true;
9209 walk_gimple_seq_mod (&body, diagnose_sb_2, NULL, &wi);
9211 gimple_set_body (current_function_decl, body);
9213 splay_tree_delete (all_labels);
9214 all_labels = NULL;
9216 return 0;
9219 namespace {
9221 const pass_data pass_data_diagnose_omp_blocks =
9223 GIMPLE_PASS, /* type */
9224 "*diagnose_omp_blocks", /* name */
9225 OPTGROUP_OPENMP, /* optinfo_flags */
9226 TV_NONE, /* tv_id */
9227 PROP_gimple_any, /* properties_required */
9228 0, /* properties_provided */
9229 0, /* properties_destroyed */
9230 0, /* todo_flags_start */
9231 0, /* todo_flags_finish */
9234 class pass_diagnose_omp_blocks : public gimple_opt_pass
9236 public:
9237 pass_diagnose_omp_blocks (gcc::context *ctxt)
9238 : gimple_opt_pass (pass_data_diagnose_omp_blocks, ctxt)
9241 /* opt_pass methods: */
9242 virtual bool gate (function *)
9244 return flag_cilkplus || flag_openacc || flag_openmp;
9246 virtual unsigned int execute (function *)
9248 return diagnose_omp_structured_block_errors ();
9251 }; // class pass_diagnose_omp_blocks
9253 } // anon namespace
9255 gimple_opt_pass *
9256 make_pass_diagnose_omp_blocks (gcc::context *ctxt)
9258 return new pass_diagnose_omp_blocks (ctxt);
9262 #include "gt-omp-low.h"