/* gcc/omp-low.c */

/* Lowering pass for OMP directives.  Converts OMP directives into explicit
   calls to the runtime library (libgomp), data marshalling to implement data
   sharing and copying clauses, offloading to accelerators, and more.

   Contributed by Diego Novillo <dnovillo@redhat.com>

   Copyright (C) 2005-2017 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "ssa.h"
#include "cgraph.h"
#include "pretty-print.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "tree-iterator.h"
#include "tree-inline.h"
#include "langhooks.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "splay-tree.h"
#include "omp-general.h"
#include "omp-low.h"
#include "omp-grid.h"
#include "gimple-low.h"
#include "symbol-summary.h"
#include "tree-nested.h"
#include "context.h"
#include "gomp-constants.h"
#include "gimple-pretty-print.h"
#include "hsa-common.h"

/* Lowering of OMP parallel and workshare constructs proceeds in two
   phases.  The first phase scans the function looking for OMP statements
   and then for variables that must be replaced to satisfy data sharing
   clauses.  The second phase expands code for the constructs, as well as
   re-gimplifying things when variables have been replaced with complex
   expressions.

   Final code generation is done by pass_expand_omp.  The flowgraph is
   scanned for regions which are then moved to a new
   function, to be invoked by the thread library, or offloaded.  */
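
/* As an illustration (a simplified sketch, not literal pass output; the
   ".omp_data_*" names follow the conventions used below), a construct like

       int x = 0;
       #pragma omp parallel shared(x)
	 x++;

   is lowered so that X is communicated through a record:

       struct .omp_data_s { int *x; };
       main._omp_fn.0 (struct .omp_data_s *.omp_data_i)
	 { (*.omp_data_i->x)++; }

   and pass_expand_omp later outlines the body and emits a call into
   libgomp, roughly GOMP_parallel (main._omp_fn.0, &.omp_data_o, 0, 0).  */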

/* Context structure.  Used to store information about each parallel
   directive in the code.  */

struct omp_context
{
  /* This field must be at the beginning, as we do "inheritance":  Some
     callback functions for tree-inline.c (e.g., omp_copy_decl)
     receive a copy_body_data pointer that is up-casted to an
     omp_context pointer.  */
  copy_body_data cb;

  /* The tree of contexts corresponding to the encountered constructs.  */
  struct omp_context *outer;
  gimple *stmt;

  /* Map variables to fields in a structure that allows communication
     between sending and receiving threads.  */
  splay_tree field_map;
  tree record_type;
  tree sender_decl;
  tree receiver_decl;

  /* These are used just by task contexts, if task firstprivate fn is
     needed.  srecord_type is used to communicate from the thread
     that encountered the task construct to task firstprivate fn,
     record_type is allocated by GOMP_task, initialized by task firstprivate
     fn and passed to the task body fn.  */
  splay_tree sfield_map;
  tree srecord_type;

  /* A chain of variables to add to the top-level block surrounding the
     construct.  In the case of a parallel, this is in the child function.  */
  tree block_vars;

  /* Label to which GOMP_cancel{,llation_point} and explicit and implicit
     barriers should jump to during omplower pass.  */
  tree cancel_label;

  /* The sibling GIMPLE_OMP_FOR simd with _simt_ clause or NULL
     otherwise.  */
  gimple *simt_stmt;

  /* What to do with variables with implicitly determined sharing
     attributes.  */
  enum omp_clause_default_kind default_kind;

  /* Nesting depth of this context.  Used to beautify error messages re
     invalid gotos.  The outermost ctx is depth 1, with depth 0 being
     reserved for the main body of the function.  */
  int depth;

  /* True if this parallel directive is nested within another.  */
  bool is_nested;

  /* True if this construct can be cancelled.  */
  bool cancellable;
};

static splay_tree all_contexts;
static int taskreg_nesting_level;
static int target_nesting_level;
static bitmap task_shared_vars;
static vec<omp_context *> taskreg_contexts;

static void scan_omp (gimple_seq *, omp_context *);
static tree scan_omp_1_op (tree *, int *, void *);

#define WALK_SUBSTMTS  \
    case GIMPLE_BIND: \
    case GIMPLE_TRY: \
    case GIMPLE_CATCH: \
    case GIMPLE_EH_FILTER: \
    case GIMPLE_TRANSACTION: \
      /* The sub-statements for these should be walked.  */ \
      *handled_ops_p = false; \
      break;

/* Return true if CTX corresponds to an oacc parallel region.  */

static bool
is_oacc_parallel (omp_context *ctx)
{
  enum gimple_code outer_type = gimple_code (ctx->stmt);
  return ((outer_type == GIMPLE_OMP_TARGET)
	  && (gimple_omp_target_kind (ctx->stmt)
	      == GF_OMP_TARGET_KIND_OACC_PARALLEL));
}

/* Return true if CTX corresponds to an oacc kernels region.  */

static bool
is_oacc_kernels (omp_context *ctx)
{
  enum gimple_code outer_type = gimple_code (ctx->stmt);
  return ((outer_type == GIMPLE_OMP_TARGET)
	  && (gimple_omp_target_kind (ctx->stmt)
	      == GF_OMP_TARGET_KIND_OACC_KERNELS));
}

/* If DECL is the artificial dummy VAR_DECL created for non-static
   data member privatization, return the underlying "this" parameter,
   otherwise return NULL.  */

tree
omp_member_access_dummy_var (tree decl)
{
  if (!VAR_P (decl)
      || !DECL_ARTIFICIAL (decl)
      || !DECL_IGNORED_P (decl)
      || !DECL_HAS_VALUE_EXPR_P (decl)
      || !lang_hooks.decls.omp_disregard_value_expr (decl, false))
    return NULL_TREE;

  tree v = DECL_VALUE_EXPR (decl);
  if (TREE_CODE (v) != COMPONENT_REF)
    return NULL_TREE;

  while (1)
    switch (TREE_CODE (v))
      {
      case COMPONENT_REF:
      case MEM_REF:
      case INDIRECT_REF:
      CASE_CONVERT:
      case POINTER_PLUS_EXPR:
	v = TREE_OPERAND (v, 0);
	continue;
      case PARM_DECL:
	if (DECL_CONTEXT (v) == current_function_decl
	    && DECL_ARTIFICIAL (v)
	    && TREE_CODE (TREE_TYPE (v)) == POINTER_TYPE)
	  return v;
	return NULL_TREE;
      default:
	return NULL_TREE;
      }
}
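
/* Illustrative example: for a C++ non-static data member used in a
   data-sharing clause, such as

       struct S {
	 int n;
	 void f ()
	 {
	   #pragma omp parallel private (n)
	   ...
	 }
       };

   the front end creates an artificial VAR_DECL whose DECL_VALUE_EXPR is
   this->n; peeling the COMPONENT_REFs and conversions above recovers the
   artificial "this" PARM_DECL.  */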

/* Helper for unshare_and_remap, called through walk_tree.  */

static tree
unshare_and_remap_1 (tree *tp, int *walk_subtrees, void *data)
{
  tree *pair = (tree *) data;
  if (*tp == pair[0])
    {
      *tp = unshare_expr (pair[1]);
      *walk_subtrees = 0;
    }
  else if (IS_TYPE_OR_DECL_P (*tp))
    *walk_subtrees = 0;
  return NULL_TREE;
}

/* Return unshare_expr (X) with all occurrences of FROM
   replaced with TO.  */

static tree
unshare_and_remap (tree x, tree from, tree to)
{
  tree pair[2] = { from, to };
  x = unshare_expr (x);
  walk_tree (&x, unshare_and_remap_1, pair, NULL);
  return x;
}
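
/* For instance, build_outer_var_ref below uses unshare_and_remap to
   rewrite a DECL_VALUE_EXPR like this->n so that the dummy "this"
   parameter T is replaced by its counterpart O from the outer context.  */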

/* Convenience function for calling scan_omp_1_op on tree operands.  */

static inline tree
scan_omp_op (tree *tp, omp_context *ctx)
{
  struct walk_stmt_info wi;

  memset (&wi, 0, sizeof (wi));
  wi.info = ctx;
  wi.want_locations = true;

  return walk_tree (tp, scan_omp_1_op, &wi, NULL);
}

static void lower_omp (gimple_seq *, omp_context *);
static tree lookup_decl_in_outer_ctx (tree, omp_context *);
static tree maybe_lookup_decl_in_outer_ctx (tree, omp_context *);

/* Return true if CTX is for an omp parallel.  */

static inline bool
is_parallel_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL;
}


/* Return true if CTX is for an omp task.  */

static inline bool
is_task_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_TASK;
}


/* Return true if CTX is for an omp taskloop.  */

static inline bool
is_taskloop_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
	 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP;
}


/* Return true if CTX is for an omp parallel or omp task.  */

static inline bool
is_taskreg_ctx (omp_context *ctx)
{
  return is_parallel_ctx (ctx) || is_task_ctx (ctx);
}

/* Return true if EXPR is variable sized.  */

static inline bool
is_variable_sized (const_tree expr)
{
  return !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
}

/* Lookup variables.  The "maybe" form
   allows for the variable form to not have been entered, otherwise we
   assert that the variable must have been entered.  */

static inline tree
lookup_decl (tree var, omp_context *ctx)
{
  tree *n = ctx->cb.decl_map->get (var);
  return *n;
}

static inline tree
maybe_lookup_decl (const_tree var, omp_context *ctx)
{
  tree *n = ctx->cb.decl_map->get (const_cast<tree> (var));
  return n ? *n : NULL_TREE;
}

static inline tree
lookup_field (tree var, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->field_map, (splay_tree_key) var);
  return (tree) n->value;
}

static inline tree
lookup_sfield (splay_tree_key key, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->sfield_map
			 ? ctx->sfield_map : ctx->field_map, key);
  return (tree) n->value;
}

static inline tree
lookup_sfield (tree var, omp_context *ctx)
{
  return lookup_sfield ((splay_tree_key) var, ctx);
}

static inline tree
maybe_lookup_field (splay_tree_key key, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->field_map, key);
  return n ? (tree) n->value : NULL_TREE;
}

static inline tree
maybe_lookup_field (tree var, omp_context *ctx)
{
  return maybe_lookup_field ((splay_tree_key) var, ctx);
}

/* Return true if DECL should be copied by pointer.  SHARED_CTX is
   the parallel context if DECL is to be shared.  */

static bool
use_pointer_for_field (tree decl, omp_context *shared_ctx)
{
  if (AGGREGATE_TYPE_P (TREE_TYPE (decl))
      || TYPE_ATOMIC (TREE_TYPE (decl)))
    return true;

  /* We can only use copy-in/copy-out semantics for shared variables
     when we know the value is not accessible from an outer scope.  */
  if (shared_ctx)
    {
      gcc_assert (!is_gimple_omp_oacc (shared_ctx->stmt));

      /* ??? Trivially accessible from anywhere.  But why would we even
	 be passing an address in this case?  Should we simply assert
	 this to be false, or should we have a cleanup pass that removes
	 these from the list of mappings?  */
      if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
	return true;

      /* For variables with DECL_HAS_VALUE_EXPR_P set, we cannot tell
	 without analyzing the expression whether or not its location
	 is accessible to anyone else.  In the case of nested parallel
	 regions it certainly may be.  */
      if (TREE_CODE (decl) != RESULT_DECL && DECL_HAS_VALUE_EXPR_P (decl))
	return true;

      /* Do not use copy-in/copy-out for variables that have their
	 address taken.  */
      if (TREE_ADDRESSABLE (decl))
	return true;

      /* lower_send_shared_vars only uses copy-in, but not copy-out
	 for these.  */
      if (TREE_READONLY (decl)
	  || ((TREE_CODE (decl) == RESULT_DECL
	       || TREE_CODE (decl) == PARM_DECL)
	      && DECL_BY_REFERENCE (decl)))
	return false;

      /* Disallow copy-in/out in nested parallel if
	 decl is shared in outer parallel, otherwise
	 each thread could store the shared variable
	 in its own copy-in location, making the
	 variable no longer really shared.  */
      if (shared_ctx->is_nested)
	{
	  omp_context *up;

	  for (up = shared_ctx->outer; up; up = up->outer)
	    if (is_taskreg_ctx (up) && maybe_lookup_decl (decl, up))
	      break;

	  if (up)
	    {
	      tree c;

	      for (c = gimple_omp_taskreg_clauses (up->stmt);
		   c; c = OMP_CLAUSE_CHAIN (c))
		if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
		    && OMP_CLAUSE_DECL (c) == decl)
		  break;

	      if (c)
		goto maybe_mark_addressable_and_ret;
	    }
	}

      /* For tasks avoid using copy-in/out.  As tasks can be
	 deferred or executed in a different thread, when GOMP_task
	 returns, the task hasn't necessarily terminated.  */
      if (is_task_ctx (shared_ctx))
	{
	  tree outer;
	maybe_mark_addressable_and_ret:
	  outer = maybe_lookup_decl_in_outer_ctx (decl, shared_ctx);
	  if (is_gimple_reg (outer) && !omp_member_access_dummy_var (outer))
	    {
	      /* Taking address of OUTER in lower_send_shared_vars
		 might need regimplification of everything that uses the
		 variable.  */
	      if (!task_shared_vars)
		task_shared_vars = BITMAP_ALLOC (NULL);
	      bitmap_set_bit (task_shared_vars, DECL_UID (outer));
	      TREE_ADDRESSABLE (outer) = 1;
	    }
	  return true;
	}
    }

  return false;
}
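
/* For example (illustrative): given

       int x = 0;
       #pragma omp parallel shared(x)
       #pragma omp parallel shared(x)
	 x++;

   the inner parallel must pass &x rather than use copy-in/out; otherwise
   each inner thread would update its own copy-in slot, and the value of X
   after the region would depend on which copy was written back last.
   Similarly for a task: GOMP_task may return before the task body runs,
   so a copy-out performed at that point could not see the task's update.  */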

/* Construct a new automatic decl similar to VAR.  */

static tree
omp_copy_decl_2 (tree var, tree name, tree type, omp_context *ctx)
{
  tree copy = copy_var_decl (var, name, type);

  DECL_CONTEXT (copy) = current_function_decl;
  DECL_CHAIN (copy) = ctx->block_vars;
  /* If VAR is listed in task_shared_vars, it means it wasn't
     originally addressable and is addressable just because task needs to
     take its address.  But we don't need to take address of privatizations
     from that var.  */
  if (TREE_ADDRESSABLE (var)
      && task_shared_vars
      && bitmap_bit_p (task_shared_vars, DECL_UID (var)))
    TREE_ADDRESSABLE (copy) = 0;
  ctx->block_vars = copy;

  return copy;
}

static tree
omp_copy_decl_1 (tree var, omp_context *ctx)
{
  return omp_copy_decl_2 (var, DECL_NAME (var), TREE_TYPE (var), ctx);
}

/* Build COMPONENT_REF and set TREE_THIS_VOLATILE and TREE_READONLY on it
   as appropriate.  */

static tree
omp_build_component_ref (tree obj, tree field)
{
  tree ret = build3 (COMPONENT_REF, TREE_TYPE (field), obj, field, NULL);
  if (TREE_THIS_VOLATILE (field))
    TREE_THIS_VOLATILE (ret) |= 1;
  if (TREE_READONLY (field))
    TREE_READONLY (ret) |= 1;
  return ret;
}

/* Build tree nodes to access the field for VAR on the receiver side.  */

static tree
build_receiver_ref (tree var, bool by_ref, omp_context *ctx)
{
  tree x, field = lookup_field (var, ctx);

  /* If the receiver record type was remapped in the child function,
     remap the field into the new record type.  */
  x = maybe_lookup_field (field, ctx);
  if (x != NULL)
    field = x;

  x = build_simple_mem_ref (ctx->receiver_decl);
  TREE_THIS_NOTRAP (x) = 1;
  x = omp_build_component_ref (x, field);
  if (by_ref)
    {
      x = build_simple_mem_ref (x);
      TREE_THIS_NOTRAP (x) = 1;
    }

  return x;
}

/* Build tree nodes to access VAR in the scope outer to CTX.  In the case
   of a parallel, this is a component reference; for workshare constructs
   this is some variable.  */

static tree
build_outer_var_ref (tree var, omp_context *ctx,
		     enum omp_clause_code code = OMP_CLAUSE_ERROR)
{
  tree x;

  if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
    x = var;
  else if (is_variable_sized (var))
    {
      x = TREE_OPERAND (DECL_VALUE_EXPR (var), 0);
      x = build_outer_var_ref (x, ctx, code);
      x = build_simple_mem_ref (x);
    }
  else if (is_taskreg_ctx (ctx))
    {
      bool by_ref = use_pointer_for_field (var, NULL);
      x = build_receiver_ref (var, by_ref, ctx);
    }
  else if ((gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
	    && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
	   || (code == OMP_CLAUSE_PRIVATE
	       && (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
		   || gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS
		   || gimple_code (ctx->stmt) == GIMPLE_OMP_SINGLE)))
    {
      /* #pragma omp simd isn't a worksharing construct, and can reference
	 even private vars in its linear etc. clauses.
	 Similarly for OMP_CLAUSE_PRIVATE with outer ref, that can refer
	 to private vars in all worksharing constructs.  */
      x = NULL_TREE;
      if (ctx->outer && is_taskreg_ctx (ctx))
	x = lookup_decl (var, ctx->outer);
      else if (ctx->outer)
	x = maybe_lookup_decl_in_outer_ctx (var, ctx);
      if (x == NULL_TREE)
	x = var;
    }
  else if (code == OMP_CLAUSE_LASTPRIVATE && is_taskloop_ctx (ctx))
    {
      gcc_assert (ctx->outer);
      splay_tree_node n
	= splay_tree_lookup (ctx->outer->field_map,
			     (splay_tree_key) &DECL_UID (var));
      if (n == NULL)
	{
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx->outer)))
	    x = var;
	  else
	    x = lookup_decl (var, ctx->outer);
	}
      else
	{
	  tree field = (tree) n->value;
	  /* If the receiver record type was remapped in the child function,
	     remap the field into the new record type.  */
	  x = maybe_lookup_field (field, ctx->outer);
	  if (x != NULL)
	    field = x;

	  x = build_simple_mem_ref (ctx->outer->receiver_decl);
	  x = omp_build_component_ref (x, field);
	  if (use_pointer_for_field (var, ctx->outer))
	    x = build_simple_mem_ref (x);
	}
    }
  else if (ctx->outer)
    {
      omp_context *outer = ctx->outer;
      if (gimple_code (outer->stmt) == GIMPLE_OMP_GRID_BODY)
	{
	  outer = outer->outer;
	  gcc_assert (outer
		      && gimple_code (outer->stmt) != GIMPLE_OMP_GRID_BODY);
	}
      x = lookup_decl (var, outer);
    }
  else if (omp_is_reference (var))
    /* This can happen with orphaned constructs.  If var is reference, it is
       possible it is shared and as such valid.  */
    x = var;
  else if (omp_member_access_dummy_var (var))
    x = var;
  else
    gcc_unreachable ();

  if (x == var)
    {
      tree t = omp_member_access_dummy_var (var);
      if (t)
	{
	  x = DECL_VALUE_EXPR (var);
	  tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
	  if (o != t)
	    x = unshare_and_remap (x, t, o);
	  else
	    x = unshare_expr (x);
	}
    }

  if (omp_is_reference (var))
    x = build_simple_mem_ref (x);

  return x;
}
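
/* Note the taskloop lastprivate case above looks the field up by
   &DECL_UID (var) rather than by the decl itself; those fields are the
   ones installed with mask bit 8 in install_var_field below, which keys
   the field maps the same way.  */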

/* Build tree nodes to access the field for VAR on the sender side.  */

static tree
build_sender_ref (splay_tree_key key, omp_context *ctx)
{
  tree field = lookup_sfield (key, ctx);
  return omp_build_component_ref (ctx->sender_decl, field);
}

static tree
build_sender_ref (tree var, omp_context *ctx)
{
  return build_sender_ref ((splay_tree_key) var, ctx);
}

/* Add a new field for VAR inside the structure CTX->SENDER_DECL.  If
   BASE_POINTERS_RESTRICT, declare the field with restrict.  */

static void
install_var_field (tree var, bool by_ref, int mask, omp_context *ctx,
		   bool base_pointers_restrict = false)
{
  tree field, type, sfield = NULL_TREE;
  splay_tree_key key = (splay_tree_key) var;

  if ((mask & 8) != 0)
    {
      key = (splay_tree_key) &DECL_UID (var);
      gcc_checking_assert (key != (splay_tree_key) var);
    }
  gcc_assert ((mask & 1) == 0
	      || !splay_tree_lookup (ctx->field_map, key));
  gcc_assert ((mask & 2) == 0 || !ctx->sfield_map
	      || !splay_tree_lookup (ctx->sfield_map, key));
  gcc_assert ((mask & 3) == 3
	      || !is_gimple_omp_oacc (ctx->stmt));

  type = TREE_TYPE (var);
  /* Prevent redeclaring the var in the split-off function with a restrict
     pointer type.  Note that we only clear type itself, restrict qualifiers in
     the pointed-to type will be ignored by points-to analysis.  */
  if (POINTER_TYPE_P (type)
      && TYPE_RESTRICT (type))
    type = build_qualified_type (type, TYPE_QUALS (type) & ~TYPE_QUAL_RESTRICT);

  if (mask & 4)
    {
      gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
      type = build_pointer_type (build_pointer_type (type));
    }
  else if (by_ref)
    {
      type = build_pointer_type (type);
      if (base_pointers_restrict)
	type = build_qualified_type (type, TYPE_QUAL_RESTRICT);
    }
  else if ((mask & 3) == 1 && omp_is_reference (var))
    type = TREE_TYPE (type);

  field = build_decl (DECL_SOURCE_LOCATION (var),
		      FIELD_DECL, DECL_NAME (var), type);

  /* Remember what variable this field was created for.  This does have a
     side effect of making dwarf2out ignore this member, so for helpful
     debugging we clear it later in delete_omp_context.  */
  DECL_ABSTRACT_ORIGIN (field) = var;
  if (type == TREE_TYPE (var))
    {
      SET_DECL_ALIGN (field, DECL_ALIGN (var));
      DECL_USER_ALIGN (field) = DECL_USER_ALIGN (var);
      TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (var);
    }
  else
    SET_DECL_ALIGN (field, TYPE_ALIGN (type));

  if ((mask & 3) == 3)
    {
      insert_field_into_struct (ctx->record_type, field);
      if (ctx->srecord_type)
	{
	  sfield = build_decl (DECL_SOURCE_LOCATION (var),
			       FIELD_DECL, DECL_NAME (var), type);
	  DECL_ABSTRACT_ORIGIN (sfield) = var;
	  SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
	  DECL_USER_ALIGN (sfield) = DECL_USER_ALIGN (field);
	  TREE_THIS_VOLATILE (sfield) = TREE_THIS_VOLATILE (field);
	  insert_field_into_struct (ctx->srecord_type, sfield);
	}
    }
  else
    {
      if (ctx->srecord_type == NULL_TREE)
	{
	  tree t;

	  ctx->srecord_type = lang_hooks.types.make_type (RECORD_TYPE);
	  ctx->sfield_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
	  for (t = TYPE_FIELDS (ctx->record_type); t ; t = TREE_CHAIN (t))
	    {
	      sfield = build_decl (DECL_SOURCE_LOCATION (t),
				   FIELD_DECL, DECL_NAME (t), TREE_TYPE (t));
	      DECL_ABSTRACT_ORIGIN (sfield) = DECL_ABSTRACT_ORIGIN (t);
	      insert_field_into_struct (ctx->srecord_type, sfield);
	      splay_tree_insert (ctx->sfield_map,
				 (splay_tree_key) DECL_ABSTRACT_ORIGIN (t),
				 (splay_tree_value) sfield);
	    }
	}
      sfield = field;
      insert_field_into_struct ((mask & 1) ? ctx->record_type
				: ctx->srecord_type, field);
    }

  if (mask & 1)
    splay_tree_insert (ctx->field_map, key, (splay_tree_value) field);
  if ((mask & 2) && ctx->sfield_map)
    splay_tree_insert (ctx->sfield_map, key, (splay_tree_value) sfield);
}
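
/* As used at the call sites in this file, the MASK bits encode:
   bit 0 - install the field into record_type/field_map, bit 1 - into
   srecord_type/sfield_map, bit 2 - use a pointer-to-pointer field for an
   array, bit 3 - key the maps by &DECL_UID (var) instead of the decl
   itself.  E.g. install_var_field (decl, by_ref, 3, ctx) creates the
   field in both records, mask 7 adds the double indirection used for
   GOMP_MAP_POINTER arrays, and mask 11 additionally keys by DECL_UID, as
   done for OMP_CLAUSE_SHARED_FIRSTPRIVATE in scan_sharing_clauses.  */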

static tree
install_var_local (tree var, omp_context *ctx)
{
  tree new_var = omp_copy_decl_1 (var, ctx);
  insert_decl_map (&ctx->cb, var, new_var);
  return new_var;
}

/* Adjust the replacement for DECL in CTX for the new context.  This means
   copying the DECL_VALUE_EXPR, and fixing up the type.  */

static void
fixup_remapped_decl (tree decl, omp_context *ctx, bool private_debug)
{
  tree new_decl, size;

  new_decl = lookup_decl (decl, ctx);

  TREE_TYPE (new_decl) = remap_type (TREE_TYPE (decl), &ctx->cb);

  if ((!TREE_CONSTANT (DECL_SIZE (new_decl)) || private_debug)
      && DECL_HAS_VALUE_EXPR_P (decl))
    {
      tree ve = DECL_VALUE_EXPR (decl);
      walk_tree (&ve, copy_tree_body_r, &ctx->cb, NULL);
      SET_DECL_VALUE_EXPR (new_decl, ve);
      DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
    }

  if (!TREE_CONSTANT (DECL_SIZE (new_decl)))
    {
      size = remap_decl (DECL_SIZE (decl), &ctx->cb);
      if (size == error_mark_node)
	size = TYPE_SIZE (TREE_TYPE (new_decl));
      DECL_SIZE (new_decl) = size;

      size = remap_decl (DECL_SIZE_UNIT (decl), &ctx->cb);
      if (size == error_mark_node)
	size = TYPE_SIZE_UNIT (TREE_TYPE (new_decl));
      DECL_SIZE_UNIT (new_decl) = size;
    }
}

/* The callback for remap_decl.  Search all containing contexts for a
   mapping of the variable; this avoids having to duplicate the splay
   tree ahead of time.  We know a mapping doesn't already exist in the
   given context.  Create new mappings to implement default semantics.  */

static tree
omp_copy_decl (tree var, copy_body_data *cb)
{
  omp_context *ctx = (omp_context *) cb;
  tree new_var;

  if (TREE_CODE (var) == LABEL_DECL)
    {
      new_var = create_artificial_label (DECL_SOURCE_LOCATION (var));
      DECL_CONTEXT (new_var) = current_function_decl;
      insert_decl_map (&ctx->cb, var, new_var);
      return new_var;
    }

  while (!is_taskreg_ctx (ctx))
    {
      ctx = ctx->outer;
      if (ctx == NULL)
	return var;
      new_var = maybe_lookup_decl (var, ctx);
      if (new_var)
	return new_var;
    }

  if (is_global_var (var) || decl_function_context (var) != ctx->cb.src_fn)
    return var;

  return error_mark_node;
}

/* Create a new context, with OUTER_CTX being the surrounding context.  */

static omp_context *
new_omp_context (gimple *stmt, omp_context *outer_ctx)
{
  omp_context *ctx = XCNEW (omp_context);

  splay_tree_insert (all_contexts, (splay_tree_key) stmt,
		     (splay_tree_value) ctx);
  ctx->stmt = stmt;

  if (outer_ctx)
    {
      ctx->outer = outer_ctx;
      ctx->cb = outer_ctx->cb;
      ctx->cb.block = NULL;
      ctx->depth = outer_ctx->depth + 1;
    }
  else
    {
      ctx->cb.src_fn = current_function_decl;
      ctx->cb.dst_fn = current_function_decl;
      ctx->cb.src_node = cgraph_node::get (current_function_decl);
      gcc_checking_assert (ctx->cb.src_node);
      ctx->cb.dst_node = ctx->cb.src_node;
      ctx->cb.src_cfun = cfun;
      ctx->cb.copy_decl = omp_copy_decl;
      ctx->cb.eh_lp_nr = 0;
      ctx->cb.transform_call_graph_edges = CB_CGE_MOVE;
      ctx->depth = 1;
    }

  ctx->cb.decl_map = new hash_map<tree, tree>;

  return ctx;
}

static gimple_seq maybe_catch_exception (gimple_seq);

/* Finalize task copyfn.  */

static void
finalize_task_copyfn (gomp_task *task_stmt)
{
  struct function *child_cfun;
  tree child_fn;
  gimple_seq seq = NULL, new_seq;
  gbind *bind;

  child_fn = gimple_omp_task_copy_fn (task_stmt);
  if (child_fn == NULL_TREE)
    return;

  child_cfun = DECL_STRUCT_FUNCTION (child_fn);
  DECL_STRUCT_FUNCTION (child_fn)->curr_properties = cfun->curr_properties;

  push_cfun (child_cfun);
  bind = gimplify_body (child_fn, false);
  gimple_seq_add_stmt (&seq, bind);
  new_seq = maybe_catch_exception (seq);
  if (new_seq != seq)
    {
      bind = gimple_build_bind (NULL, new_seq, NULL);
      seq = NULL;
      gimple_seq_add_stmt (&seq, bind);
    }
  gimple_set_body (child_fn, seq);
  pop_cfun ();

  /* Inform the callgraph about the new function.  */
  cgraph_node *node = cgraph_node::get_create (child_fn);
  node->parallelized_function = 1;
  cgraph_node::add_new_function (child_fn, false);
}

/* Destroy an omp_context data structure.  Called through the splay tree
   value delete callback.  */

static void
delete_omp_context (splay_tree_value value)
{
  omp_context *ctx = (omp_context *) value;

  delete ctx->cb.decl_map;

  if (ctx->field_map)
    splay_tree_delete (ctx->field_map);
  if (ctx->sfield_map)
    splay_tree_delete (ctx->sfield_map);

  /* We hijacked DECL_ABSTRACT_ORIGIN earlier.  We need to clear it before
     it produces corrupt debug information.  */
  if (ctx->record_type)
    {
      tree t;
      for (t = TYPE_FIELDS (ctx->record_type); t ; t = DECL_CHAIN (t))
	DECL_ABSTRACT_ORIGIN (t) = NULL;
    }
  if (ctx->srecord_type)
    {
      tree t;
      for (t = TYPE_FIELDS (ctx->srecord_type); t ; t = DECL_CHAIN (t))
	DECL_ABSTRACT_ORIGIN (t) = NULL;
    }

  if (is_task_ctx (ctx))
    finalize_task_copyfn (as_a <gomp_task *> (ctx->stmt));

  XDELETE (ctx);
}

/* Fix up RECEIVER_DECL with a type that has been remapped to the child
   context.  */

static void
fixup_child_record_type (omp_context *ctx)
{
  tree f, type = ctx->record_type;

  if (!ctx->receiver_decl)
    return;
  /* ??? It isn't sufficient to just call remap_type here, because
     variably_modified_type_p doesn't work the way we expect for
     record types.  Testing each field for whether it needs remapping
     and creating a new record by hand works, however.  */
  for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
    if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
      break;
  if (f)
    {
      tree name, new_fields = NULL;

      type = lang_hooks.types.make_type (RECORD_TYPE);
      name = DECL_NAME (TYPE_NAME (ctx->record_type));
      name = build_decl (DECL_SOURCE_LOCATION (ctx->receiver_decl),
			 TYPE_DECL, name, type);
      TYPE_NAME (type) = name;

      for (f = TYPE_FIELDS (ctx->record_type); f ; f = DECL_CHAIN (f))
	{
	  tree new_f = copy_node (f);
	  DECL_CONTEXT (new_f) = type;
	  TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &ctx->cb);
	  DECL_CHAIN (new_f) = new_fields;
	  walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &ctx->cb, NULL);
	  walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r,
		     &ctx->cb, NULL);
	  walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
		     &ctx->cb, NULL);
	  new_fields = new_f;

	  /* Arrange to be able to look up the receiver field
	     given the sender field.  */
	  splay_tree_insert (ctx->field_map, (splay_tree_key) f,
			     (splay_tree_value) new_f);
	}
      TYPE_FIELDS (type) = nreverse (new_fields);
      layout_type (type);
    }

  /* In a target region we never modify any of the pointers in *.omp_data_i,
     so attempt to help the optimizers.  */
  if (is_gimple_omp_offloaded (ctx->stmt))
    type = build_qualified_type (type, TYPE_QUAL_CONST);

  TREE_TYPE (ctx->receiver_decl)
    = build_qualified_type (build_reference_type (type), TYPE_QUAL_RESTRICT);
}
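
/* So, roughly, the receiver ends up as a restrict-qualified reference to
   the record type (const-qualified when offloaded), advertising to the
   optimizers that the incoming data block is not aliased by other
   pointers in the child function and, for target regions, not modified
   through this pointer either.  */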

/* Instantiate decls as necessary in CTX to satisfy the data sharing
   specified by CLAUSES.  If BASE_POINTERS_RESTRICT, install var field with
   restrict.  */

static void
scan_sharing_clauses (tree clauses, omp_context *ctx,
		      bool base_pointers_restrict = false)
{
  tree c, decl;
  bool scan_array_reductions = false;

  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      bool by_ref;

      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_PRIVATE:
	  decl = OMP_CLAUSE_DECL (c);
	  if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
	    goto do_private;
	  else if (!is_variable_sized (decl))
	    install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_SHARED:
	  decl = OMP_CLAUSE_DECL (c);
	  /* Ignore shared directives in teams construct.  */
	  if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
	    {
	      /* Global variables don't need to be copied,
		 the receiver side will use them directly.  */
	      tree odecl = maybe_lookup_decl_in_outer_ctx (decl, ctx);
	      if (is_global_var (odecl))
		break;
	      insert_decl_map (&ctx->cb, decl, odecl);
	      break;
	    }
	  gcc_assert (is_taskreg_ctx (ctx));
	  gcc_assert (!COMPLETE_TYPE_P (TREE_TYPE (decl))
		      || !is_variable_sized (decl));
	  /* Global variables don't need to be copied,
	     the receiver side will use them directly.  */
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
	    break;
	  if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	    {
	      use_pointer_for_field (decl, ctx);
	      break;
	    }
	  by_ref = use_pointer_for_field (decl, NULL);
	  if ((! TREE_READONLY (decl) && !OMP_CLAUSE_SHARED_READONLY (c))
	      || TREE_ADDRESSABLE (decl)
	      || by_ref
	      || omp_is_reference (decl))
	    {
	      by_ref = use_pointer_for_field (decl, ctx);
	      install_var_field (decl, by_ref, 3, ctx);
	      install_var_local (decl, ctx);
	      break;
	    }
	  /* We don't need to copy const scalar vars back.  */
	  OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_FIRSTPRIVATE);
	  goto do_private;

	case OMP_CLAUSE_REDUCTION:
	  decl = OMP_CLAUSE_DECL (c);
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	      && TREE_CODE (decl) == MEM_REF)
	    {
	      tree t = TREE_OPERAND (decl, 0);
	      if (TREE_CODE (t) == POINTER_PLUS_EXPR)
		t = TREE_OPERAND (t, 0);
	      if (TREE_CODE (t) == INDIRECT_REF
		  || TREE_CODE (t) == ADDR_EXPR)
		t = TREE_OPERAND (t, 0);
	      install_var_local (t, ctx);
	      if (is_taskreg_ctx (ctx)
		  && !is_global_var (maybe_lookup_decl_in_outer_ctx (t, ctx))
		  && !is_variable_sized (t))
		{
		  by_ref = use_pointer_for_field (t, ctx);
		  install_var_field (t, by_ref, 3, ctx);
		}
	      break;
	    }
	  goto do_private;

	case OMP_CLAUSE_LASTPRIVATE:
	  /* Let the corresponding firstprivate clause create
	     the variable.  */
	  if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
	    break;
	  /* FALLTHRU */

	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_LINEAR:
	  decl = OMP_CLAUSE_DECL (c);
	do_private:
	  if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
	       || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
	      && is_gimple_omp_offloaded (ctx->stmt))
	    {
	      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
		install_var_field (decl, !omp_is_reference (decl), 3, ctx);
	      else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
		install_var_field (decl, true, 3, ctx);
	      else
		install_var_field (decl, false, 3, ctx);
	    }
	  if (is_variable_sized (decl))
	    {
	      if (is_task_ctx (ctx))
		install_var_field (decl, false, 1, ctx);
	      break;
	    }
	  else if (is_taskreg_ctx (ctx))
	    {
	      bool global
		= is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx));
	      by_ref = use_pointer_for_field (decl, NULL);

	      if (is_task_ctx (ctx)
		  && (global || by_ref || omp_is_reference (decl)))
		{
		  install_var_field (decl, false, 1, ctx);
		  if (!global)
		    install_var_field (decl, by_ref, 2, ctx);
		}
	      else if (!global)
		install_var_field (decl, by_ref, 3, ctx);
	    }
	  install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_USE_DEVICE_PTR:
	  decl = OMP_CLAUSE_DECL (c);
	  if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	    install_var_field (decl, true, 3, ctx);
	  else
	    install_var_field (decl, false, 3, ctx);
	  if (DECL_SIZE (decl)
	      && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
	    {
	      tree decl2 = DECL_VALUE_EXPR (decl);
	      gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
	      decl2 = TREE_OPERAND (decl2, 0);
	      gcc_assert (DECL_P (decl2));
	      install_var_local (decl2, ctx);
	    }
	  install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_IS_DEVICE_PTR:
	  decl = OMP_CLAUSE_DECL (c);
	  goto do_private;

	case OMP_CLAUSE__LOOPTEMP_:
	  gcc_assert (is_taskreg_ctx (ctx));
	  decl = OMP_CLAUSE_DECL (c);
	  install_var_field (decl, false, 3, ctx);
	  install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_COPYIN:
	  decl = OMP_CLAUSE_DECL (c);
	  by_ref = use_pointer_for_field (decl, NULL);
	  install_var_field (decl, by_ref, 3, ctx);
	  break;

	case OMP_CLAUSE_DEFAULT:
	  ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
	  break;

	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_DIST_SCHEDULE:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE_PRIORITY:
	case OMP_CLAUSE_GRAINSIZE:
	case OMP_CLAUSE_NUM_TASKS:
	case OMP_CLAUSE__CILK_FOR_COUNT_:
	case OMP_CLAUSE_NUM_GANGS:
	case OMP_CLAUSE_NUM_WORKERS:
	case OMP_CLAUSE_VECTOR_LENGTH:
	  if (ctx->outer)
	    scan_omp_op (&OMP_CLAUSE_OPERAND (c, 0), ctx->outer);
	  break;

	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	case OMP_CLAUSE_MAP:
	  if (ctx->outer)
	    scan_omp_op (&OMP_CLAUSE_SIZE (c), ctx->outer);
	  decl = OMP_CLAUSE_DECL (c);
	  /* Global variables with "omp declare target" attribute
	     don't need to be copied, the receiver side will use them
	     directly.  However, global variables with "omp declare target link"
	     attribute need to be copied.  */
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && DECL_P (decl)
	      && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
		   && (OMP_CLAUSE_MAP_KIND (c)
		       != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
		  || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	      && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
	      && varpool_node::get_create (decl)->offloadable
	      && !lookup_attribute ("omp declare target link",
				    DECL_ATTRIBUTES (decl)))
	    break;
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER)
	    {
	      /* Ignore GOMP_MAP_POINTER kind for arrays in regions that are
		 not offloaded; there is nothing to map for those.  */
	      if (!is_gimple_omp_offloaded (ctx->stmt)
		  && !POINTER_TYPE_P (TREE_TYPE (decl))
		  && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
		break;
	    }
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
		  || (OMP_CLAUSE_MAP_KIND (c)
		      == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
	    {
	      if (TREE_CODE (decl) == COMPONENT_REF
		  || (TREE_CODE (decl) == INDIRECT_REF
		      && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
		      && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
			  == REFERENCE_TYPE)))
		break;
	      if (DECL_SIZE (decl)
		  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  install_var_local (decl2, ctx);
		}
	      install_var_local (decl, ctx);
	      break;
	    }
	  if (DECL_P (decl))
	    {
	      if (DECL_SIZE (decl)
		  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  install_var_field (decl2, true, 3, ctx);
		  install_var_local (decl2, ctx);
		  install_var_local (decl, ctx);
		}
	      else
		{
		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
		      && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
		      && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
		      && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
		    install_var_field (decl, true, 7, ctx);
		  else
		    install_var_field (decl, true, 3, ctx,
				       base_pointers_restrict);
		  if (is_gimple_omp_offloaded (ctx->stmt)
		      && !OMP_CLAUSE_MAP_IN_REDUCTION (c))
		    install_var_local (decl, ctx);
		}
	    }
	  else
	    {
	      tree base = get_base_address (decl);
	      tree nc = OMP_CLAUSE_CHAIN (c);
	      if (DECL_P (base)
		  && nc != NULL_TREE
		  && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
		  && OMP_CLAUSE_DECL (nc) == base
		  && OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_POINTER
		  && integer_zerop (OMP_CLAUSE_SIZE (nc)))
		{
		  OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c) = 1;
		  OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (nc) = 1;
		}
	      else
		{
		  if (ctx->outer)
		    {
		      scan_omp_op (&OMP_CLAUSE_DECL (c), ctx->outer);
		      decl = OMP_CLAUSE_DECL (c);
		    }
		  gcc_assert (!splay_tree_lookup (ctx->field_map,
						  (splay_tree_key) decl));
		  tree field
		    = build_decl (OMP_CLAUSE_LOCATION (c),
				  FIELD_DECL, NULL_TREE, ptr_type_node);
		  SET_DECL_ALIGN (field, TYPE_ALIGN (ptr_type_node));
		  insert_field_into_struct (ctx->record_type, field);
		  splay_tree_insert (ctx->field_map, (splay_tree_key) decl,
				     (splay_tree_value) field);
		}
	    }
	  break;

	case OMP_CLAUSE__GRIDDIM_:
	  if (ctx->outer)
	    {
	      scan_omp_op (&OMP_CLAUSE__GRIDDIM__SIZE (c), ctx->outer);
	      scan_omp_op (&OMP_CLAUSE__GRIDDIM__GROUP (c), ctx->outer);
	    }
	  break;

	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE_SIMDLEN:
	case OMP_CLAUSE_THREADS:
	case OMP_CLAUSE_SIMD:
	case OMP_CLAUSE_NOGROUP:
	case OMP_CLAUSE_DEFAULTMAP:
	case OMP_CLAUSE_ASYNC:
	case OMP_CLAUSE_WAIT:
	case OMP_CLAUSE_GANG:
	case OMP_CLAUSE_WORKER:
	case OMP_CLAUSE_VECTOR:
	case OMP_CLAUSE_INDEPENDENT:
	case OMP_CLAUSE_AUTO:
	case OMP_CLAUSE_SEQ:
	case OMP_CLAUSE_TILE:
	case OMP_CLAUSE__SIMT_:
	  break;

	case OMP_CLAUSE_ALIGNED:
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_global_var (decl)
	      && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	    install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE__CACHE_:
	default:
	  gcc_unreachable ();
	}
    }

  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_LASTPRIVATE:
	  /* Let the corresponding firstprivate clause create
	     the variable.  */
	  if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
	    scan_array_reductions = true;
	  if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
	    break;
	  /* FALLTHRU */

	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_LINEAR:
	case OMP_CLAUSE_IS_DEVICE_PTR:
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_variable_sized (decl))
	    {
	      if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
		   || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
		  && is_gimple_omp_offloaded (ctx->stmt))
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  install_var_local (decl2, ctx);
		  fixup_remapped_decl (decl2, ctx, false);
		}
	      install_var_local (decl, ctx);
	    }
	  fixup_remapped_decl (decl, ctx,
			       OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
			       && OMP_CLAUSE_PRIVATE_DEBUG (c));
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
	      && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
	    scan_array_reductions = true;
	  break;

	case OMP_CLAUSE_REDUCTION:
	  decl = OMP_CLAUSE_DECL (c);
	  if (TREE_CODE (decl) != MEM_REF)
	    {
	      if (is_variable_sized (decl))
		install_var_local (decl, ctx);
	      fixup_remapped_decl (decl, ctx, false);
	    }
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	    scan_array_reductions = true;
	  break;

	case OMP_CLAUSE_SHARED:
	  /* Ignore shared directives in teams construct.  */
	  if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
	    break;
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
	    break;
	  if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	    {
	      if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl,
								 ctx->outer)))
		break;
	      bool by_ref = use_pointer_for_field (decl, ctx);
	      install_var_field (decl, by_ref, 11, ctx);
	      break;
	    }
	  fixup_remapped_decl (decl, ctx, false);
	  break;

	case OMP_CLAUSE_MAP:
	  if (!is_gimple_omp_offloaded (ctx->stmt))
	    break;
	  decl = OMP_CLAUSE_DECL (c);
	  if (DECL_P (decl)
	      && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
		   && (OMP_CLAUSE_MAP_KIND (c)
		       != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
		  || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	      && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
	      && varpool_node::get_create (decl)->offloadable)
	    break;
	  if (DECL_P (decl))
	    {
	      if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
		   || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
		  && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
		  && !COMPLETE_TYPE_P (TREE_TYPE (decl)))
		{
		  tree new_decl = lookup_decl (decl, ctx);
		  TREE_TYPE (new_decl)
		    = remap_type (TREE_TYPE (decl), &ctx->cb);
		}
	      else if (DECL_SIZE (decl)
		       && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  fixup_remapped_decl (decl2, ctx, false);
		  fixup_remapped_decl (decl, ctx, true);
		}
	      else
		fixup_remapped_decl (decl, ctx, false);
	    }
	  break;

	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_DIST_SCHEDULE:
	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE_SIMDLEN:
	case OMP_CLAUSE_ALIGNED:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE__LOOPTEMP_:
	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	case OMP_CLAUSE_PRIORITY:
	case OMP_CLAUSE_GRAINSIZE:
	case OMP_CLAUSE_NUM_TASKS:
	case OMP_CLAUSE_THREADS:
	case OMP_CLAUSE_SIMD:
	case OMP_CLAUSE_NOGROUP:
	case OMP_CLAUSE_DEFAULTMAP:
	case OMP_CLAUSE_USE_DEVICE_PTR:
	case OMP_CLAUSE__CILK_FOR_COUNT_:
	case OMP_CLAUSE_ASYNC:
	case OMP_CLAUSE_WAIT:
	case OMP_CLAUSE_NUM_GANGS:
	case OMP_CLAUSE_NUM_WORKERS:
	case OMP_CLAUSE_VECTOR_LENGTH:
	case OMP_CLAUSE_GANG:
	case OMP_CLAUSE_WORKER:
	case OMP_CLAUSE_VECTOR:
	case OMP_CLAUSE_INDEPENDENT:
	case OMP_CLAUSE_AUTO:
	case OMP_CLAUSE_SEQ:
	case OMP_CLAUSE_TILE:
	case OMP_CLAUSE__GRIDDIM_:
	case OMP_CLAUSE__SIMT_:
	  break;

	case OMP_CLAUSE__CACHE_:
	default:
	  gcc_unreachable ();
	}
    }

  gcc_checking_assert (!scan_array_reductions
		       || !is_gimple_omp_oacc (ctx->stmt));
  if (scan_array_reductions)
    {
      for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	    && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	  {
	    scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c), ctx);
	    scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
	  }
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
		 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
	  scan_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
		 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
	  scan_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
    }
}

/* Create a new name for omp child function.  Returns an identifier.  If
   IS_CILK_FOR is true then the suffix for the child function is
   "_cilk_for_fn".  */

static tree
create_omp_child_function_name (bool task_copy, bool is_cilk_for)
{
  if (is_cilk_for)
    return clone_function_name (current_function_decl, "_cilk_for_fn");
  return clone_function_name (current_function_decl,
			      task_copy ? "_omp_cpyfn" : "_omp_fn");
}

/* Returns the type of the induction variable for the child function for
   _Cilk_for and the types for _high and _low variables based on TYPE.  */

static tree
cilk_for_check_loop_diff_type (tree type)
{
  if (TYPE_PRECISION (type) <= TYPE_PRECISION (uint32_type_node))
    {
      if (TYPE_UNSIGNED (type))
	return uint32_type_node;
      else
	return integer_type_node;
    }
  else
    {
      if (TYPE_UNSIGNED (type))
	return uint64_type_node;
      else
	return long_long_integer_type_node;
    }
}

/* Return true if CTX may belong to offloaded code: either if current function
   is offloaded, or any enclosing context corresponds to a target region.  */

static bool
omp_maybe_offloaded_ctx (omp_context *ctx)
{
  if (cgraph_node::get (current_function_decl)->offloadable)
    return true;
  for (; ctx; ctx = ctx->outer)
    if (is_gimple_omp_offloaded (ctx->stmt))
      return true;
  return false;
}

/* Build a decl for the omp child function.  It'll not contain a body
   yet, just the bare decl.  */

static void
create_omp_child_function (omp_context *ctx, bool task_copy)
{
  tree decl, type, name, t;

  tree cilk_for_count
    = (flag_cilkplus && gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL)
      ? omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
			 OMP_CLAUSE__CILK_FOR_COUNT_) : NULL_TREE;
  tree cilk_var_type = NULL_TREE;

  name = create_omp_child_function_name (task_copy,
					 cilk_for_count != NULL_TREE);
  if (task_copy)
    type = build_function_type_list (void_type_node, ptr_type_node,
				     ptr_type_node, NULL_TREE);
  else if (cilk_for_count)
    {
      type = TREE_TYPE (OMP_CLAUSE_OPERAND (cilk_for_count, 0));
      cilk_var_type = cilk_for_check_loop_diff_type (type);
      type = build_function_type_list (void_type_node, ptr_type_node,
				       cilk_var_type, cilk_var_type, NULL_TREE);
    }
  else
    type = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);

  decl = build_decl (gimple_location (ctx->stmt), FUNCTION_DECL, name, type);

  gcc_checking_assert (!is_gimple_omp_oacc (ctx->stmt)
		       || !task_copy);
  if (!task_copy)
    ctx->cb.dst_fn = decl;
  else
    gimple_omp_task_set_copy_fn (ctx->stmt, decl);

  TREE_STATIC (decl) = 1;
  TREE_USED (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  DECL_IGNORED_P (decl) = 0;
  TREE_PUBLIC (decl) = 0;
  DECL_UNINLINABLE (decl) = 1;
  DECL_EXTERNAL (decl) = 0;
  DECL_CONTEXT (decl) = NULL_TREE;
  DECL_INITIAL (decl) = make_node (BLOCK);
  BLOCK_SUPERCONTEXT (DECL_INITIAL (decl)) = decl;
  if (omp_maybe_offloaded_ctx (ctx))
    {
      cgraph_node::get_create (decl)->offloadable = 1;
      if (ENABLE_OFFLOADING)
	g->have_offload = true;
    }

  if (cgraph_node::get_create (decl)->offloadable
      && !lookup_attribute ("omp declare target",
			    DECL_ATTRIBUTES (current_function_decl)))
    {
      const char *target_attr = (is_gimple_omp_offloaded (ctx->stmt)
				 ? "omp target entrypoint"
				 : "omp declare target");
      DECL_ATTRIBUTES (decl)
	= tree_cons (get_identifier (target_attr),
		     NULL_TREE, DECL_ATTRIBUTES (decl));
    }

  t = build_decl (DECL_SOURCE_LOCATION (decl),
		  RESULT_DECL, NULL_TREE, void_type_node);
  DECL_ARTIFICIAL (t) = 1;
  DECL_IGNORED_P (t) = 1;
  DECL_CONTEXT (t) = decl;
  DECL_RESULT (decl) = t;

  /* _Cilk_for's child function requires two extra parameters called
     __low and __high that are set by the Cilk runtime when it calls this
     function.  */
  if (cilk_for_count)
    {
      t = build_decl (DECL_SOURCE_LOCATION (decl),
		      PARM_DECL, get_identifier ("__high"), cilk_var_type);
      DECL_ARTIFICIAL (t) = 1;
      DECL_NAMELESS (t) = 1;
      DECL_ARG_TYPE (t) = ptr_type_node;
      DECL_CONTEXT (t) = current_function_decl;
      TREE_USED (t) = 1;
      DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
      DECL_ARGUMENTS (decl) = t;

      t = build_decl (DECL_SOURCE_LOCATION (decl),
		      PARM_DECL, get_identifier ("__low"), cilk_var_type);
      DECL_ARTIFICIAL (t) = 1;
      DECL_NAMELESS (t) = 1;
      DECL_ARG_TYPE (t) = ptr_type_node;
      DECL_CONTEXT (t) = current_function_decl;
      TREE_USED (t) = 1;
      DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
      DECL_ARGUMENTS (decl) = t;
    }

  tree data_name = get_identifier (".omp_data_i");
  t = build_decl (DECL_SOURCE_LOCATION (decl), PARM_DECL, data_name,
		  ptr_type_node);
  DECL_ARTIFICIAL (t) = 1;
  DECL_NAMELESS (t) = 1;
  DECL_ARG_TYPE (t) = ptr_type_node;
  DECL_CONTEXT (t) = current_function_decl;
  TREE_USED (t) = 1;
  TREE_READONLY (t) = 1;
  if (cilk_for_count)
    DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
  DECL_ARGUMENTS (decl) = t;
  if (!task_copy)
    ctx->receiver_decl = t;
  else
    {
      t = build_decl (DECL_SOURCE_LOCATION (decl),
		      PARM_DECL, get_identifier (".omp_data_o"),
		      ptr_type_node);
      DECL_ARTIFICIAL (t) = 1;
      DECL_NAMELESS (t) = 1;
      DECL_ARG_TYPE (t) = ptr_type_node;
      DECL_CONTEXT (t) = current_function_decl;
      TREE_USED (t) = 1;
      TREE_ADDRESSABLE (t) = 1;
      DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
      DECL_ARGUMENTS (decl) = t;
    }

  /* Allocate memory for the function structure.  The call to
     allocate_struct_function clobbers CFUN, so we need to restore
     it afterward.  */
  push_struct_function (decl);
  cfun->function_end_locus = gimple_location (ctx->stmt);
  init_tree_ssa (cfun);
  pop_cfun ();
}

/* Callback for walk_gimple_seq.  Check if combined parallel
   contains gimple_omp_for_combined_into_p OMP_FOR.  */

tree
omp_find_combined_for (gimple_stmt_iterator *gsi_p,
		       bool *handled_ops_p,
		       struct walk_stmt_info *wi)
{
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;
  switch (gimple_code (stmt))
    {
    WALK_SUBSTMTS;

    case GIMPLE_OMP_FOR:
      if (gimple_omp_for_combined_into_p (stmt)
	  && gimple_omp_for_kind (stmt)
	     == *(const enum gf_mask *) (wi->info))
	{
	  wi->info = stmt;
	  return integer_zero_node;
	}
      break;
    default:
      break;
    }
  return NULL;
}
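
/* A typical use (see add_taskreg_looptemp_clauses below): WI.INFO starts
   out pointing at the gf_mask kind to search for, e.g.

       enum gf_mask msk = GF_OMP_FOR_KIND_FOR;
       wi.info = (void *) &msk;
       walk_gimple_seq (body, omp_find_combined_for, NULL, &wi);

   and on success WI.INFO is overwritten with the matching GIMPLE_OMP_FOR
   statement, which is how the caller detects that one was found.  */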

/* Add _LOOPTEMP_ clauses on OpenMP parallel or task.  */

static void
add_taskreg_looptemp_clauses (enum gf_mask msk, gimple *stmt,
			      omp_context *outer_ctx)
{
  struct walk_stmt_info wi;

  memset (&wi, 0, sizeof (wi));
  wi.val_only = true;
  wi.info = (void *) &msk;
  walk_gimple_seq (gimple_omp_body (stmt), omp_find_combined_for, NULL, &wi);
  if (wi.info != (void *) &msk)
    {
      gomp_for *for_stmt = as_a <gomp_for *> ((gimple *) wi.info);
      struct omp_for_data fd;
      omp_extract_for_data (for_stmt, &fd, NULL);
      /* We need two temporaries with fd.loop.v type (istart/iend)
	 and then (fd.collapse - 1) temporaries with the same
	 type for count2 ... countN-1 vars if not constant.  */
      size_t count = 2, i;
      tree type = fd.iter_type;
      if (fd.collapse > 1
	  && TREE_CODE (fd.loop.n2) != INTEGER_CST)
	{
	  count += fd.collapse - 1;
	  /* If there are lastprivate clauses on the inner
	     GIMPLE_OMP_FOR, add one more temporary for the total number
	     of iterations (product of count1 ... countN-1).  */
	  if (omp_find_clause (gimple_omp_for_clauses (for_stmt),
			       OMP_CLAUSE_LASTPRIVATE))
	    count++;
	  else if (msk == GF_OMP_FOR_KIND_FOR
		   && omp_find_clause (gimple_omp_parallel_clauses (stmt),
				       OMP_CLAUSE_LASTPRIVATE))
	    count++;
	}
      for (i = 0; i < count; i++)
	{
	  tree temp = create_tmp_var (type);
	  tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
	  insert_decl_map (&outer_ctx->cb, temp, temp);
	  OMP_CLAUSE_DECL (c) = temp;
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
	  gimple_omp_taskreg_set_clauses (stmt, c);
	}
    }
}
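
/* For example (illustrative), for a combined construct such as

       #pragma omp parallel for
       for (i = 0; i < n; i++) ...

   the loop bounds end up being computed inside the parallel's child
   function, so _looptemp_ clauses are added to carry the computed
   istart/iend from the GIMPLE_OMP_PARALLEL down to the combined
   GIMPLE_OMP_FOR; collapsed loops with non-constant counts get the
   additional temporaries described in the comments above.  */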

/* Scan an OpenMP parallel directive.  */

static void
scan_omp_parallel (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
{
  omp_context *ctx;
  tree name;
  gomp_parallel *stmt = as_a <gomp_parallel *> (gsi_stmt (*gsi));

  /* Ignore parallel directives with empty bodies, unless there
     are copyin clauses.  */
  if (optimize > 0
      && empty_body_p (gimple_omp_body (stmt))
      && omp_find_clause (gimple_omp_parallel_clauses (stmt),
			  OMP_CLAUSE_COPYIN) == NULL)
    {
      gsi_replace (gsi, gimple_build_nop (), false);
      return;
    }

  if (gimple_omp_parallel_combined_p (stmt))
    add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_FOR, stmt, outer_ctx);

  ctx = new_omp_context (stmt, outer_ctx);
  taskreg_contexts.safe_push (ctx);
  if (taskreg_nesting_level > 1)
    ctx->is_nested = true;
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  ctx->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  name = create_tmp_var_name (".omp_data_s");
  name = build_decl (gimple_location (stmt),
		     TYPE_DECL, name, ctx->record_type);
  DECL_ARTIFICIAL (name) = 1;
  DECL_NAMELESS (name) = 1;
  TYPE_NAME (ctx->record_type) = name;
  TYPE_ARTIFICIAL (ctx->record_type) = 1;
  if (!gimple_omp_parallel_grid_phony (stmt))
    {
      create_omp_child_function (ctx, false);
      gimple_omp_parallel_set_child_fn (stmt, ctx->cb.dst_fn);
    }

  scan_sharing_clauses (gimple_omp_parallel_clauses (stmt), ctx);
  scan_omp (gimple_omp_body_ptr (stmt), ctx);

  if (TYPE_FIELDS (ctx->record_type) == NULL)
    ctx->record_type = ctx->receiver_decl = NULL;
}

/* Scan an OpenMP task directive.  */

static void
scan_omp_task (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
{
  omp_context *ctx;
  tree name, t;
  gomp_task *stmt = as_a <gomp_task *> (gsi_stmt (*gsi));

  /* Ignore task directives with empty bodies, unless they have a depend
     clause.  */
  if (optimize > 0
      && empty_body_p (gimple_omp_body (stmt))
      && !omp_find_clause (gimple_omp_task_clauses (stmt), OMP_CLAUSE_DEPEND))
    {
      gsi_replace (gsi, gimple_build_nop (), false);
      return;
    }

  if (gimple_omp_task_taskloop_p (stmt))
    add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_TASKLOOP, stmt, outer_ctx);

  ctx = new_omp_context (stmt, outer_ctx);
  taskreg_contexts.safe_push (ctx);
  if (taskreg_nesting_level > 1)
    ctx->is_nested = true;
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  ctx->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  name = create_tmp_var_name (".omp_data_s");
  name = build_decl (gimple_location (stmt),
		     TYPE_DECL, name, ctx->record_type);
  DECL_ARTIFICIAL (name) = 1;
  DECL_NAMELESS (name) = 1;
  TYPE_NAME (ctx->record_type) = name;
  TYPE_ARTIFICIAL (ctx->record_type) = 1;
  create_omp_child_function (ctx, false);
  gimple_omp_task_set_child_fn (stmt, ctx->cb.dst_fn);

  scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);

  if (ctx->srecord_type)
    {
      name = create_tmp_var_name (".omp_data_a");
      name = build_decl (gimple_location (stmt),
			 TYPE_DECL, name, ctx->srecord_type);
      DECL_ARTIFICIAL (name) = 1;
      DECL_NAMELESS (name) = 1;
      TYPE_NAME (ctx->srecord_type) = name;
      TYPE_ARTIFICIAL (ctx->srecord_type) = 1;
      create_omp_child_function (ctx, true);
    }

  scan_omp (gimple_omp_body_ptr (stmt), ctx);

  if (TYPE_FIELDS (ctx->record_type) == NULL)
    {
      ctx->record_type = ctx->receiver_decl = NULL;
      t = build_int_cst (long_integer_type_node, 0);
      gimple_omp_task_set_arg_size (stmt, t);
      t = build_int_cst (long_integer_type_node, 1);
      gimple_omp_task_set_arg_align (stmt, t);
    }
}
1917 /* If any decls have been made addressable during scan_omp,
1918 adjust their fields if needed, and lay out the record types
1919 of parallel/task constructs. */
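/* For illustration (a sketch, not tied to any particular testcase): given

     #pragma omp task shared (x)

   where x was made addressable while scanning the task body, the field for
   x in the .omp_data_s record is retyped below to a pointer to x's type
   before the record is laid out, and for tasks the GOMP_task arg_size and
   arg_align arguments are then derived from the laid-out record.  */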
1921 static void
1922 finish_taskreg_scan (omp_context *ctx)
1924 if (ctx->record_type == NULL_TREE)
1925 return;
1927 /* If any task_shared_vars were needed, check for all
1928 OMP_CLAUSE_SHARED clauses on GIMPLE_OMP_{PARALLEL,TASK}
1929 statements whether use_pointer_for_field has changed
1930 because of that; if it has, update the field types now. */
1931 if (task_shared_vars)
1933 tree c;
1935 for (c = gimple_omp_taskreg_clauses (ctx->stmt);
1936 c; c = OMP_CLAUSE_CHAIN (c))
1937 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
1938 && !OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
1940 tree decl = OMP_CLAUSE_DECL (c);
1942 /* Global variables don't need to be copied,
1943 the receiver side will use them directly. */
1944 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1945 continue;
1946 if (!bitmap_bit_p (task_shared_vars, DECL_UID (decl))
1947 || !use_pointer_for_field (decl, ctx))
1948 continue;
1949 tree field = lookup_field (decl, ctx);
1950 if (TREE_CODE (TREE_TYPE (field)) == POINTER_TYPE
1951 && TREE_TYPE (TREE_TYPE (field)) == TREE_TYPE (decl))
1952 continue;
1953 TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
1954 TREE_THIS_VOLATILE (field) = 0;
1955 DECL_USER_ALIGN (field) = 0;
1956 SET_DECL_ALIGN (field, TYPE_ALIGN (TREE_TYPE (field)));
1957 if (TYPE_ALIGN (ctx->record_type) < DECL_ALIGN (field))
1958 SET_TYPE_ALIGN (ctx->record_type, DECL_ALIGN (field));
1959 if (ctx->srecord_type)
1961 tree sfield = lookup_sfield (decl, ctx);
1962 TREE_TYPE (sfield) = TREE_TYPE (field);
1963 TREE_THIS_VOLATILE (sfield) = 0;
1964 DECL_USER_ALIGN (sfield) = 0;
1965 SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
1966 if (TYPE_ALIGN (ctx->srecord_type) < DECL_ALIGN (sfield))
1967 SET_TYPE_ALIGN (ctx->srecord_type, DECL_ALIGN (sfield));
1972 if (gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL)
1974 layout_type (ctx->record_type);
1975 fixup_child_record_type (ctx);
1977 else
1979 location_t loc = gimple_location (ctx->stmt);
1980 tree *p, vla_fields = NULL_TREE, *q = &vla_fields;
1981 /* Move VLA fields to the end. */
1982 p = &TYPE_FIELDS (ctx->record_type);
1983 while (*p)
1984 if (!TYPE_SIZE_UNIT (TREE_TYPE (*p))
1985 || ! TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (*p))))
1987 *q = *p;
1988 *p = TREE_CHAIN (*p);
1989 TREE_CHAIN (*q) = NULL_TREE;
1990 q = &TREE_CHAIN (*q);
1992 else
1993 p = &DECL_CHAIN (*p);
1994 *p = vla_fields;
1995 if (gimple_omp_task_taskloop_p (ctx->stmt))
1997 /* Move the fields corresponding to the first and second _looptemp_
1998 clauses to the front. These are filled in by GOMP_taskloop
1999 and thus need to be at specific positions. */
2000 tree c1 = gimple_omp_task_clauses (ctx->stmt);
2001 c1 = omp_find_clause (c1, OMP_CLAUSE__LOOPTEMP_);
2002 tree c2 = omp_find_clause (OMP_CLAUSE_CHAIN (c1),
2003 OMP_CLAUSE__LOOPTEMP_);
2004 tree f1 = lookup_field (OMP_CLAUSE_DECL (c1), ctx);
2005 tree f2 = lookup_field (OMP_CLAUSE_DECL (c2), ctx);
2006 p = &TYPE_FIELDS (ctx->record_type);
2007 while (*p)
2008 if (*p == f1 || *p == f2)
2009 *p = DECL_CHAIN (*p);
2010 else
2011 p = &DECL_CHAIN (*p);
2012 DECL_CHAIN (f1) = f2;
2013 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->record_type);
2014 TYPE_FIELDS (ctx->record_type) = f1;
2015 if (ctx->srecord_type)
2017 f1 = lookup_sfield (OMP_CLAUSE_DECL (c1), ctx);
2018 f2 = lookup_sfield (OMP_CLAUSE_DECL (c2), ctx);
2019 p = &TYPE_FIELDS (ctx->srecord_type);
2020 while (*p)
2021 if (*p == f1 || *p == f2)
2022 *p = DECL_CHAIN (*p);
2023 else
2024 p = &DECL_CHAIN (*p);
2025 DECL_CHAIN (f1) = f2;
2026 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
2027 TYPE_FIELDS (ctx->srecord_type) = f1;
2030 layout_type (ctx->record_type);
2031 fixup_child_record_type (ctx);
2032 if (ctx->srecord_type)
2033 layout_type (ctx->srecord_type);
2034 tree t = fold_convert_loc (loc, long_integer_type_node,
2035 TYPE_SIZE_UNIT (ctx->record_type));
2036 gimple_omp_task_set_arg_size (ctx->stmt, t);
2037 t = build_int_cst (long_integer_type_node,
2038 TYPE_ALIGN_UNIT (ctx->record_type));
2039 gimple_omp_task_set_arg_align (ctx->stmt, t);
2043 /* Find the enclosing offload context. */
2045 static omp_context *
2046 enclosing_target_ctx (omp_context *ctx)
2048 for (; ctx; ctx = ctx->outer)
2049 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET)
2050 break;
2052 return ctx;
2055 /* Return true if ctx is part of an oacc kernels region. */
2057 static bool
2058 ctx_in_oacc_kernels_region (omp_context *ctx)
2060 for (;ctx != NULL; ctx = ctx->outer)
2062 gimple *stmt = ctx->stmt;
2063 if (gimple_code (stmt) == GIMPLE_OMP_TARGET
2064 && gimple_omp_target_kind (stmt) == GF_OMP_TARGET_KIND_OACC_KERNELS)
2065 return true;
2068 return false;
2071 /* Check the parallelism clauses inside a kernels region.
2072 Until kernels handling moves to use the same loop indirection
2073 scheme as parallel, we need to do this checking early. */
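/* For illustration (a hypothetical example): in

     #pragma acc kernels
     {
       #pragma acc loop gang
       for (i = 0; i < n; i++)
	 {
	   #pragma acc loop gang
	   for (j = 0; j < m; j++)
	     ;
	 }
     }

   the recursion below ORs each level's gang/worker/vector mask into
   OUTER_MASK, so the inner loop's reuse of gang parallelism is diagnosed
   with "inner loop uses same OpenACC parallelism as containing loop".  */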
2075 static unsigned
2076 check_oacc_kernel_gwv (gomp_for *stmt, omp_context *ctx)
2078 bool checking = true;
2079 unsigned outer_mask = 0;
2080 unsigned this_mask = 0;
2081 bool has_seq = false, has_auto = false;
2083 if (ctx->outer)
2084 outer_mask = check_oacc_kernel_gwv (NULL, ctx->outer);
2085 if (!stmt)
2087 checking = false;
2088 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR)
2089 return outer_mask;
2090 stmt = as_a <gomp_for *> (ctx->stmt);
2093 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2095 switch (OMP_CLAUSE_CODE (c))
2097 case OMP_CLAUSE_GANG:
2098 this_mask |= GOMP_DIM_MASK (GOMP_DIM_GANG);
2099 break;
2100 case OMP_CLAUSE_WORKER:
2101 this_mask |= GOMP_DIM_MASK (GOMP_DIM_WORKER);
2102 break;
2103 case OMP_CLAUSE_VECTOR:
2104 this_mask |= GOMP_DIM_MASK (GOMP_DIM_VECTOR);
2105 break;
2106 case OMP_CLAUSE_SEQ:
2107 has_seq = true;
2108 break;
2109 case OMP_CLAUSE_AUTO:
2110 has_auto = true;
2111 break;
2112 default:
2113 break;
2117 if (checking)
2119 if (has_seq && (this_mask || has_auto))
2120 error_at (gimple_location (stmt), "%<seq%> overrides other"
2121 " OpenACC loop specifiers");
2122 else if (has_auto && this_mask)
2123 error_at (gimple_location (stmt), "%<auto%> conflicts with other"
2124 " OpenACC loop specifiers");
2126 if (this_mask & outer_mask)
2127 error_at (gimple_location (stmt), "inner loop uses same"
2128 " OpenACC parallelism as containing loop");
2131 return outer_mask | this_mask;
2134 /* Scan a GIMPLE_OMP_FOR. */
2136 static omp_context *
2137 scan_omp_for (gomp_for *stmt, omp_context *outer_ctx)
2139 omp_context *ctx;
2140 size_t i;
2141 tree clauses = gimple_omp_for_clauses (stmt);
2143 ctx = new_omp_context (stmt, outer_ctx);
2145 if (is_gimple_omp_oacc (stmt))
2147 omp_context *tgt = enclosing_target_ctx (outer_ctx);
2149 if (!tgt || is_oacc_parallel (tgt))
2150 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
2152 char const *check = NULL;
2154 switch (OMP_CLAUSE_CODE (c))
2156 case OMP_CLAUSE_GANG:
2157 check = "gang";
2158 break;
2160 case OMP_CLAUSE_WORKER:
2161 check = "worker";
2162 break;
2164 case OMP_CLAUSE_VECTOR:
2165 check = "vector";
2166 break;
2168 default:
2169 break;
2172 if (check && OMP_CLAUSE_OPERAND (c, 0))
2173 error_at (gimple_location (stmt),
2174 "argument not permitted on %qs clause in"
2175 " OpenACC %<parallel%>", check);
2178 if (tgt && is_oacc_kernels (tgt))
2180 /* Strip out reductions, as they are not handled yet. */
2181 tree *prev_ptr = &clauses;
2183 while (tree probe = *prev_ptr)
2185 tree *next_ptr = &OMP_CLAUSE_CHAIN (probe);
2187 if (OMP_CLAUSE_CODE (probe) == OMP_CLAUSE_REDUCTION)
2188 *prev_ptr = *next_ptr;
2189 else
2190 prev_ptr = next_ptr;
2193 gimple_omp_for_set_clauses (stmt, clauses);
2194 check_oacc_kernel_gwv (stmt, ctx);
2198 scan_sharing_clauses (clauses, ctx);
2200 scan_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
2201 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
2203 scan_omp_op (gimple_omp_for_index_ptr (stmt, i), ctx);
2204 scan_omp_op (gimple_omp_for_initial_ptr (stmt, i), ctx);
2205 scan_omp_op (gimple_omp_for_final_ptr (stmt, i), ctx);
2206 scan_omp_op (gimple_omp_for_incr_ptr (stmt, i), ctx);
2208 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2209 return ctx;
2212 /* Duplicate #pragma omp simd, creating one copy for SIMT and another one for SIMD. */
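/* A rough sketch of the dispatch this function emits (lab1/lab2/lab3 are
   the artificial labels created below):

     cond = GOMP_USE_SIMT ();
     if (cond != 0) goto lab1; else goto lab2;
     lab1: <copy of the simd loop, with an artificial _simt_ clause>
	   goto lab3;
     lab2: <the original simd loop>
     lab3: ;

   Both copies are then scanned as separate GIMPLE_OMP_FOR regions.  */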
2214 static void
2215 scan_omp_simd (gimple_stmt_iterator *gsi, gomp_for *stmt,
2216 omp_context *outer_ctx)
2218 gbind *bind = gimple_build_bind (NULL, NULL, NULL);
2219 gsi_replace (gsi, bind, false);
2220 gimple_seq seq = NULL;
2221 gimple *g = gimple_build_call_internal (IFN_GOMP_USE_SIMT, 0);
2222 tree cond = create_tmp_var_raw (integer_type_node);
2223 DECL_CONTEXT (cond) = current_function_decl;
2224 DECL_SEEN_IN_BIND_EXPR_P (cond) = 1;
2225 gimple_bind_set_vars (bind, cond);
2226 gimple_call_set_lhs (g, cond);
2227 gimple_seq_add_stmt (&seq, g);
2228 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
2229 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
2230 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
2231 g = gimple_build_cond (NE_EXPR, cond, integer_zero_node, lab1, lab2);
2232 gimple_seq_add_stmt (&seq, g);
2233 g = gimple_build_label (lab1);
2234 gimple_seq_add_stmt (&seq, g);
2235 gimple_seq new_seq = copy_gimple_seq_and_replace_locals (stmt);
2236 gomp_for *new_stmt = as_a <gomp_for *> (new_seq);
2237 tree clause = build_omp_clause (gimple_location (stmt), OMP_CLAUSE__SIMT_);
2238 OMP_CLAUSE_CHAIN (clause) = gimple_omp_for_clauses (new_stmt);
2239 gimple_omp_for_set_clauses (new_stmt, clause);
2240 gimple_seq_add_stmt (&seq, new_stmt);
2241 g = gimple_build_goto (lab3);
2242 gimple_seq_add_stmt (&seq, g);
2243 g = gimple_build_label (lab2);
2244 gimple_seq_add_stmt (&seq, g);
2245 gimple_seq_add_stmt (&seq, stmt);
2246 g = gimple_build_label (lab3);
2247 gimple_seq_add_stmt (&seq, g);
2248 gimple_bind_set_body (bind, seq);
2249 update_stmt (bind);
2250 scan_omp_for (new_stmt, outer_ctx);
2251 scan_omp_for (stmt, outer_ctx)->simt_stmt = new_stmt;
2254 /* Scan an OpenMP sections directive. */
2256 static void
2257 scan_omp_sections (gomp_sections *stmt, omp_context *outer_ctx)
2259 omp_context *ctx;
2261 ctx = new_omp_context (stmt, outer_ctx);
2262 scan_sharing_clauses (gimple_omp_sections_clauses (stmt), ctx);
2263 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2266 /* Scan an OpenMP single directive. */
2268 static void
2269 scan_omp_single (gomp_single *stmt, omp_context *outer_ctx)
2271 omp_context *ctx;
2272 tree name;
2274 ctx = new_omp_context (stmt, outer_ctx);
2275 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2276 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2277 name = create_tmp_var_name (".omp_copy_s");
2278 name = build_decl (gimple_location (stmt),
2279 TYPE_DECL, name, ctx->record_type);
2280 TYPE_NAME (ctx->record_type) = name;
2282 scan_sharing_clauses (gimple_omp_single_clauses (stmt), ctx);
2283 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2285 if (TYPE_FIELDS (ctx->record_type) == NULL)
2286 ctx->record_type = NULL;
2287 else
2288 layout_type (ctx->record_type);
2291 /* Return true if the CLAUSES of an omp target guarantee that the base pointers
2292 used in the corresponding offloaded function are restrict. */
2294 static bool
2295 omp_target_base_pointers_restrict_p (tree clauses)
2297 /* The analysis relies on the GOMP_MAP_FORCE_* mapping kinds, which are only
2298 used by OpenACC. */
2299 if (flag_openacc == 0)
2300 return false;
2302 /* I. Basic example:
2304 void foo (void)
2306 unsigned int a[2], b[2];
2308 #pragma acc kernels \
2309 copyout (a) \
2310 copyout (b)
2312 a[0] = 0;
2313 b[0] = 1;
2317 After gimplification, we have:
2319 #pragma omp target oacc_kernels \
2320 map(force_from:a [len: 8]) \
2321 map(force_from:b [len: 8])
2323 a[0] = 0;
2324 b[0] = 1;
2327 Because both mappings have the force prefix, we know that they will be
2328 allocated when calling the corresponding offloaded function, which means we
2329 can mark the base pointers for a and b in the offloaded function as
2330 restrict. */
2332 tree c;
2333 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
2335 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP)
2336 return false;
2338 switch (OMP_CLAUSE_MAP_KIND (c))
2340 case GOMP_MAP_FORCE_ALLOC:
2341 case GOMP_MAP_FORCE_TO:
2342 case GOMP_MAP_FORCE_FROM:
2343 case GOMP_MAP_FORCE_TOFROM:
2344 break;
2345 default:
2346 return false;
2350 return true;
2353 /* Scan a GIMPLE_OMP_TARGET. */
2355 static void
2356 scan_omp_target (gomp_target *stmt, omp_context *outer_ctx)
2358 omp_context *ctx;
2359 tree name;
2360 bool offloaded = is_gimple_omp_offloaded (stmt);
2361 tree clauses = gimple_omp_target_clauses (stmt);
2363 ctx = new_omp_context (stmt, outer_ctx);
2364 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2365 ctx->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
2366 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2367 name = create_tmp_var_name (".omp_data_t");
2368 name = build_decl (gimple_location (stmt),
2369 TYPE_DECL, name, ctx->record_type);
2370 DECL_ARTIFICIAL (name) = 1;
2371 DECL_NAMELESS (name) = 1;
2372 TYPE_NAME (ctx->record_type) = name;
2373 TYPE_ARTIFICIAL (ctx->record_type) = 1;
2375 bool base_pointers_restrict = false;
2376 if (offloaded)
2378 create_omp_child_function (ctx, false);
2379 gimple_omp_target_set_child_fn (stmt, ctx->cb.dst_fn);
2381 base_pointers_restrict = omp_target_base_pointers_restrict_p (clauses);
2382 if (base_pointers_restrict
2383 && dump_file && (dump_flags & TDF_DETAILS))
2384 fprintf (dump_file,
2385 "Base pointers in offloaded function are restrict\n");
2388 scan_sharing_clauses (clauses, ctx, base_pointers_restrict);
2389 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2391 if (TYPE_FIELDS (ctx->record_type) == NULL)
2392 ctx->record_type = ctx->receiver_decl = NULL;
2393 else
2395 TYPE_FIELDS (ctx->record_type)
2396 = nreverse (TYPE_FIELDS (ctx->record_type));
2397 if (flag_checking)
2399 unsigned int align = DECL_ALIGN (TYPE_FIELDS (ctx->record_type));
2400 for (tree field = TYPE_FIELDS (ctx->record_type);
2401 field;
2402 field = DECL_CHAIN (field))
2403 gcc_assert (DECL_ALIGN (field) == align);
2405 layout_type (ctx->record_type);
2406 if (offloaded)
2407 fixup_child_record_type (ctx);
2411 /* Scan an OpenMP teams directive. */
2413 static void
2414 scan_omp_teams (gomp_teams *stmt, omp_context *outer_ctx)
2416 omp_context *ctx = new_omp_context (stmt, outer_ctx);
2417 scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
2418 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2421 /* Check nesting restrictions. */
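/* For example (a sketch): the following is diagnosed below, because a
   work-sharing region may not be closely nested inside another
   work-sharing region:

     #pragma omp for
     for (i = 0; i < n; i++)
       {
	 #pragma omp single
	 f (i);
       }

   (see the GIMPLE_OMP_SECTIONS/GIMPLE_OMP_SINGLE case below).  */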
2422 static bool
2423 check_omp_nesting_restrictions (gimple *stmt, omp_context *ctx)
2425 tree c;
2427 if (ctx && gimple_code (ctx->stmt) == GIMPLE_OMP_GRID_BODY)
2428 /* GRID_BODY is an artificial construct, nesting rules will be checked in
2429 the original copy of its contents. */
2430 return true;
2432 /* No nesting of non-OpenACC STMT (that is, an OpenMP one, or a GOMP builtin)
2433 inside an OpenACC CTX. */
2434 if (!(is_gimple_omp (stmt)
2435 && is_gimple_omp_oacc (stmt))
2436 /* Except for atomic codes that we share with OpenMP. */
2437 && !(gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
2438 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE))
2440 if (oacc_get_fn_attrib (cfun->decl) != NULL)
2442 error_at (gimple_location (stmt),
2443 "non-OpenACC construct inside of OpenACC routine");
2444 return false;
2446 else
2447 for (omp_context *octx = ctx; octx != NULL; octx = octx->outer)
2448 if (is_gimple_omp (octx->stmt)
2449 && is_gimple_omp_oacc (octx->stmt))
2451 error_at (gimple_location (stmt),
2452 "non-OpenACC construct inside of OpenACC region");
2453 return false;
2457 if (ctx != NULL)
2459 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
2460 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
2462 c = NULL_TREE;
2463 if (gimple_code (stmt) == GIMPLE_OMP_ORDERED)
2465 c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
2466 if (omp_find_clause (c, OMP_CLAUSE_SIMD))
2468 if (omp_find_clause (c, OMP_CLAUSE_THREADS)
2469 && (ctx->outer == NULL
2470 || !gimple_omp_for_combined_into_p (ctx->stmt)
2471 || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR
2472 || (gimple_omp_for_kind (ctx->outer->stmt)
2473 != GF_OMP_FOR_KIND_FOR)
2474 || !gimple_omp_for_combined_p (ctx->outer->stmt)))
2476 error_at (gimple_location (stmt),
2477 "%<ordered simd threads%> must be closely "
2478 "nested inside of %<for simd%> region");
2479 return false;
2481 return true;
2484 error_at (gimple_location (stmt),
2485 "OpenMP constructs other than %<#pragma omp ordered simd%>"
2486 " may not be nested inside %<simd%> region");
2487 return false;
2489 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
2491 if ((gimple_code (stmt) != GIMPLE_OMP_FOR
2492 || ((gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_DISTRIBUTE)
2493 && (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_GRID_LOOP)))
2494 && gimple_code (stmt) != GIMPLE_OMP_PARALLEL)
2496 error_at (gimple_location (stmt),
2497 "only %<distribute%> or %<parallel%> regions are "
2498 "allowed to be strictly nested inside %<teams%> "
2499 "region");
2500 return false;
2504 switch (gimple_code (stmt))
2506 case GIMPLE_OMP_FOR:
2507 if (gimple_omp_for_kind (stmt) & GF_OMP_FOR_SIMD)
2508 return true;
2509 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_DISTRIBUTE)
2511 if (ctx != NULL && gimple_code (ctx->stmt) != GIMPLE_OMP_TEAMS)
2513 error_at (gimple_location (stmt),
2514 "%<distribute%> region must be strictly nested "
2515 "inside %<teams%> construct");
2516 return false;
2518 return true;
2520 /* We split a taskloop into a task with a nested taskloop inside it. */
2521 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP)
2522 return true;
2523 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_OACC_LOOP)
2525 bool ok = false;
2527 if (ctx)
2528 switch (gimple_code (ctx->stmt))
2530 case GIMPLE_OMP_FOR:
2531 ok = (gimple_omp_for_kind (ctx->stmt)
2532 == GF_OMP_FOR_KIND_OACC_LOOP);
2533 break;
2535 case GIMPLE_OMP_TARGET:
2536 switch (gimple_omp_target_kind (ctx->stmt))
2538 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
2539 case GF_OMP_TARGET_KIND_OACC_KERNELS:
2540 ok = true;
2541 break;
2543 default:
2544 break;
2547 default:
2548 break;
2550 else if (oacc_get_fn_attrib (current_function_decl))
2551 ok = true;
2552 if (!ok)
2554 error_at (gimple_location (stmt),
2555 "OpenACC loop directive must be associated with"
2556 " an OpenACC compute region");
2557 return false;
2560 /* FALLTHRU */
2561 case GIMPLE_CALL:
2562 if (is_gimple_call (stmt)
2563 && (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2564 == BUILT_IN_GOMP_CANCEL
2565 || DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2566 == BUILT_IN_GOMP_CANCELLATION_POINT))
2568 const char *bad = NULL;
2569 const char *kind = NULL;
2570 const char *construct
2571 = (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2572 == BUILT_IN_GOMP_CANCEL)
2573 ? "#pragma omp cancel"
2574 : "#pragma omp cancellation point";
2575 if (ctx == NULL)
2577 error_at (gimple_location (stmt), "orphaned %qs construct",
2578 construct);
2579 return false;
2581 switch (tree_fits_shwi_p (gimple_call_arg (stmt, 0))
2582 ? tree_to_shwi (gimple_call_arg (stmt, 0))
2583 : 0)
2585 case 1:
2586 if (gimple_code (ctx->stmt) != GIMPLE_OMP_PARALLEL)
2587 bad = "#pragma omp parallel";
2588 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2589 == BUILT_IN_GOMP_CANCEL
2590 && !integer_zerop (gimple_call_arg (stmt, 1)))
2591 ctx->cancellable = true;
2592 kind = "parallel";
2593 break;
2594 case 2:
2595 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
2596 || gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR)
2597 bad = "#pragma omp for";
2598 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2599 == BUILT_IN_GOMP_CANCEL
2600 && !integer_zerop (gimple_call_arg (stmt, 1)))
2602 ctx->cancellable = true;
2603 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2604 OMP_CLAUSE_NOWAIT))
2605 warning_at (gimple_location (stmt), 0,
2606 "%<#pragma omp cancel for%> inside "
2607 "%<nowait%> for construct");
2608 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2609 OMP_CLAUSE_ORDERED))
2610 warning_at (gimple_location (stmt), 0,
2611 "%<#pragma omp cancel for%> inside "
2612 "%<ordered%> for construct");
2614 kind = "for";
2615 break;
2616 case 4:
2617 if (gimple_code (ctx->stmt) != GIMPLE_OMP_SECTIONS
2618 && gimple_code (ctx->stmt) != GIMPLE_OMP_SECTION)
2619 bad = "#pragma omp sections";
2620 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2621 == BUILT_IN_GOMP_CANCEL
2622 && !integer_zerop (gimple_call_arg (stmt, 1)))
2624 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
2626 ctx->cancellable = true;
2627 if (omp_find_clause (gimple_omp_sections_clauses
2628 (ctx->stmt),
2629 OMP_CLAUSE_NOWAIT))
2630 warning_at (gimple_location (stmt), 0,
2631 "%<#pragma omp cancel sections%> inside "
2632 "%<nowait%> sections construct");
2634 else
2636 gcc_assert (ctx->outer
2637 && gimple_code (ctx->outer->stmt)
2638 == GIMPLE_OMP_SECTIONS);
2639 ctx->outer->cancellable = true;
2640 if (omp_find_clause (gimple_omp_sections_clauses
2641 (ctx->outer->stmt),
2642 OMP_CLAUSE_NOWAIT))
2643 warning_at (gimple_location (stmt), 0,
2644 "%<#pragma omp cancel sections%> inside "
2645 "%<nowait%> sections construct");
2648 kind = "sections";
2649 break;
2650 case 8:
2651 if (gimple_code (ctx->stmt) != GIMPLE_OMP_TASK)
2652 bad = "#pragma omp task";
2653 else
2655 for (omp_context *octx = ctx->outer;
2656 octx; octx = octx->outer)
2658 switch (gimple_code (octx->stmt))
2660 case GIMPLE_OMP_TASKGROUP:
2661 break;
2662 case GIMPLE_OMP_TARGET:
2663 if (gimple_omp_target_kind (octx->stmt)
2664 != GF_OMP_TARGET_KIND_REGION)
2665 continue;
2666 /* FALLTHRU */
2667 case GIMPLE_OMP_PARALLEL:
2668 case GIMPLE_OMP_TEAMS:
2669 error_at (gimple_location (stmt),
2670 "%<%s taskgroup%> construct not closely "
2671 "nested inside of %<taskgroup%> region",
2672 construct);
2673 return false;
2674 default:
2675 continue;
2677 break;
2679 ctx->cancellable = true;
2681 kind = "taskgroup";
2682 break;
2683 default:
2684 error_at (gimple_location (stmt), "invalid arguments");
2685 return false;
2687 if (bad)
2689 error_at (gimple_location (stmt),
2690 "%<%s %s%> construct not closely nested inside of %qs",
2691 construct, kind, bad);
2692 return false;
2695 /* FALLTHRU */
2696 case GIMPLE_OMP_SECTIONS:
2697 case GIMPLE_OMP_SINGLE:
2698 for (; ctx != NULL; ctx = ctx->outer)
2699 switch (gimple_code (ctx->stmt))
2701 case GIMPLE_OMP_FOR:
2702 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
2703 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
2704 break;
2705 /* FALLTHRU */
2706 case GIMPLE_OMP_SECTIONS:
2707 case GIMPLE_OMP_SINGLE:
2708 case GIMPLE_OMP_ORDERED:
2709 case GIMPLE_OMP_MASTER:
2710 case GIMPLE_OMP_TASK:
2711 case GIMPLE_OMP_CRITICAL:
2712 if (is_gimple_call (stmt))
2714 if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2715 != BUILT_IN_GOMP_BARRIER)
2716 return true;
2717 error_at (gimple_location (stmt),
2718 "barrier region may not be closely nested inside "
2719 "of work-sharing, %<critical%>, %<ordered%>, "
2720 "%<master%>, explicit %<task%> or %<taskloop%> "
2721 "region");
2722 return false;
2724 error_at (gimple_location (stmt),
2725 "work-sharing region may not be closely nested inside "
2726 "of work-sharing, %<critical%>, %<ordered%>, "
2727 "%<master%>, explicit %<task%> or %<taskloop%> region");
2728 return false;
2729 case GIMPLE_OMP_PARALLEL:
2730 case GIMPLE_OMP_TEAMS:
2731 return true;
2732 case GIMPLE_OMP_TARGET:
2733 if (gimple_omp_target_kind (ctx->stmt)
2734 == GF_OMP_TARGET_KIND_REGION)
2735 return true;
2736 break;
2737 default:
2738 break;
2740 break;
2741 case GIMPLE_OMP_MASTER:
2742 for (; ctx != NULL; ctx = ctx->outer)
2743 switch (gimple_code (ctx->stmt))
2745 case GIMPLE_OMP_FOR:
2746 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
2747 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
2748 break;
2749 /* FALLTHRU */
2750 case GIMPLE_OMP_SECTIONS:
2751 case GIMPLE_OMP_SINGLE:
2752 case GIMPLE_OMP_TASK:
2753 error_at (gimple_location (stmt),
2754 "%<master%> region may not be closely nested inside "
2755 "of work-sharing, explicit %<task%> or %<taskloop%> "
2756 "region");
2757 return false;
2758 case GIMPLE_OMP_PARALLEL:
2759 case GIMPLE_OMP_TEAMS:
2760 return true;
2761 case GIMPLE_OMP_TARGET:
2762 if (gimple_omp_target_kind (ctx->stmt)
2763 == GF_OMP_TARGET_KIND_REGION)
2764 return true;
2765 break;
2766 default:
2767 break;
2769 break;
2770 case GIMPLE_OMP_TASK:
2771 for (c = gimple_omp_task_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2772 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
2773 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
2774 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
2776 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
2777 error_at (OMP_CLAUSE_LOCATION (c),
2778 "%<depend(%s)%> is only allowed in %<omp ordered%>",
2779 kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
2780 return false;
2782 break;
2783 case GIMPLE_OMP_ORDERED:
2784 for (c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
2785 c; c = OMP_CLAUSE_CHAIN (c))
2787 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
2789 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREADS
2790 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SIMD);
2791 continue;
2793 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
2794 if (kind == OMP_CLAUSE_DEPEND_SOURCE
2795 || kind == OMP_CLAUSE_DEPEND_SINK)
2797 tree oclause;
2798 /* Look for containing ordered(N) loop. */
2799 if (ctx == NULL
2800 || gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
2801 || (oclause
2802 = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2803 OMP_CLAUSE_ORDERED)) == NULL_TREE)
2805 error_at (OMP_CLAUSE_LOCATION (c),
2806 "%<ordered%> construct with %<depend%> clause "
2807 "must be closely nested inside an %<ordered%> "
2808 "loop");
2809 return false;
2811 else if (OMP_CLAUSE_ORDERED_EXPR (oclause) == NULL_TREE)
2813 error_at (OMP_CLAUSE_LOCATION (c),
2814 "%<ordered%> construct with %<depend%> clause "
2815 "must be closely nested inside a loop with "
2816 "%<ordered%> clause with a parameter");
2817 return false;
2820 else
2822 error_at (OMP_CLAUSE_LOCATION (c),
2823 "invalid depend kind in omp %<ordered%> %<depend%>");
2824 return false;
2827 c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
2828 if (omp_find_clause (c, OMP_CLAUSE_SIMD))
2830 /* ordered simd must be closely nested inside of a simd region,
2831 and a simd region must not encounter constructs other than
2832 ordered simd; therefore ordered simd may either be orphaned,
2833 or ctx->stmt must be simd. The latter case was already handled
2834 earlier. */
2835 if (ctx != NULL)
2837 error_at (gimple_location (stmt),
2838 "%<ordered%> %<simd%> must be closely nested inside "
2839 "%<simd%> region");
2840 return false;
2843 for (; ctx != NULL; ctx = ctx->outer)
2844 switch (gimple_code (ctx->stmt))
2846 case GIMPLE_OMP_CRITICAL:
2847 case GIMPLE_OMP_TASK:
2848 case GIMPLE_OMP_ORDERED:
2849 ordered_in_taskloop:
2850 error_at (gimple_location (stmt),
2851 "%<ordered%> region may not be closely nested inside "
2852 "of %<critical%>, %<ordered%>, explicit %<task%> or "
2853 "%<taskloop%> region");
2854 return false;
2855 case GIMPLE_OMP_FOR:
2856 if (gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP)
2857 goto ordered_in_taskloop;
2858 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2859 OMP_CLAUSE_ORDERED) == NULL)
2861 error_at (gimple_location (stmt),
2862 "%<ordered%> region must be closely nested inside "
2863 "a loop region with an %<ordered%> clause");
2864 return false;
2866 return true;
2867 case GIMPLE_OMP_TARGET:
2868 if (gimple_omp_target_kind (ctx->stmt)
2869 != GF_OMP_TARGET_KIND_REGION)
2870 break;
2871 /* FALLTHRU */
2872 case GIMPLE_OMP_PARALLEL:
2873 case GIMPLE_OMP_TEAMS:
2874 error_at (gimple_location (stmt),
2875 "%<ordered%> region must be closely nested inside "
2876 "a loop region with an %<ordered%> clause");
2877 return false;
2878 default:
2879 break;
2881 break;
2882 case GIMPLE_OMP_CRITICAL:
2884 tree this_stmt_name
2885 = gimple_omp_critical_name (as_a <gomp_critical *> (stmt));
2886 for (; ctx != NULL; ctx = ctx->outer)
2887 if (gomp_critical *other_crit
2888 = dyn_cast <gomp_critical *> (ctx->stmt))
2889 if (this_stmt_name == gimple_omp_critical_name (other_crit))
2891 error_at (gimple_location (stmt),
2892 "%<critical%> region may not be nested inside "
2893 "a %<critical%> region with the same name");
2894 return false;
2897 break;
2898 case GIMPLE_OMP_TEAMS:
2899 if (ctx == NULL
2900 || gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET
2901 || gimple_omp_target_kind (ctx->stmt) != GF_OMP_TARGET_KIND_REGION)
2903 error_at (gimple_location (stmt),
2904 "%<teams%> construct not closely nested inside of "
2905 "%<target%> construct");
2906 return false;
2908 break;
2909 case GIMPLE_OMP_TARGET:
2910 for (c = gimple_omp_target_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2911 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
2912 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
2913 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
2915 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
2916 error_at (OMP_CLAUSE_LOCATION (c),
2917 "%<depend(%s)%> is only allowed in %<omp ordered%>",
2918 kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
2919 return false;
2921 if (is_gimple_omp_offloaded (stmt)
2922 && oacc_get_fn_attrib (cfun->decl) != NULL)
2924 error_at (gimple_location (stmt),
2925 "OpenACC region inside of OpenACC routine, nested "
2926 "parallelism not supported yet");
2927 return false;
2929 for (; ctx != NULL; ctx = ctx->outer)
2931 if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET)
2933 if (is_gimple_omp (stmt)
2934 && is_gimple_omp_oacc (stmt)
2935 && is_gimple_omp (ctx->stmt))
2937 error_at (gimple_location (stmt),
2938 "OpenACC construct inside of non-OpenACC region");
2939 return false;
2941 continue;
2944 const char *stmt_name, *ctx_stmt_name;
2945 switch (gimple_omp_target_kind (stmt))
2947 case GF_OMP_TARGET_KIND_REGION: stmt_name = "target"; break;
2948 case GF_OMP_TARGET_KIND_DATA: stmt_name = "target data"; break;
2949 case GF_OMP_TARGET_KIND_UPDATE: stmt_name = "target update"; break;
2950 case GF_OMP_TARGET_KIND_ENTER_DATA:
2951 stmt_name = "target enter data"; break;
2952 case GF_OMP_TARGET_KIND_EXIT_DATA:
2953 stmt_name = "target exit data"; break;
2954 case GF_OMP_TARGET_KIND_OACC_PARALLEL: stmt_name = "parallel"; break;
2955 case GF_OMP_TARGET_KIND_OACC_KERNELS: stmt_name = "kernels"; break;
2956 case GF_OMP_TARGET_KIND_OACC_DATA: stmt_name = "data"; break;
2957 case GF_OMP_TARGET_KIND_OACC_UPDATE: stmt_name = "update"; break;
2958 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
2959 stmt_name = "enter/exit data"; break;
2960 case GF_OMP_TARGET_KIND_OACC_HOST_DATA: stmt_name = "host_data";
2961 break;
2962 default: gcc_unreachable ();
2964 switch (gimple_omp_target_kind (ctx->stmt))
2966 case GF_OMP_TARGET_KIND_REGION: ctx_stmt_name = "target"; break;
2967 case GF_OMP_TARGET_KIND_DATA: ctx_stmt_name = "target data"; break;
2968 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
2969 ctx_stmt_name = "parallel"; break;
2970 case GF_OMP_TARGET_KIND_OACC_KERNELS:
2971 ctx_stmt_name = "kernels"; break;
2972 case GF_OMP_TARGET_KIND_OACC_DATA: ctx_stmt_name = "data"; break;
2973 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
2974 ctx_stmt_name = "host_data"; break;
2975 default: gcc_unreachable ();
2978 /* OpenACC/OpenMP mismatch? */
2979 if (is_gimple_omp_oacc (stmt)
2980 != is_gimple_omp_oacc (ctx->stmt))
2982 error_at (gimple_location (stmt),
2983 "%s %qs construct inside of %s %qs region",
2984 (is_gimple_omp_oacc (stmt)
2985 ? "OpenACC" : "OpenMP"), stmt_name,
2986 (is_gimple_omp_oacc (ctx->stmt)
2987 ? "OpenACC" : "OpenMP"), ctx_stmt_name);
2988 return false;
2990 if (is_gimple_omp_offloaded (ctx->stmt))
2992 /* No GIMPLE_OMP_TARGET inside offloaded OpenACC CTX. */
2993 if (is_gimple_omp_oacc (ctx->stmt))
2995 error_at (gimple_location (stmt),
2996 "%qs construct inside of %qs region",
2997 stmt_name, ctx_stmt_name);
2998 return false;
3000 else
3002 warning_at (gimple_location (stmt), 0,
3003 "%qs construct inside of %qs region",
3004 stmt_name, ctx_stmt_name);
3008 break;
3009 default:
3010 break;
3012 return true;
3016 /* Helper function for scan_omp.
3018 Callback for walk_tree or operators in walk_gimple_stmt used to
3019 scan for OMP directives in TP. */
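/* For instance, a VAR_DECL referenced inside a parallel body is replaced
   here by the privatized copy recorded in CTX->cb during scanning, and
   types whose size was remapped (e.g. VLAs) are remapped alongside via
   remap_type.  */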
3021 static tree
3022 scan_omp_1_op (tree *tp, int *walk_subtrees, void *data)
3024 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
3025 omp_context *ctx = (omp_context *) wi->info;
3026 tree t = *tp;
3028 switch (TREE_CODE (t))
3030 case VAR_DECL:
3031 case PARM_DECL:
3032 case LABEL_DECL:
3033 case RESULT_DECL:
3034 if (ctx)
3036 tree repl = remap_decl (t, &ctx->cb);
3037 gcc_checking_assert (TREE_CODE (repl) != ERROR_MARK);
3038 *tp = repl;
3040 break;
3042 default:
3043 if (ctx && TYPE_P (t))
3044 *tp = remap_type (t, &ctx->cb);
3045 else if (!DECL_P (t))
3047 *walk_subtrees = 1;
3048 if (ctx)
3050 tree tem = remap_type (TREE_TYPE (t), &ctx->cb);
3051 if (tem != TREE_TYPE (t))
3053 if (TREE_CODE (t) == INTEGER_CST)
3054 *tp = wide_int_to_tree (tem, t);
3055 else
3056 TREE_TYPE (t) = tem;
3060 break;
3063 return NULL_TREE;
3066 /* Return true if FNDECL is a setjmp or a longjmp. */
3068 static bool
3069 setjmp_or_longjmp_p (const_tree fndecl)
3071 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
3072 && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_SETJMP
3073 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_LONGJMP))
3074 return true;
3076 tree declname = DECL_NAME (fndecl);
3077 if (!declname)
3078 return false;
3079 const char *name = IDENTIFIER_POINTER (declname);
3080 return !strcmp (name, "setjmp") || !strcmp (name, "longjmp");
3084 /* Helper function for scan_omp.
3086 Callback for walk_gimple_stmt used to scan for OMP directives in
3087 the current statement in GSI. */
3089 static tree
3090 scan_omp_1_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
3091 struct walk_stmt_info *wi)
3093 gimple *stmt = gsi_stmt (*gsi);
3094 omp_context *ctx = (omp_context *) wi->info;
3096 if (gimple_has_location (stmt))
3097 input_location = gimple_location (stmt);
3099 /* Check the nesting restrictions. */
3100 bool remove = false;
3101 if (is_gimple_omp (stmt))
3102 remove = !check_omp_nesting_restrictions (stmt, ctx);
3103 else if (is_gimple_call (stmt))
3105 tree fndecl = gimple_call_fndecl (stmt);
3106 if (fndecl)
3108 if (setjmp_or_longjmp_p (fndecl)
3109 && ctx
3110 && gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
3111 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
3113 remove = true;
3114 error_at (gimple_location (stmt),
3115 "setjmp/longjmp inside simd construct");
3117 else if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3118 switch (DECL_FUNCTION_CODE (fndecl))
3120 case BUILT_IN_GOMP_BARRIER:
3121 case BUILT_IN_GOMP_CANCEL:
3122 case BUILT_IN_GOMP_CANCELLATION_POINT:
3123 case BUILT_IN_GOMP_TASKYIELD:
3124 case BUILT_IN_GOMP_TASKWAIT:
3125 case BUILT_IN_GOMP_TASKGROUP_START:
3126 case BUILT_IN_GOMP_TASKGROUP_END:
3127 remove = !check_omp_nesting_restrictions (stmt, ctx);
3128 break;
3129 default:
3130 break;
3134 if (remove)
3136 stmt = gimple_build_nop ();
3137 gsi_replace (gsi, stmt, false);
3140 *handled_ops_p = true;
3142 switch (gimple_code (stmt))
3144 case GIMPLE_OMP_PARALLEL:
3145 taskreg_nesting_level++;
3146 scan_omp_parallel (gsi, ctx);
3147 taskreg_nesting_level--;
3148 break;
3150 case GIMPLE_OMP_TASK:
3151 taskreg_nesting_level++;
3152 scan_omp_task (gsi, ctx);
3153 taskreg_nesting_level--;
3154 break;
3156 case GIMPLE_OMP_FOR:
3157 if (((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
3158 & GF_OMP_FOR_KIND_MASK) == GF_OMP_FOR_KIND_SIMD)
3159 && omp_maybe_offloaded_ctx (ctx)
3160 && omp_max_simt_vf ())
3161 scan_omp_simd (gsi, as_a <gomp_for *> (stmt), ctx);
3162 else
3163 scan_omp_for (as_a <gomp_for *> (stmt), ctx);
3164 break;
3166 case GIMPLE_OMP_SECTIONS:
3167 scan_omp_sections (as_a <gomp_sections *> (stmt), ctx);
3168 break;
3170 case GIMPLE_OMP_SINGLE:
3171 scan_omp_single (as_a <gomp_single *> (stmt), ctx);
3172 break;
3174 case GIMPLE_OMP_SECTION:
3175 case GIMPLE_OMP_MASTER:
3176 case GIMPLE_OMP_TASKGROUP:
3177 case GIMPLE_OMP_ORDERED:
3178 case GIMPLE_OMP_CRITICAL:
3179 case GIMPLE_OMP_GRID_BODY:
3180 ctx = new_omp_context (stmt, ctx);
3181 scan_omp (gimple_omp_body_ptr (stmt), ctx);
3182 break;
3184 case GIMPLE_OMP_TARGET:
3185 scan_omp_target (as_a <gomp_target *> (stmt), ctx);
3186 break;
3188 case GIMPLE_OMP_TEAMS:
3189 scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
3190 break;
3192 case GIMPLE_BIND:
3194 tree var;
3196 *handled_ops_p = false;
3197 if (ctx)
3198 for (var = gimple_bind_vars (as_a <gbind *> (stmt));
3199 var ;
3200 var = DECL_CHAIN (var))
3201 insert_decl_map (&ctx->cb, var, var);
3203 break;
3204 default:
3205 *handled_ops_p = false;
3206 break;
3209 return NULL_TREE;
3213 /* Scan all the statements starting at the current statement. CTX
3214 contains context information about the OMP directives and
3215 clauses found during the scan. */
3217 static void
3218 scan_omp (gimple_seq *body_p, omp_context *ctx)
3220 location_t saved_location;
3221 struct walk_stmt_info wi;
3223 memset (&wi, 0, sizeof (wi));
3224 wi.info = ctx;
3225 wi.want_locations = true;
3227 saved_location = input_location;
3228 walk_gimple_seq_mod (body_p, scan_omp_1_stmt, scan_omp_1_op, &wi);
3229 input_location = saved_location;
3232 /* Re-gimplification and code generation routines. */
3234 /* If a context was created for STMT when it was scanned, return it. */
3236 static omp_context *
3237 maybe_lookup_ctx (gimple *stmt)
3239 splay_tree_node n;
3240 n = splay_tree_lookup (all_contexts, (splay_tree_key) stmt);
3241 return n ? (omp_context *) n->value : NULL;
3245 /* Find the mapping for DECL in CTX or the immediately enclosing
3246 context that has a mapping for DECL.
3248 If CTX is a nested parallel directive, we may have to use the decl
3249 mappings created in CTX's parent context. Suppose that we have the
3250 following parallel nesting (variable UIDs shown for clarity):
3252 iD.1562 = 0;
3253 #omp parallel shared(iD.1562) -> outer parallel
3254 iD.1562 = iD.1562 + 1;
3256 #omp parallel shared (iD.1562) -> inner parallel
3257 iD.1562 = iD.1562 - 1;
3259 Each parallel structure will create a distinct .omp_data_s structure
3260 for copying iD.1562 in/out of the directive:
3262 outer parallel .omp_data_s.1.i -> iD.1562
3263 inner parallel .omp_data_s.2.i -> iD.1562
3265 A shared variable mapping will produce a copy-out operation before
3266 the parallel directive and a copy-in operation after it. So, in
3267 this case we would have:
3269 iD.1562 = 0;
3270 .omp_data_o.1.i = iD.1562;
3271 #omp parallel shared(iD.1562) -> outer parallel
3272 .omp_data_i.1 = &.omp_data_o.1
3273 .omp_data_i.1->i = .omp_data_i.1->i + 1;
3275 .omp_data_o.2.i = iD.1562; -> **
3276 #omp parallel shared(iD.1562) -> inner parallel
3277 .omp_data_i.2 = &.omp_data_o.2
3278 .omp_data_i.2->i = .omp_data_i.2->i - 1;
3281 ** This is a problem. The symbol iD.1562 cannot be referenced
3282 inside the body of the outer parallel region. But since we are
3283 emitting this copy operation while expanding the inner parallel
3284 directive, we need to access the CTX structure of the outer
3285 parallel directive to get the correct mapping:
3287 .omp_data_o.2.i = .omp_data_i.1->i
3289 Since there may be other workshare or parallel directives enclosing
3290 the parallel directive, it may be necessary to walk up the context
3291 parent chain. This is not a problem in general because nested
3292 parallelism happens only rarely. */
3294 static tree
3295 lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
3297 tree t;
3298 omp_context *up;
3300 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
3301 t = maybe_lookup_decl (decl, up);
3303 gcc_assert (!ctx->is_nested || t || is_global_var (decl));
3305 return t ? t : decl;
3309 /* Similar to lookup_decl_in_outer_ctx, but return DECL if not found
3310 in outer contexts. */
3312 static tree
3313 maybe_lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
3315 tree t = NULL;
3316 omp_context *up;
3318 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
3319 t = maybe_lookup_decl (decl, up);
3321 return t ? t : decl;
3325 /* Construct the initialization value for reduction operation OP. */
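/* For example: PLUS_EXPR, MINUS_EXPR and the bitwise/logical "or"-style
   operations initialize to 0; MULT_EXPR and the "and"-style operations to
   1; BIT_AND_EXPR to ~0; MAX_EXPR to the minimum value of TYPE (or -Inf
   when infinities are honored); MIN_EXPR to the maximum (or +Inf).  */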
3327 tree
3328 omp_reduction_init_op (location_t loc, enum tree_code op, tree type)
3330 switch (op)
3332 case PLUS_EXPR:
3333 case MINUS_EXPR:
3334 case BIT_IOR_EXPR:
3335 case BIT_XOR_EXPR:
3336 case TRUTH_OR_EXPR:
3337 case TRUTH_ORIF_EXPR:
3338 case TRUTH_XOR_EXPR:
3339 case NE_EXPR:
3340 return build_zero_cst (type);
3342 case MULT_EXPR:
3343 case TRUTH_AND_EXPR:
3344 case TRUTH_ANDIF_EXPR:
3345 case EQ_EXPR:
3346 return fold_convert_loc (loc, type, integer_one_node);
3348 case BIT_AND_EXPR:
3349 return fold_convert_loc (loc, type, integer_minus_one_node);
3351 case MAX_EXPR:
3352 if (SCALAR_FLOAT_TYPE_P (type))
3354 REAL_VALUE_TYPE max, min;
3355 if (HONOR_INFINITIES (type))
3357 real_inf (&max);
3358 real_arithmetic (&min, NEGATE_EXPR, &max, NULL);
3360 else
3361 real_maxval (&min, 1, TYPE_MODE (type));
3362 return build_real (type, min);
3364 else if (POINTER_TYPE_P (type))
3366 wide_int min
3367 = wi::min_value (TYPE_PRECISION (type), TYPE_SIGN (type));
3368 return wide_int_to_tree (type, min);
3370 else
3372 gcc_assert (INTEGRAL_TYPE_P (type));
3373 return TYPE_MIN_VALUE (type);
3376 case MIN_EXPR:
3377 if (SCALAR_FLOAT_TYPE_P (type))
3379 REAL_VALUE_TYPE max;
3380 if (HONOR_INFINITIES (type))
3381 real_inf (&max);
3382 else
3383 real_maxval (&max, 0, TYPE_MODE (type));
3384 return build_real (type, max);
3386 else if (POINTER_TYPE_P (type))
3388 wide_int max
3389 = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
3390 return wide_int_to_tree (type, max);
3392 else
3394 gcc_assert (INTEGRAL_TYPE_P (type));
3395 return TYPE_MAX_VALUE (type);
3398 default:
3399 gcc_unreachable ();
3403 /* Construct the initialization value for reduction CLAUSE. */
3405 tree
3406 omp_reduction_init (tree clause, tree type)
3408 return omp_reduction_init_op (OMP_CLAUSE_LOCATION (clause),
3409 OMP_CLAUSE_REDUCTION_CODE (clause), type);
3412 /* Return alignment to be assumed for var in CLAUSE, which should be
3413 OMP_CLAUSE_ALIGNED. */
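/* E.g. for "#pragma omp simd aligned (p : 32)" this returns 32; for a bare
   "aligned (p)" the implementation-defined fallback below picks the
   largest unit alignment among the target's preferred SIMD vector types,
   capped by the autovectorization sizes.  */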
3415 static tree
3416 omp_clause_aligned_alignment (tree clause)
3418 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
3419 return OMP_CLAUSE_ALIGNED_ALIGNMENT (clause);
3421 /* Otherwise return the implementation-defined alignment. */
3422 unsigned int al = 1;
3423 machine_mode mode, vmode;
3424 int vs = targetm.vectorize.autovectorize_vector_sizes ();
3425 if (vs)
3426 vs = 1 << floor_log2 (vs);
3427 static enum mode_class classes[]
3428 = { MODE_INT, MODE_VECTOR_INT, MODE_FLOAT, MODE_VECTOR_FLOAT };
3429 for (int i = 0; i < 4; i += 2)
3430 for (mode = GET_CLASS_NARROWEST_MODE (classes[i]);
3431 mode != VOIDmode;
3432 mode = GET_MODE_WIDER_MODE (mode))
3434 vmode = targetm.vectorize.preferred_simd_mode (mode);
3435 if (GET_MODE_CLASS (vmode) != classes[i + 1])
3436 continue;
3437 while (vs
3438 && GET_MODE_SIZE (vmode) < vs
3439 && GET_MODE_2XWIDER_MODE (vmode) != VOIDmode)
3440 vmode = GET_MODE_2XWIDER_MODE (vmode);
3442 tree type = lang_hooks.types.type_for_mode (mode, 1);
3443 if (type == NULL_TREE || TYPE_MODE (type) != mode)
3444 continue;
3445 type = build_vector_type (type, GET_MODE_SIZE (vmode)
3446 / GET_MODE_SIZE (mode));
3447 if (TYPE_MODE (type) != vmode)
3448 continue;
3449 if (TYPE_ALIGN_UNIT (type) > al)
3450 al = TYPE_ALIGN_UNIT (type);
3452 return build_int_cst (integer_type_node, al);
3456 /* This structure is part of the interface between lower_rec_simd_input_clauses
3457 and lower_rec_input_clauses. */
3459 struct omplow_simd_context {
3460 tree idx;
3461 tree lane;
3462 vec<tree, va_heap> simt_eargs;
3463 gimple_seq simt_dlist;
3464 int max_vf;
3465 bool is_simt;
3468 /* Helper function of lower_rec_input_clauses, used for #pragma omp simd
3469 privatization. */
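/* A sketch of the non-SIMT transformation: a privatized variable D of the
   loop becomes an "omp simd array"

     D.simdarray[max_vf];

   (D.simdarray is an illustrative name for the AVAR temporary created
   below), with IVAR = D.simdarray[sctx->idx] used inside the loop body and
   LVAR = D.simdarray[sctx->lane] installed as D's DECL_VALUE_EXPR.  */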
3471 static bool
3472 lower_rec_simd_input_clauses (tree new_var, omp_context *ctx,
3473 omplow_simd_context *sctx, tree &ivar, tree &lvar)
3475 if (sctx->max_vf == 0)
3477 sctx->max_vf = sctx->is_simt ? omp_max_simt_vf () : omp_max_vf ();
3478 if (sctx->max_vf > 1)
3480 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3481 OMP_CLAUSE_SAFELEN);
3482 if (c
3483 && (TREE_CODE (OMP_CLAUSE_SAFELEN_EXPR (c)) != INTEGER_CST
3484 || tree_int_cst_sgn (OMP_CLAUSE_SAFELEN_EXPR (c)) != 1))
3485 sctx->max_vf = 1;
3486 else if (c && compare_tree_int (OMP_CLAUSE_SAFELEN_EXPR (c),
3487 sctx->max_vf) == -1)
3488 sctx->max_vf = tree_to_shwi (OMP_CLAUSE_SAFELEN_EXPR (c));
3490 if (sctx->max_vf > 1)
3492 sctx->idx = create_tmp_var (unsigned_type_node);
3493 sctx->lane = create_tmp_var (unsigned_type_node);
3496 if (sctx->max_vf == 1)
3497 return false;
3499 if (sctx->is_simt)
3501 if (is_gimple_reg (new_var))
3503 ivar = lvar = new_var;
3504 return true;
3506 tree type = TREE_TYPE (new_var), ptype = build_pointer_type (type);
3507 ivar = lvar = create_tmp_var (type);
3508 TREE_ADDRESSABLE (ivar) = 1;
3509 DECL_ATTRIBUTES (ivar) = tree_cons (get_identifier ("omp simt private"),
3510 NULL, DECL_ATTRIBUTES (ivar));
3511 sctx->simt_eargs.safe_push (build1 (ADDR_EXPR, ptype, ivar));
3512 tree clobber = build_constructor (type, NULL);
3513 TREE_THIS_VOLATILE (clobber) = 1;
3514 gimple *g = gimple_build_assign (ivar, clobber);
3515 gimple_seq_add_stmt (&sctx->simt_dlist, g);
3517 else
3519 tree atype = build_array_type_nelts (TREE_TYPE (new_var), sctx->max_vf);
3520 tree avar = create_tmp_var_raw (atype);
3521 if (TREE_ADDRESSABLE (new_var))
3522 TREE_ADDRESSABLE (avar) = 1;
3523 DECL_ATTRIBUTES (avar)
3524 = tree_cons (get_identifier ("omp simd array"), NULL,
3525 DECL_ATTRIBUTES (avar));
3526 gimple_add_tmp_var (avar);
3527 ivar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, sctx->idx,
3528 NULL_TREE, NULL_TREE);
3529 lvar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, sctx->lane,
3530 NULL_TREE, NULL_TREE);
3532 if (DECL_P (new_var))
3534 SET_DECL_VALUE_EXPR (new_var, lvar);
3535 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
3537 return true;
3540 /* Helper function of lower_rec_input_clauses. For a reference
3541 in a simd reduction, add an underlying variable that it will reference. */
3543 static void
3544 handle_simd_reference (location_t loc, tree new_vard, gimple_seq *ilist)
3546 tree z = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_vard)));
3547 if (TREE_CONSTANT (z))
3549 z = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_vard)),
3550 get_name (new_vard));
3551 gimple_add_tmp_var (z);
3552 TREE_ADDRESSABLE (z) = 1;
3553 z = build_fold_addr_expr_loc (loc, z);
3554 gimplify_assign (new_vard, z, ilist);
3558 /* Generate code to implement the input clauses, FIRSTPRIVATE and COPYIN,
3559 from the receiver (aka child) side and initializers for REFERENCE_TYPE
3560 private variables. Initialization statements go in ILIST, while calls
3561 to destructors go in DLIST. */
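/* For instance (a sketch): for "firstprivate (x)" on a parallel, ILIST in
   the child function receives roughly

     x.priv = .omp_data_i->x;

   (x.priv standing for the privatized copy), while destructor calls for
   C++ privatized variables are accumulated in DLIST.  */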
3563 static void
3564 lower_rec_input_clauses (tree clauses, gimple_seq *ilist, gimple_seq *dlist,
3565 omp_context *ctx, struct omp_for_data *fd)
3567 tree c, dtor, copyin_seq, x, ptr;
3568 bool copyin_by_ref = false;
3569 bool lastprivate_firstprivate = false;
3570 bool reduction_omp_orig_ref = false;
3571 int pass;
3572 bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
3573 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD);
3574 omplow_simd_context sctx = omplow_simd_context ();
3575 tree simt_lane = NULL_TREE, simtrec = NULL_TREE;
3576 tree ivar = NULL_TREE, lvar = NULL_TREE, uid = NULL_TREE;
3577 gimple_seq llist[3] = { };
3579 copyin_seq = NULL;
3580 sctx.is_simt = is_simd && omp_find_clause (clauses, OMP_CLAUSE__SIMT_);
3582 /* Set max_vf=1 (which will later enforce safelen=1) in simd loops
3583 with data sharing clauses referencing variable sized vars. That
3584 is unnecessarily hard to support and very unlikely to result in
3585 vectorized code anyway. */
3586 if (is_simd)
3587 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
3588 switch (OMP_CLAUSE_CODE (c))
3590 case OMP_CLAUSE_LINEAR:
3591 if (OMP_CLAUSE_LINEAR_ARRAY (c))
3592 sctx.max_vf = 1;
3593 /* FALLTHRU */
3594 case OMP_CLAUSE_PRIVATE:
3595 case OMP_CLAUSE_FIRSTPRIVATE:
3596 case OMP_CLAUSE_LASTPRIVATE:
3597 if (is_variable_sized (OMP_CLAUSE_DECL (c)))
3598 sctx.max_vf = 1;
3599 break;
3600 case OMP_CLAUSE_REDUCTION:
3601 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
3602 || is_variable_sized (OMP_CLAUSE_DECL (c)))
3603 sctx.max_vf = 1;
3604 break;
3605 default:
3606 continue;
3609 /* Add a placeholder for simduid. */
3610 if (sctx.is_simt && sctx.max_vf != 1)
3611 sctx.simt_eargs.safe_push (NULL_TREE);
3613 /* Do all the fixed sized types in the first pass, and the variable sized
3614 types in the second pass. This makes sure that the scalar arguments to
3615 the variable sized types are processed before we use them in the
3616 variable sized operations. */
3617 for (pass = 0; pass < 2; ++pass)
3619 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
3621 enum omp_clause_code c_kind = OMP_CLAUSE_CODE (c);
3622 tree var, new_var;
3623 bool by_ref;
3624 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
3626 switch (c_kind)
3628 case OMP_CLAUSE_PRIVATE:
3629 if (OMP_CLAUSE_PRIVATE_DEBUG (c))
3630 continue;
3631 break;
3632 case OMP_CLAUSE_SHARED:
3633 /* Ignore shared directives in teams construct. */
3634 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
3635 continue;
3636 if (maybe_lookup_decl (OMP_CLAUSE_DECL (c), ctx) == NULL)
3638 gcc_assert (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c)
3639 || is_global_var (OMP_CLAUSE_DECL (c)));
3640 continue;
3642 case OMP_CLAUSE_FIRSTPRIVATE:
3643 case OMP_CLAUSE_COPYIN:
3644 break;
3645 case OMP_CLAUSE_LINEAR:
3646 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
3647 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
3648 lastprivate_firstprivate = true;
3649 break;
3650 case OMP_CLAUSE_REDUCTION:
3651 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
3652 reduction_omp_orig_ref = true;
3653 break;
3654 case OMP_CLAUSE__LOOPTEMP_:
3655 /* Handle _looptemp_ clauses only on parallel/task. */
3656 if (fd)
3657 continue;
3658 break;
3659 case OMP_CLAUSE_LASTPRIVATE:
3660 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
3662 lastprivate_firstprivate = true;
3663 if (pass != 0 || is_taskloop_ctx (ctx))
3664 continue;
3666 /* Even without a corresponding firstprivate, if the
3667 decl is a Fortran allocatable, it needs an outer var
3668 reference. */
3669 else if (pass == 0
3670 && lang_hooks.decls.omp_private_outer_ref
3671 (OMP_CLAUSE_DECL (c)))
3672 lastprivate_firstprivate = true;
3673 break;
3674 case OMP_CLAUSE_ALIGNED:
3675 if (pass == 0)
3676 continue;
3677 var = OMP_CLAUSE_DECL (c);
3678 if (TREE_CODE (TREE_TYPE (var)) == POINTER_TYPE
3679 && !is_global_var (var))
3681 new_var = maybe_lookup_decl (var, ctx);
3682 if (new_var == NULL_TREE)
3683 new_var = maybe_lookup_decl_in_outer_ctx (var, ctx);
3684 x = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
3685 tree alarg = omp_clause_aligned_alignment (c);
3686 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
3687 x = build_call_expr_loc (clause_loc, x, 2, new_var, alarg);
3688 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
3689 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
3690 gimplify_and_add (x, ilist);
3692 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
3693 && is_global_var (var))
3695 tree ptype = build_pointer_type (TREE_TYPE (var)), t, t2;
3696 new_var = lookup_decl (var, ctx);
3697 t = maybe_lookup_decl_in_outer_ctx (var, ctx);
3698 t = build_fold_addr_expr_loc (clause_loc, t);
3699 t2 = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
3700 tree alarg = omp_clause_aligned_alignment (c);
3701 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
3702 t = build_call_expr_loc (clause_loc, t2, 2, t, alarg);
3703 t = fold_convert_loc (clause_loc, ptype, t);
3704 x = create_tmp_var (ptype);
3705 t = build2 (MODIFY_EXPR, ptype, x, t);
3706 gimplify_and_add (t, ilist);
3707 t = build_simple_mem_ref_loc (clause_loc, x);
3708 SET_DECL_VALUE_EXPR (new_var, t);
3709 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
3711 continue;
3712 default:
3713 continue;
3716 new_var = var = OMP_CLAUSE_DECL (c);
3717 if (c_kind == OMP_CLAUSE_REDUCTION && TREE_CODE (var) == MEM_REF)
3719 var = TREE_OPERAND (var, 0);
3720 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
3721 var = TREE_OPERAND (var, 0);
3722 if (TREE_CODE (var) == INDIRECT_REF
3723 || TREE_CODE (var) == ADDR_EXPR)
3724 var = TREE_OPERAND (var, 0);
3725 if (is_variable_sized (var))
3727 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
3728 var = DECL_VALUE_EXPR (var);
3729 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
3730 var = TREE_OPERAND (var, 0);
3731 gcc_assert (DECL_P (var));
3733 new_var = var;
3735 if (c_kind != OMP_CLAUSE_COPYIN)
3736 new_var = lookup_decl (var, ctx);
3738 if (c_kind == OMP_CLAUSE_SHARED || c_kind == OMP_CLAUSE_COPYIN)
3740 if (pass != 0)
3741 continue;
3743 /* C/C++ array section reductions. */
3744 else if (c_kind == OMP_CLAUSE_REDUCTION
3745 && var != OMP_CLAUSE_DECL (c))
3747 if (pass == 0)
3748 continue;
3750 tree bias = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
3751 tree orig_var = TREE_OPERAND (OMP_CLAUSE_DECL (c), 0);
3752 if (TREE_CODE (orig_var) == POINTER_PLUS_EXPR)
3754 tree b = TREE_OPERAND (orig_var, 1);
3755 b = maybe_lookup_decl (b, ctx);
3756 if (b == NULL)
3758 b = TREE_OPERAND (orig_var, 1);
3759 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
3761 if (integer_zerop (bias))
3762 bias = b;
3763 else
3765 bias = fold_convert_loc (clause_loc,
3766 TREE_TYPE (b), bias);
3767 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
3768 TREE_TYPE (b), b, bias);
3770 orig_var = TREE_OPERAND (orig_var, 0);
3772 if (TREE_CODE (orig_var) == INDIRECT_REF
3773 || TREE_CODE (orig_var) == ADDR_EXPR)
3774 orig_var = TREE_OPERAND (orig_var, 0);
3775 tree d = OMP_CLAUSE_DECL (c);
3776 tree type = TREE_TYPE (d);
3777 gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
3778 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
3779 const char *name = get_name (orig_var);
3780 if (TREE_CONSTANT (v))
3782 x = create_tmp_var_raw (type, name);
3783 gimple_add_tmp_var (x);
3784 TREE_ADDRESSABLE (x) = 1;
3785 x = build_fold_addr_expr_loc (clause_loc, x);
3787 else
3789 tree atmp
3790 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
3791 tree t = maybe_lookup_decl (v, ctx);
3792 if (t)
3793 v = t;
3794 else
3795 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
3796 gimplify_expr (&v, ilist, NULL, is_gimple_val, fb_rvalue);
3797 t = fold_build2_loc (clause_loc, PLUS_EXPR,
3798 TREE_TYPE (v), v,
3799 build_int_cst (TREE_TYPE (v), 1));
3800 t = fold_build2_loc (clause_loc, MULT_EXPR,
3801 TREE_TYPE (v), t,
3802 TYPE_SIZE_UNIT (TREE_TYPE (type)));
3803 tree al = size_int (TYPE_ALIGN (TREE_TYPE (type)));
3804 x = build_call_expr_loc (clause_loc, atmp, 2, t, al);
3807 tree ptype = build_pointer_type (TREE_TYPE (type));
3808 x = fold_convert_loc (clause_loc, ptype, x);
3809 tree y = create_tmp_var (ptype, name);
3810 gimplify_assign (y, x, ilist);
3811 x = y;
3812 tree yb = y;
3814 if (!integer_zerop (bias))
3816 bias = fold_convert_loc (clause_loc, pointer_sized_int_node,
3817 bias);
3818 yb = fold_convert_loc (clause_loc, pointer_sized_int_node, x);
3820 yb = fold_build2_loc (clause_loc, MINUS_EXPR,
3821 pointer_sized_int_node, yb, bias);
3822 x = fold_convert_loc (clause_loc, TREE_TYPE (x), yb);
3823 yb = create_tmp_var (ptype, name);
3824 gimplify_assign (yb, x, ilist);
3825 x = yb;
3828 d = TREE_OPERAND (d, 0);
3829 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
3830 d = TREE_OPERAND (d, 0);
3831 if (TREE_CODE (d) == ADDR_EXPR)
3833 if (orig_var != var)
3835 gcc_assert (is_variable_sized (orig_var));
3836 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
3838 gimplify_assign (new_var, x, ilist);
3839 tree new_orig_var = lookup_decl (orig_var, ctx);
3840 tree t = build_fold_indirect_ref (new_var);
3841 DECL_IGNORED_P (new_var) = 0;
3842 TREE_THIS_NOTRAP (t) = 1;
3843 SET_DECL_VALUE_EXPR (new_orig_var, t);
3844 DECL_HAS_VALUE_EXPR_P (new_orig_var) = 1;
3846 else
3848 x = build2 (MEM_REF, TREE_TYPE (new_var), x,
3849 build_int_cst (ptype, 0));
3850 SET_DECL_VALUE_EXPR (new_var, x);
3851 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
3854 else
3856 gcc_assert (orig_var == var);
3857 if (TREE_CODE (d) == INDIRECT_REF)
3859 x = create_tmp_var (ptype, name);
3860 TREE_ADDRESSABLE (x) = 1;
3861 gimplify_assign (x, yb, ilist);
3862 x = build_fold_addr_expr_loc (clause_loc, x);
3864 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
3865 gimplify_assign (new_var, x, ilist);
3867 tree y1 = create_tmp_var (ptype, NULL);
3868 gimplify_assign (y1, y, ilist);
3869 tree i2 = NULL_TREE, y2 = NULL_TREE;
3870 tree body2 = NULL_TREE, end2 = NULL_TREE;
3871 tree y3 = NULL_TREE, y4 = NULL_TREE;
3872 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) || is_simd)
3874 y2 = create_tmp_var (ptype, NULL);
3875 gimplify_assign (y2, y, ilist);
3876 tree ref = build_outer_var_ref (var, ctx);
3877 /* For references, build_outer_var_ref already performs the dereference. */
3878 if (TREE_CODE (d) == INDIRECT_REF)
3879 gcc_assert (omp_is_reference (var));
3880 else if (TREE_CODE (d) == ADDR_EXPR)
3881 ref = build_fold_addr_expr (ref);
3882 else if (omp_is_reference (var))
3883 ref = build_fold_addr_expr (ref);
3884 ref = fold_convert_loc (clause_loc, ptype, ref);
3885 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
3886 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
3888 y3 = create_tmp_var (ptype, NULL);
3889 gimplify_assign (y3, unshare_expr (ref), ilist);
3891 if (is_simd)
3893 y4 = create_tmp_var (ptype, NULL);
3894 gimplify_assign (y4, ref, dlist);
3897 tree i = create_tmp_var (TREE_TYPE (v), NULL);
3898 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), ilist);
3899 tree body = create_artificial_label (UNKNOWN_LOCATION);
3900 tree end = create_artificial_label (UNKNOWN_LOCATION);
3901 gimple_seq_add_stmt (ilist, gimple_build_label (body));
3902 if (y2)
3904 i2 = create_tmp_var (TREE_TYPE (v), NULL);
3905 gimplify_assign (i2, build_int_cst (TREE_TYPE (v), 0), dlist);
3906 body2 = create_artificial_label (UNKNOWN_LOCATION);
3907 end2 = create_artificial_label (UNKNOWN_LOCATION);
3908 gimple_seq_add_stmt (dlist, gimple_build_label (body2));
3910 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
3912 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
3913 tree decl_placeholder
3914 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
3915 SET_DECL_VALUE_EXPR (decl_placeholder,
3916 build_simple_mem_ref (y1));
3917 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
3918 SET_DECL_VALUE_EXPR (placeholder,
3919 y3 ? build_simple_mem_ref (y3)
3920 : error_mark_node);
3921 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
3922 x = lang_hooks.decls.omp_clause_default_ctor
3923 (c, build_simple_mem_ref (y1),
3924 y3 ? build_simple_mem_ref (y3) : NULL_TREE);
3925 if (x)
3926 gimplify_and_add (x, ilist);
3927 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
3929 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
3930 lower_omp (&tseq, ctx);
3931 gimple_seq_add_seq (ilist, tseq);
3933 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
3934 if (is_simd)
3936 SET_DECL_VALUE_EXPR (decl_placeholder,
3937 build_simple_mem_ref (y2));
3938 SET_DECL_VALUE_EXPR (placeholder,
3939 build_simple_mem_ref (y4));
3940 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
3941 lower_omp (&tseq, ctx);
3942 gimple_seq_add_seq (dlist, tseq);
3943 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
3945 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
3946 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 0;
3947 x = lang_hooks.decls.omp_clause_dtor
3948 (c, build_simple_mem_ref (y2));
3949 if (x)
3951 gimple_seq tseq = NULL;
3952 dtor = x;
3953 gimplify_stmt (&dtor, &tseq);
3954 gimple_seq_add_seq (dlist, tseq);
3957 else
3959 x = omp_reduction_init (c, TREE_TYPE (type));
3960 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
3962 /* reduction(-:var) sums up the partial results, so it
3963 acts identically to reduction(+:var). */
3964 if (code == MINUS_EXPR)
3965 code = PLUS_EXPR;
3967 gimplify_assign (build_simple_mem_ref (y1), x, ilist);
3968 if (is_simd)
3970 x = build2 (code, TREE_TYPE (type),
3971 build_simple_mem_ref (y4),
3972 build_simple_mem_ref (y2));
3973 gimplify_assign (build_simple_mem_ref (y4), x, dlist);
3976 gimple *g
3977 = gimple_build_assign (y1, POINTER_PLUS_EXPR, y1,
3978 TYPE_SIZE_UNIT (TREE_TYPE (type)));
3979 gimple_seq_add_stmt (ilist, g);
3980 if (y3)
3982 g = gimple_build_assign (y3, POINTER_PLUS_EXPR, y3,
3983 TYPE_SIZE_UNIT (TREE_TYPE (type)));
3984 gimple_seq_add_stmt (ilist, g);
3986 g = gimple_build_assign (i, PLUS_EXPR, i,
3987 build_int_cst (TREE_TYPE (i), 1));
3988 gimple_seq_add_stmt (ilist, g);
3989 g = gimple_build_cond (LE_EXPR, i, v, body, end);
3990 gimple_seq_add_stmt (ilist, g);
3991 gimple_seq_add_stmt (ilist, gimple_build_label (end));
3992 if (y2)
3994 g = gimple_build_assign (y2, POINTER_PLUS_EXPR, y2,
3995 TYPE_SIZE_UNIT (TREE_TYPE (type)));
3996 gimple_seq_add_stmt (dlist, g);
3997 if (y4)
3999 g = gimple_build_assign
4000 (y4, POINTER_PLUS_EXPR, y4,
4001 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4002 gimple_seq_add_stmt (dlist, g);
4004 g = gimple_build_assign (i2, PLUS_EXPR, i2,
4005 build_int_cst (TREE_TYPE (i2), 1));
4006 gimple_seq_add_stmt (dlist, g);
4007 g = gimple_build_cond (LE_EXPR, i2, v, body2, end2);
4008 gimple_seq_add_stmt (dlist, g);
4009 gimple_seq_add_stmt (dlist, gimple_build_label (end2));
4011 continue;
4013 else if (is_variable_sized (var))
4015 /* For variable sized types, we need to allocate the
4016 actual storage here. Call alloca and store the
4017 result in the pointer decl that we created elsewhere. */
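/* A rough sketch of what is emitted here for a VLA "int a[n]"
   (names illustrative):
     void *tmp = __builtin_alloca_with_align (n * sizeof (int), align);
     a.ptr = (int *) tmp;
   after which all accesses go through the pointer decl behind
   DECL_VALUE_EXPR.  */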
4018 if (pass == 0)
4019 continue;
4021 if (c_kind != OMP_CLAUSE_FIRSTPRIVATE || !is_task_ctx (ctx))
4023 gcall *stmt;
4024 tree tmp, atmp;
4026 ptr = DECL_VALUE_EXPR (new_var);
4027 gcc_assert (TREE_CODE (ptr) == INDIRECT_REF);
4028 ptr = TREE_OPERAND (ptr, 0);
4029 gcc_assert (DECL_P (ptr));
4030 x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
4032 /* void *tmp = __builtin_alloca_with_align (size, align); */
4033 atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
4034 stmt = gimple_build_call (atmp, 2, x,
4035 size_int (DECL_ALIGN (var)));
4036 tmp = create_tmp_var_raw (ptr_type_node);
4037 gimple_add_tmp_var (tmp);
4038 gimple_call_set_lhs (stmt, tmp);
4040 gimple_seq_add_stmt (ilist, stmt);
4042 x = fold_convert_loc (clause_loc, TREE_TYPE (ptr), tmp);
4043 gimplify_assign (ptr, x, ilist);
4046 else if (omp_is_reference (var))
4048 /* For references that are being privatized for Fortran,
4049 allocate new backing storage for the new pointer
4050 variable. This allows us to avoid changing all the
4051 code that expects a pointer to something that expects
4052 a direct variable. */
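/* Sketch of the common (non-task, constant-size) case, with
   illustrative names: for a privatized reference REF to type T
   this emits roughly
     T backing;
     ref_priv = &backing;
   so code that dereferences the pointer keeps working unchanged.  */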
4053 if (pass == 0)
4054 continue;
4056 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
4057 if (c_kind == OMP_CLAUSE_FIRSTPRIVATE && is_task_ctx (ctx))
4059 x = build_receiver_ref (var, false, ctx);
4060 x = build_fold_addr_expr_loc (clause_loc, x);
4062 else if (TREE_CONSTANT (x))
4064 /* For a reduction in a SIMD loop, defer adding the
4065 initialization of the reference, because if we decide
4066 to use a SIMD array for it, the initialization could cause
4067 an expansion ICE. */
4068 if (c_kind == OMP_CLAUSE_REDUCTION && is_simd)
4069 x = NULL_TREE;
4070 else
4072 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
4073 get_name (var));
4074 gimple_add_tmp_var (x);
4075 TREE_ADDRESSABLE (x) = 1;
4076 x = build_fold_addr_expr_loc (clause_loc, x);
4079 else
4081 tree atmp
4082 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
4083 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
4084 tree al = size_int (TYPE_ALIGN (rtype));
4085 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
4088 if (x)
4090 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
4091 gimplify_assign (new_var, x, ilist);
4094 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
4096 else if (c_kind == OMP_CLAUSE_REDUCTION
4097 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4099 if (pass == 0)
4100 continue;
4102 else if (pass != 0)
4103 continue;
4105 switch (OMP_CLAUSE_CODE (c))
4107 case OMP_CLAUSE_SHARED:
4108 /* Ignore shared directives in teams construct. */
4109 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
4110 continue;
4111 /* Shared global vars are just accessed directly. */
4112 if (is_global_var (new_var))
4113 break;
4114 /* For taskloop firstprivate/lastprivate, represented
4115 as firstprivate and shared clause on the task, new_var
4116 is the firstprivate var. */
4117 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
4118 break;
4119 /* Set up the DECL_VALUE_EXPR for shared variables now. This
4120 needs to be delayed until after fixup_child_record_type so
4121 that we get the correct type during the dereference. */
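/* E.g. a shared "int x" is rewritten so that uses of the local decl
   resolve through the receiver, roughly x -> .omp_data_i->x, or
   *.omp_data_i->x when the field is a pointer.  */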
4122 by_ref = use_pointer_for_field (var, ctx);
4123 x = build_receiver_ref (var, by_ref, ctx);
4124 SET_DECL_VALUE_EXPR (new_var, x);
4125 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4127 /* ??? If VAR is not passed by reference, and the variable
4128 hasn't been initialized yet, then we'll get a warning for
4129 the store into the omp_data_s structure. Ideally, we'd be
4130 able to notice this and not store anything at all, but
4131 we're generating code too early. Suppress the warning. */
4132 if (!by_ref)
4133 TREE_NO_WARNING (var) = 1;
4134 break;
4136 case OMP_CLAUSE_LASTPRIVATE:
4137 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
4138 break;
4139 /* FALLTHRU */
4141 case OMP_CLAUSE_PRIVATE:
4142 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE)
4143 x = build_outer_var_ref (var, ctx);
4144 else if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
4146 if (is_task_ctx (ctx))
4147 x = build_receiver_ref (var, false, ctx);
4148 else
4149 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_PRIVATE);
4151 else
4152 x = NULL;
4153 do_private:
4154 tree nx;
4155 nx = lang_hooks.decls.omp_clause_default_ctor
4156 (c, unshare_expr (new_var), x);
4157 if (is_simd)
4159 tree y = lang_hooks.decls.omp_clause_dtor (c, new_var);
4160 if ((TREE_ADDRESSABLE (new_var) || nx || y
4161 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
4162 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4163 ivar, lvar))
4165 if (nx)
4166 x = lang_hooks.decls.omp_clause_default_ctor
4167 (c, unshare_expr (ivar), x);
4168 if (nx && x)
4169 gimplify_and_add (x, &llist[0]);
4170 if (y)
4172 y = lang_hooks.decls.omp_clause_dtor (c, ivar);
4173 if (y)
4175 gimple_seq tseq = NULL;
4177 dtor = y;
4178 gimplify_stmt (&dtor, &tseq);
4179 gimple_seq_add_seq (&llist[1], tseq);
4182 break;
4185 if (nx)
4186 gimplify_and_add (nx, ilist);
4187 /* FALLTHRU */
4189 do_dtor:
4190 x = lang_hooks.decls.omp_clause_dtor (c, new_var);
4191 if (x)
4193 gimple_seq tseq = NULL;
4195 dtor = x;
4196 gimplify_stmt (&dtor, &tseq);
4197 gimple_seq_add_seq (dlist, tseq);
4199 break;
4201 case OMP_CLAUSE_LINEAR:
4202 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
4203 goto do_firstprivate;
4204 if (OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
4205 x = NULL;
4206 else
4207 x = build_outer_var_ref (var, ctx);
4208 goto do_private;
4210 case OMP_CLAUSE_FIRSTPRIVATE:
4211 if (is_task_ctx (ctx))
4213 if (omp_is_reference (var) || is_variable_sized (var))
4214 goto do_dtor;
4215 else if (is_global_var (maybe_lookup_decl_in_outer_ctx (var,
4216 ctx))
4217 || use_pointer_for_field (var, NULL))
4219 x = build_receiver_ref (var, false, ctx);
4220 SET_DECL_VALUE_EXPR (new_var, x);
4221 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4222 goto do_dtor;
4225 do_firstprivate:
4226 x = build_outer_var_ref (var, ctx);
4227 if (is_simd)
4229 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
4230 && gimple_omp_for_combined_into_p (ctx->stmt))
4232 tree t = OMP_CLAUSE_LINEAR_STEP (c);
4233 tree stept = TREE_TYPE (t);
4234 tree ct = omp_find_clause (clauses,
4235 OMP_CLAUSE__LOOPTEMP_);
4236 gcc_assert (ct);
4237 tree l = OMP_CLAUSE_DECL (ct);
4238 tree n1 = fd->loop.n1;
4239 tree step = fd->loop.step;
4240 tree itype = TREE_TYPE (l);
4241 if (POINTER_TYPE_P (itype))
4242 itype = signed_type_for (itype);
4243 l = fold_build2 (MINUS_EXPR, itype, l, n1);
4244 if (TYPE_UNSIGNED (itype)
4245 && fd->loop.cond_code == GT_EXPR)
4246 l = fold_build2 (TRUNC_DIV_EXPR, itype,
4247 fold_build1 (NEGATE_EXPR, itype, l),
4248 fold_build1 (NEGATE_EXPR,
4249 itype, step));
4250 else
4251 l = fold_build2 (TRUNC_DIV_EXPR, itype, l, step);
4252 t = fold_build2 (MULT_EXPR, stept,
4253 fold_convert (stept, l), t);
4255 if (OMP_CLAUSE_LINEAR_ARRAY (c))
4257 x = lang_hooks.decls.omp_clause_linear_ctor
4258 (c, new_var, x, t);
4259 gimplify_and_add (x, ilist);
4260 goto do_dtor;
4263 if (POINTER_TYPE_P (TREE_TYPE (x)))
4264 x = fold_build2 (POINTER_PLUS_EXPR,
4265 TREE_TYPE (x), x, t);
4266 else
4267 x = fold_build2 (PLUS_EXPR, TREE_TYPE (x), x, t);
4270 if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LINEAR
4271 || TREE_ADDRESSABLE (new_var))
4272 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4273 ivar, lvar))
4275 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR)
4277 tree iv = create_tmp_var (TREE_TYPE (new_var));
4278 x = lang_hooks.decls.omp_clause_copy_ctor (c, iv, x);
4279 gimplify_and_add (x, ilist);
4280 gimple_stmt_iterator gsi
4281 = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
4282 gassign *g
4283 = gimple_build_assign (unshare_expr (lvar), iv);
4284 gsi_insert_before_without_update (&gsi, g,
4285 GSI_SAME_STMT);
4286 tree t = OMP_CLAUSE_LINEAR_STEP (c);
4287 enum tree_code code = PLUS_EXPR;
4288 if (POINTER_TYPE_P (TREE_TYPE (new_var)))
4289 code = POINTER_PLUS_EXPR;
4290 g = gimple_build_assign (iv, code, iv, t);
4291 gsi_insert_before_without_update (&gsi, g,
4292 GSI_SAME_STMT);
4293 break;
4295 x = lang_hooks.decls.omp_clause_copy_ctor
4296 (c, unshare_expr (ivar), x);
4297 gimplify_and_add (x, &llist[0]);
4298 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
4299 if (x)
4301 gimple_seq tseq = NULL;
4303 dtor = x;
4304 gimplify_stmt (&dtor, &tseq);
4305 gimple_seq_add_seq (&llist[1], tseq);
4307 break;
4310 x = lang_hooks.decls.omp_clause_copy_ctor
4311 (c, unshare_expr (new_var), x);
4312 gimplify_and_add (x, ilist);
4313 goto do_dtor;
4315 case OMP_CLAUSE__LOOPTEMP_:
4316 gcc_assert (is_taskreg_ctx (ctx));
4317 x = build_outer_var_ref (var, ctx);
4318 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
4319 gimplify_and_add (x, ilist);
4320 break;
4322 case OMP_CLAUSE_COPYIN:
4323 by_ref = use_pointer_for_field (var, NULL);
4324 x = build_receiver_ref (var, by_ref, ctx);
4325 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, x);
4326 append_to_statement_list (x, &copyin_seq);
4327 copyin_by_ref |= by_ref;
4328 break;
4330 case OMP_CLAUSE_REDUCTION:
4331 /* OpenACC reductions are initialized using the
4332 GOACC_REDUCTION internal function. */
4333 if (is_gimple_omp_oacc (ctx->stmt))
4334 break;
4335 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4337 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
4338 gimple *tseq;
4339 x = build_outer_var_ref (var, ctx);
4341 if (omp_is_reference (var)
4342 && !useless_type_conversion_p (TREE_TYPE (placeholder),
4343 TREE_TYPE (x)))
4344 x = build_fold_addr_expr_loc (clause_loc, x);
4345 SET_DECL_VALUE_EXPR (placeholder, x);
4346 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
4347 tree new_vard = new_var;
4348 if (omp_is_reference (var))
4350 gcc_assert (TREE_CODE (new_var) == MEM_REF);
4351 new_vard = TREE_OPERAND (new_var, 0);
4352 gcc_assert (DECL_P (new_vard));
4354 if (is_simd
4355 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4356 ivar, lvar))
4358 if (new_vard == new_var)
4360 gcc_assert (DECL_VALUE_EXPR (new_var) == lvar);
4361 SET_DECL_VALUE_EXPR (new_var, ivar);
4363 else
4365 SET_DECL_VALUE_EXPR (new_vard,
4366 build_fold_addr_expr (ivar));
4367 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
4369 x = lang_hooks.decls.omp_clause_default_ctor
4370 (c, unshare_expr (ivar),
4371 build_outer_var_ref (var, ctx));
4372 if (x)
4373 gimplify_and_add (x, &llist[0]);
4374 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
4376 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
4377 lower_omp (&tseq, ctx);
4378 gimple_seq_add_seq (&llist[0], tseq);
4380 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
4381 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
4382 lower_omp (&tseq, ctx);
4383 gimple_seq_add_seq (&llist[1], tseq);
4384 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
4385 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
4386 if (new_vard == new_var)
4387 SET_DECL_VALUE_EXPR (new_var, lvar);
4388 else
4389 SET_DECL_VALUE_EXPR (new_vard,
4390 build_fold_addr_expr (lvar));
4391 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
4392 if (x)
4394 tseq = NULL;
4395 dtor = x;
4396 gimplify_stmt (&dtor, &tseq);
4397 gimple_seq_add_seq (&llist[1], tseq);
4399 break;
4401 /* If this is a reference to a constant-size reduction var
4402 with a placeholder, we haven't emitted the initializer
4403 for it because that is undesirable if SIMD arrays are used.
4404 But if they aren't used, we need to emit the deferred
4405 initialization now. */
4406 else if (omp_is_reference (var) && is_simd)
4407 handle_simd_reference (clause_loc, new_vard, ilist);
4408 x = lang_hooks.decls.omp_clause_default_ctor
4409 (c, unshare_expr (new_var),
4410 build_outer_var_ref (var, ctx));
4411 if (x)
4412 gimplify_and_add (x, ilist);
4413 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
4415 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
4416 lower_omp (&tseq, ctx);
4417 gimple_seq_add_seq (ilist, tseq);
4419 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
4420 if (is_simd)
4422 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
4423 lower_omp (&tseq, ctx);
4424 gimple_seq_add_seq (dlist, tseq);
4425 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
4427 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
4428 goto do_dtor;
4430 else
4432 x = omp_reduction_init (c, TREE_TYPE (new_var));
4433 gcc_assert (TREE_CODE (TREE_TYPE (new_var)) != ARRAY_TYPE);
4434 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
4436 /* reduction(-:var) sums up the partial results, so it
4437 acts identically to reduction(+:var). */
4438 if (code == MINUS_EXPR)
4439 code = PLUS_EXPR;
4441 tree new_vard = new_var;
4442 if (is_simd && omp_is_reference (var))
4444 gcc_assert (TREE_CODE (new_var) == MEM_REF);
4445 new_vard = TREE_OPERAND (new_var, 0);
4446 gcc_assert (DECL_P (new_vard));
4448 if (is_simd
4449 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4450 ivar, lvar))
4452 tree ref = build_outer_var_ref (var, ctx);
4454 gimplify_assign (unshare_expr (ivar), x, &llist[0]);
4456 if (sctx.is_simt)
4458 if (!simt_lane)
4459 simt_lane = create_tmp_var (unsigned_type_node);
4460 x = build_call_expr_internal_loc
4461 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_BFLY,
4462 TREE_TYPE (ivar), 2, ivar, simt_lane);
4463 x = build2 (code, TREE_TYPE (ivar), ivar, x);
4464 gimplify_assign (ivar, x, &llist[2]);
4466 x = build2 (code, TREE_TYPE (ref), ref, ivar);
4467 ref = build_outer_var_ref (var, ctx);
4468 gimplify_assign (ref, x, &llist[1]);
4470 if (new_vard != new_var)
4472 SET_DECL_VALUE_EXPR (new_vard,
4473 build_fold_addr_expr (lvar));
4474 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
4477 else
4479 if (omp_is_reference (var) && is_simd)
4480 handle_simd_reference (clause_loc, new_vard, ilist);
4481 gimplify_assign (new_var, x, ilist);
4482 if (is_simd)
4484 tree ref = build_outer_var_ref (var, ctx);
4486 x = build2 (code, TREE_TYPE (ref), ref, new_var);
4487 ref = build_outer_var_ref (var, ctx);
4488 gimplify_assign (ref, x, dlist);
4492 break;
4494 default:
4495 gcc_unreachable ();
4500 if (sctx.max_vf == 1)
4501 sctx.is_simt = false;
4503 if (sctx.lane || sctx.is_simt)
4505 uid = create_tmp_var (ptr_type_node, "simduid");
4506 /* Don't warn about simduid being uninitialized; it always is,
4507 since we use it only for its DECL_UID, never for its value. */
4508 TREE_NO_WARNING (uid) = 1;
4509 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SIMDUID_);
4510 OMP_CLAUSE__SIMDUID__DECL (c) = uid;
4511 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
4512 gimple_omp_for_set_clauses (ctx->stmt, c);
4514 /* Emit calls denoting privatized variables and initializing a pointer to
4515 the structure that holds private variables as fields, after the ompdevlow pass. */
4516 if (sctx.is_simt)
4518 sctx.simt_eargs[0] = uid;
4519 gimple *g
4520 = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, sctx.simt_eargs);
4521 gimple_call_set_lhs (g, uid);
4522 gimple_seq_add_stmt (ilist, g);
4523 sctx.simt_eargs.release ();
4525 simtrec = create_tmp_var (ptr_type_node, ".omp_simt");
4526 g = gimple_build_call_internal (IFN_GOMP_SIMT_ENTER_ALLOC, 1, uid);
4527 gimple_call_set_lhs (g, simtrec);
4528 gimple_seq_add_stmt (ilist, g);
4530 if (sctx.lane)
4532 gimple *g
4533 = gimple_build_call_internal (IFN_GOMP_SIMD_LANE, 1, uid);
4534 gimple_call_set_lhs (g, sctx.lane);
4535 gimple_stmt_iterator gsi = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
4536 gsi_insert_before_without_update (&gsi, g, GSI_SAME_STMT);
4537 g = gimple_build_assign (sctx.lane, INTEGER_CST,
4538 build_int_cst (unsigned_type_node, 0));
4539 gimple_seq_add_stmt (ilist, g);
4540 /* Emit reductions across SIMT lanes in log_2(simt_vf) steps. */
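/* A rough sketch of the generated butterfly loop (names
   illustrative; the per-variable combine comes from llist[2]):
     simt_lane = 1;
     while (simt_lane < simt_vf)
       {
         tmp = GOMP_SIMT_XCHG_BFLY (priv, simt_lane);
         priv = priv OP tmp;
         simt_lane <<= 1;
       }  */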
4541 if (llist[2])
4543 tree simt_vf = create_tmp_var (unsigned_type_node);
4544 g = gimple_build_call_internal (IFN_GOMP_SIMT_VF, 0);
4545 gimple_call_set_lhs (g, simt_vf);
4546 gimple_seq_add_stmt (dlist, g);
4548 tree t = build_int_cst (unsigned_type_node, 1);
4549 g = gimple_build_assign (simt_lane, INTEGER_CST, t);
4550 gimple_seq_add_stmt (dlist, g);
4552 t = build_int_cst (unsigned_type_node, 0);
4553 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
4554 gimple_seq_add_stmt (dlist, g);
4556 tree body = create_artificial_label (UNKNOWN_LOCATION);
4557 tree header = create_artificial_label (UNKNOWN_LOCATION);
4558 tree end = create_artificial_label (UNKNOWN_LOCATION);
4559 gimple_seq_add_stmt (dlist, gimple_build_goto (header));
4560 gimple_seq_add_stmt (dlist, gimple_build_label (body));
4562 gimple_seq_add_seq (dlist, llist[2]);
4564 g = gimple_build_assign (simt_lane, LSHIFT_EXPR, simt_lane, integer_one_node);
4565 gimple_seq_add_stmt (dlist, g);
4567 gimple_seq_add_stmt (dlist, gimple_build_label (header));
4568 g = gimple_build_cond (LT_EXPR, simt_lane, simt_vf, body, end);
4569 gimple_seq_add_stmt (dlist, g);
4571 gimple_seq_add_stmt (dlist, gimple_build_label (end));
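/* Wrap the per-lane constructor (llist[0], into ILIST) and destructor
   (llist[1], into DLIST) sequences in a loop over the SIMD array,
   roughly
     for (sctx.idx = 0; sctx.idx < GOMP_SIMD_VF (simduid); sctx.idx++)
       <llist[i] body>;  */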
4573 for (int i = 0; i < 2; i++)
4574 if (llist[i])
4576 tree vf = create_tmp_var (unsigned_type_node);
4577 g = gimple_build_call_internal (IFN_GOMP_SIMD_VF, 1, uid);
4578 gimple_call_set_lhs (g, vf);
4579 gimple_seq *seq = i == 0 ? ilist : dlist;
4580 gimple_seq_add_stmt (seq, g);
4581 tree t = build_int_cst (unsigned_type_node, 0);
4582 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
4583 gimple_seq_add_stmt (seq, g);
4584 tree body = create_artificial_label (UNKNOWN_LOCATION);
4585 tree header = create_artificial_label (UNKNOWN_LOCATION);
4586 tree end = create_artificial_label (UNKNOWN_LOCATION);
4587 gimple_seq_add_stmt (seq, gimple_build_goto (header));
4588 gimple_seq_add_stmt (seq, gimple_build_label (body));
4589 gimple_seq_add_seq (seq, llist[i]);
4590 t = build_int_cst (unsigned_type_node, 1);
4591 g = gimple_build_assign (sctx.idx, PLUS_EXPR, sctx.idx, t);
4592 gimple_seq_add_stmt (seq, g);
4593 gimple_seq_add_stmt (seq, gimple_build_label (header));
4594 g = gimple_build_cond (LT_EXPR, sctx.idx, vf, body, end);
4595 gimple_seq_add_stmt (seq, g);
4596 gimple_seq_add_stmt (seq, gimple_build_label (end));
4599 if (sctx.is_simt)
4601 gimple_seq_add_seq (dlist, sctx.simt_dlist);
4602 gimple *g
4603 = gimple_build_call_internal (IFN_GOMP_SIMT_EXIT, 1, simtrec);
4604 gimple_seq_add_stmt (dlist, g);
4607 /* The copyin sequence is not to be executed by the main thread, since
4608 that would result in self-copies. The self-copy might be harmless
4609 for scalars, but it certainly is not for C++ operator=. */
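/* The guard emitted below is roughly
     if (omp_get_thread_num () != 0)
       <copyin_seq>;  */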
4610 if (copyin_seq)
4612 x = build_call_expr (builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM),
4613 0);
4614 x = build2 (NE_EXPR, boolean_type_node, x,
4615 build_int_cst (TREE_TYPE (x), 0));
4616 x = build3 (COND_EXPR, void_type_node, x, copyin_seq, NULL);
4617 gimplify_and_add (x, ilist);
4620 /* If any copyin variable is passed by reference, we must ensure the
4621 master thread doesn't modify it before it is copied over in all
4622 threads. Similarly, for variables in both firstprivate and
4623 lastprivate clauses we need to ensure the lastprivate copying
4624 happens after the firstprivate copying in all threads. And
4625 similarly for UDRs if the initializer expression refers to omp_orig. */
4626 if (copyin_by_ref || lastprivate_firstprivate || reduction_omp_orig_ref)
4628 /* Don't add any barrier for #pragma omp simd or
4629 #pragma omp distribute. */
4630 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
4631 || gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_FOR)
4632 gimple_seq_add_stmt (ilist, omp_build_barrier (NULL_TREE));
4635 /* If max_vf is non-zero, then we can use only a vectorization factor
4636 up to the max_vf we chose. So stick it into the safelen clause. */
4637 if (sctx.max_vf)
4639 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
4640 OMP_CLAUSE_SAFELEN);
4641 if (c == NULL_TREE
4642 || (TREE_CODE (OMP_CLAUSE_SAFELEN_EXPR (c)) == INTEGER_CST
4643 && compare_tree_int (OMP_CLAUSE_SAFELEN_EXPR (c),
4644 sctx.max_vf) == 1))
4646 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN);
4647 OMP_CLAUSE_SAFELEN_EXPR (c) = build_int_cst (integer_type_node,
4648 sctx.max_vf);
4649 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
4650 gimple_omp_for_set_clauses (ctx->stmt, c);
4656 /* Generate code to implement the LASTPRIVATE clauses. This is used for
4657 both parallel and workshare constructs. PREDICATE may be NULL if it's
4658 always true. */
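/* E.g. for "#pragma omp for lastprivate(x)" this emits, roughly,
     if (<PREDICATE: this thread ran the last iteration>)
       x_orig = x_priv;
   with illustrative names for the original and private copies.  */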
4660 static void
4661 lower_lastprivate_clauses (tree clauses, tree predicate, gimple_seq *stmt_list,
4662 omp_context *ctx)
4664 tree x, c, label = NULL, orig_clauses = clauses;
4665 bool par_clauses = false;
4666 tree simduid = NULL, lastlane = NULL, simtcond = NULL, simtlast = NULL;
4668 /* Early exit if there are no lastprivate or linear clauses. */
4669 for (; clauses ; clauses = OMP_CLAUSE_CHAIN (clauses))
4670 if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LASTPRIVATE
4671 || (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LINEAR
4672 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (clauses)))
4673 break;
4674 if (clauses == NULL)
4676 /* If this was a workshare clause, see if it had been combined
4677 with its parallel. In that case, look for the clauses on the
4678 parallel statement itself. */
4679 if (is_parallel_ctx (ctx))
4680 return;
4682 ctx = ctx->outer;
4683 if (ctx == NULL || !is_parallel_ctx (ctx))
4684 return;
4686 clauses = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
4687 OMP_CLAUSE_LASTPRIVATE);
4688 if (clauses == NULL)
4689 return;
4690 par_clauses = true;
4693 bool maybe_simt = false;
4694 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
4695 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
4697 maybe_simt = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMT_);
4698 simduid = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMDUID_);
4699 if (simduid)
4700 simduid = OMP_CLAUSE__SIMDUID__DECL (simduid);
4703 if (predicate)
4705 gcond *stmt;
4706 tree label_true, arm1, arm2;
4707 enum tree_code pred_code = TREE_CODE (predicate);
4709 label = create_artificial_label (UNKNOWN_LOCATION);
4710 label_true = create_artificial_label (UNKNOWN_LOCATION);
4711 if (TREE_CODE_CLASS (pred_code) == tcc_comparison)
4713 arm1 = TREE_OPERAND (predicate, 0);
4714 arm2 = TREE_OPERAND (predicate, 1);
4715 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
4716 gimplify_expr (&arm2, stmt_list, NULL, is_gimple_val, fb_rvalue);
4718 else
4720 arm1 = predicate;
4721 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
4722 arm2 = boolean_false_node;
4723 pred_code = NE_EXPR;
4725 if (maybe_simt)
4727 c = build2 (pred_code, boolean_type_node, arm1, arm2);
4728 c = fold_convert (integer_type_node, c);
4729 simtcond = create_tmp_var (integer_type_node);
4730 gimplify_assign (simtcond, c, stmt_list);
4731 gcall *g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY,
4732 1, simtcond);
4733 c = create_tmp_var (integer_type_node);
4734 gimple_call_set_lhs (g, c);
4735 gimple_seq_add_stmt (stmt_list, g);
4736 stmt = gimple_build_cond (NE_EXPR, c, integer_zero_node,
4737 label_true, label);
4739 else
4740 stmt = gimple_build_cond (pred_code, arm1, arm2, label_true, label);
4741 gimple_seq_add_stmt (stmt_list, stmt);
4742 gimple_seq_add_stmt (stmt_list, gimple_build_label (label_true));
4745 for (c = clauses; c ;)
4747 tree var, new_var;
4748 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
4750 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4751 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
4752 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
4754 var = OMP_CLAUSE_DECL (c);
4755 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4756 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
4757 && is_taskloop_ctx (ctx))
4759 gcc_checking_assert (ctx->outer && is_task_ctx (ctx->outer));
4760 new_var = lookup_decl (var, ctx->outer);
4762 else
4764 new_var = lookup_decl (var, ctx);
4765 /* Avoid uninitialized warnings for lastprivate and
4766 for linear iterators. */
4767 if (predicate
4768 && (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4769 || OMP_CLAUSE_LINEAR_NO_COPYIN (c)))
4770 TREE_NO_WARNING (new_var) = 1;
4773 if (!maybe_simt && simduid && DECL_HAS_VALUE_EXPR_P (new_var))
4775 tree val = DECL_VALUE_EXPR (new_var);
4776 if (TREE_CODE (val) == ARRAY_REF
4777 && VAR_P (TREE_OPERAND (val, 0))
4778 && lookup_attribute ("omp simd array",
4779 DECL_ATTRIBUTES (TREE_OPERAND (val,
4780 0))))
4782 if (lastlane == NULL)
4784 lastlane = create_tmp_var (unsigned_type_node);
4785 gcall *g
4786 = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
4787 2, simduid,
4788 TREE_OPERAND (val, 1));
4789 gimple_call_set_lhs (g, lastlane);
4790 gimple_seq_add_stmt (stmt_list, g);
4792 new_var = build4 (ARRAY_REF, TREE_TYPE (val),
4793 TREE_OPERAND (val, 0), lastlane,
4794 NULL_TREE, NULL_TREE);
4797 else if (maybe_simt)
4799 tree val = (DECL_HAS_VALUE_EXPR_P (new_var)
4800 ? DECL_VALUE_EXPR (new_var)
4801 : new_var);
4802 if (simtlast == NULL)
4804 simtlast = create_tmp_var (unsigned_type_node);
4805 gcall *g = gimple_build_call_internal
4806 (IFN_GOMP_SIMT_LAST_LANE, 1, simtcond);
4807 gimple_call_set_lhs (g, simtlast);
4808 gimple_seq_add_stmt (stmt_list, g);
4810 x = build_call_expr_internal_loc
4811 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_IDX,
4812 TREE_TYPE (val), 2, val, simtlast);
4813 new_var = unshare_expr (new_var);
4814 gimplify_assign (new_var, x, stmt_list);
4815 new_var = unshare_expr (new_var);
4818 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4819 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
4821 lower_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
4822 gimple_seq_add_seq (stmt_list,
4823 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
4824 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) = NULL;
4826 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
4827 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
4829 lower_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
4830 gimple_seq_add_seq (stmt_list,
4831 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
4832 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) = NULL;
4835 x = NULL_TREE;
4836 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4837 && OMP_CLAUSE_LASTPRIVATE_TASKLOOP_IV (c))
4839 gcc_checking_assert (is_taskloop_ctx (ctx));
4840 tree ovar = maybe_lookup_decl_in_outer_ctx (var,
4841 ctx->outer->outer);
4842 if (is_global_var (ovar))
4843 x = ovar;
4845 if (!x)
4846 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_LASTPRIVATE);
4847 if (omp_is_reference (var))
4848 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
4849 x = lang_hooks.decls.omp_clause_assign_op (c, x, new_var);
4850 gimplify_and_add (x, stmt_list);
4852 c = OMP_CLAUSE_CHAIN (c);
4853 if (c == NULL && !par_clauses)
4855 /* If this was a workshare clause, see if it had been combined
4856 with its parallel. In that case, continue looking for the
4857 clauses also on the parallel statement itself. */
4858 if (is_parallel_ctx (ctx))
4859 break;
4861 ctx = ctx->outer;
4862 if (ctx == NULL || !is_parallel_ctx (ctx))
4863 break;
4865 c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
4866 OMP_CLAUSE_LASTPRIVATE);
4867 par_clauses = true;
4871 if (label)
4872 gimple_seq_add_stmt (stmt_list, gimple_build_label (label));
4875 /* Lower the OpenACC reductions of CLAUSES for compute axis LEVEL
4876 (which might be a placeholder). INNER is true if this is an inner
4877 axis of a multi-axis loop. FORK and JOIN are (optional) fork and
4878 join markers. Generate the before-loop forking sequence in
4879 FORK_SEQ and the after-loop joining sequence in JOIN_SEQ. The
4880 general form of these sequences is
4882 GOACC_REDUCTION_SETUP
4883 GOACC_FORK
4884 GOACC_REDUCTION_INIT
4886 GOACC_REDUCTION_FINI
4887 GOACC_JOIN
4888 GOACC_REDUCTION_TEARDOWN. */
4890 static void
4891 lower_oacc_reductions (location_t loc, tree clauses, tree level, bool inner,
4892 gcall *fork, gcall *join, gimple_seq *fork_seq,
4893 gimple_seq *join_seq, omp_context *ctx)
4895 gimple_seq before_fork = NULL;
4896 gimple_seq after_fork = NULL;
4897 gimple_seq before_join = NULL;
4898 gimple_seq after_join = NULL;
4899 tree init_code = NULL_TREE, fini_code = NULL_TREE,
4900 setup_code = NULL_TREE, teardown_code = NULL_TREE;
4901 unsigned offset = 0;
4903 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
4904 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
4906 tree orig = OMP_CLAUSE_DECL (c);
4907 tree var = maybe_lookup_decl (orig, ctx);
4908 tree ref_to_res = NULL_TREE;
4909 tree incoming, outgoing, v1, v2, v3;
4910 bool is_private = false;
4912 enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
4913 if (rcode == MINUS_EXPR)
4914 rcode = PLUS_EXPR;
4915 else if (rcode == TRUTH_ANDIF_EXPR)
4916 rcode = BIT_AND_EXPR;
4917 else if (rcode == TRUTH_ORIF_EXPR)
4918 rcode = BIT_IOR_EXPR;
4919 tree op = build_int_cst (unsigned_type_node, rcode);
4921 if (!var)
4922 var = orig;
4924 incoming = outgoing = var;
4926 if (!inner)
4928 /* See if an outer construct also reduces this variable. */
4929 omp_context *outer = ctx;
4931 while (omp_context *probe = outer->outer)
4933 enum gimple_code type = gimple_code (probe->stmt);
4934 tree cls;
4936 switch (type)
4938 case GIMPLE_OMP_FOR:
4939 cls = gimple_omp_for_clauses (probe->stmt);
4940 break;
4942 case GIMPLE_OMP_TARGET:
4943 if (gimple_omp_target_kind (probe->stmt)
4944 != GF_OMP_TARGET_KIND_OACC_PARALLEL)
4945 goto do_lookup;
4947 cls = gimple_omp_target_clauses (probe->stmt);
4948 break;
4950 default:
4951 goto do_lookup;
4954 outer = probe;
4955 for (; cls; cls = OMP_CLAUSE_CHAIN (cls))
4956 if (OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_REDUCTION
4957 && orig == OMP_CLAUSE_DECL (cls))
4959 incoming = outgoing = lookup_decl (orig, probe);
4960 goto has_outer_reduction;
4962 else if ((OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_FIRSTPRIVATE
4963 || OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_PRIVATE)
4964 && orig == OMP_CLAUSE_DECL (cls))
4966 is_private = true;
4967 goto do_lookup;
4971 do_lookup:
4972 /* This is the outermost construct with this reduction;
4973 see if there's a mapping for it. */
4974 if (gimple_code (outer->stmt) == GIMPLE_OMP_TARGET
4975 && maybe_lookup_field (orig, outer) && !is_private)
4977 ref_to_res = build_receiver_ref (orig, false, outer);
4978 if (omp_is_reference (orig))
4979 ref_to_res = build_simple_mem_ref (ref_to_res);
4981 tree type = TREE_TYPE (var);
4982 if (POINTER_TYPE_P (type))
4983 type = TREE_TYPE (type);
4985 outgoing = var;
4986 incoming = omp_reduction_init_op (loc, rcode, type);
4988 else
4990 /* Try to look up the reduction var in enclosing contexts;
4991 use the original if no mapping is found. */
4992 tree t = NULL_TREE;
4993 omp_context *c = ctx->outer;
4994 while (c && !t)
4996 t = maybe_lookup_decl (orig, c);
4997 c = c->outer;
4999 incoming = outgoing = (t ? t : orig);
5002 has_outer_reduction:;
5005 if (!ref_to_res)
5006 ref_to_res = integer_zero_node;
5008 if (omp_is_reference (orig))
5010 tree type = TREE_TYPE (var);
5011 const char *id = IDENTIFIER_POINTER (DECL_NAME (var));
5013 if (!inner)
5015 tree x = create_tmp_var (TREE_TYPE (type), id);
5016 gimplify_assign (var, build_fold_addr_expr (x), fork_seq);
5019 v1 = create_tmp_var (type, id);
5020 v2 = create_tmp_var (type, id);
5021 v3 = create_tmp_var (type, id);
5023 gimplify_assign (v1, var, fork_seq);
5024 gimplify_assign (v2, var, fork_seq);
5025 gimplify_assign (v3, var, fork_seq);
5027 var = build_simple_mem_ref (var);
5028 v1 = build_simple_mem_ref (v1);
5029 v2 = build_simple_mem_ref (v2);
5030 v3 = build_simple_mem_ref (v3);
5031 outgoing = build_simple_mem_ref (outgoing);
5033 if (!TREE_CONSTANT (incoming))
5034 incoming = build_simple_mem_ref (incoming);
5036 else
5037 v1 = v2 = v3 = var;
5039 /* Determine position in reduction buffer, which may be used
5040 by target. */
5041 enum machine_mode mode = TYPE_MODE (TREE_TYPE (var));
5042 unsigned align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
5043 offset = (offset + align - 1) & ~(align - 1);
5044 tree off = build_int_cst (sizetype, offset);
5045 offset += GET_MODE_SIZE (mode);
5047 if (!init_code)
5049 init_code = build_int_cst (integer_type_node,
5050 IFN_GOACC_REDUCTION_INIT);
5051 fini_code = build_int_cst (integer_type_node,
5052 IFN_GOACC_REDUCTION_FINI);
5053 setup_code = build_int_cst (integer_type_node,
5054 IFN_GOACC_REDUCTION_SETUP);
5055 teardown_code = build_int_cst (integer_type_node,
5056 IFN_GOACC_REDUCTION_TEARDOWN);
5059 tree setup_call
5060 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5061 TREE_TYPE (var), 6, setup_code,
5062 unshare_expr (ref_to_res),
5063 incoming, level, op, off);
5064 tree init_call
5065 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5066 TREE_TYPE (var), 6, init_code,
5067 unshare_expr (ref_to_res),
5068 v1, level, op, off);
5069 tree fini_call
5070 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5071 TREE_TYPE (var), 6, fini_code,
5072 unshare_expr (ref_to_res),
5073 v2, level, op, off);
5074 tree teardown_call
5075 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5076 TREE_TYPE (var), 6, teardown_code,
5077 ref_to_res, v3, level, op, off);
5079 gimplify_assign (v1, setup_call, &before_fork);
5080 gimplify_assign (v2, init_call, &after_fork);
5081 gimplify_assign (v3, fini_call, &before_join);
5082 gimplify_assign (outgoing, teardown_call, &after_join);
5085 /* Now stitch things together. */
5086 gimple_seq_add_seq (fork_seq, before_fork);
5087 if (fork)
5088 gimple_seq_add_stmt (fork_seq, fork);
5089 gimple_seq_add_seq (fork_seq, after_fork);
5091 gimple_seq_add_seq (join_seq, before_join);
5092 if (join)
5093 gimple_seq_add_stmt (join_seq, join);
5094 gimple_seq_add_seq (join_seq, after_join);
5097 /* Generate code to implement the REDUCTION clauses. */
5099 static void
5100 lower_reduction_clauses (tree clauses, gimple_seq *stmt_seqp, omp_context *ctx)
5102 gimple_seq sub_seq = NULL;
5103 gimple *stmt;
5104 tree x, c;
5105 int count = 0;
5107 /* OpenACC loop reductions are handled elsewhere. */
5108 if (is_gimple_omp_oacc (ctx->stmt))
5109 return;
5111 /* SIMD reductions are handled in lower_rec_input_clauses. */
5112 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
5113 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
5114 return;
5116 /* First see if there is exactly one reduction clause. Use OMP_ATOMIC
5117 update in that case, otherwise use a lock. */
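/* E.g. a lone reduction(+:s) is combined with an atomic update,
   roughly "*&s_orig += s_priv", whereas multiple clauses are
   bracketed by GOMP_atomic_start ()/GOMP_atomic_end () calls.  */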
5118 for (c = clauses; c && count < 2; c = OMP_CLAUSE_CHAIN (c))
5119 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
5121 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
5122 || TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
5124 /* Never use OMP_ATOMIC for array reductions or UDRs. */
5125 count = -1;
5126 break;
5128 count++;
5131 if (count == 0)
5132 return;
5134 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
5136 tree var, ref, new_var, orig_var;
5137 enum tree_code code;
5138 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
5140 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5141 continue;
5143 orig_var = var = OMP_CLAUSE_DECL (c);
5144 if (TREE_CODE (var) == MEM_REF)
5146 var = TREE_OPERAND (var, 0);
5147 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
5148 var = TREE_OPERAND (var, 0);
5149 if (TREE_CODE (var) == INDIRECT_REF
5150 || TREE_CODE (var) == ADDR_EXPR)
5151 var = TREE_OPERAND (var, 0);
5152 orig_var = var;
5153 if (is_variable_sized (var))
5155 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
5156 var = DECL_VALUE_EXPR (var);
5157 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
5158 var = TREE_OPERAND (var, 0);
5159 gcc_assert (DECL_P (var));
5162 new_var = lookup_decl (var, ctx);
5163 if (var == OMP_CLAUSE_DECL (c) && omp_is_reference (var))
5164 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
5165 ref = build_outer_var_ref (var, ctx);
5166 code = OMP_CLAUSE_REDUCTION_CODE (c);
5168 /* reduction(-:var) sums up the partial results, so it acts
5169 identically to reduction(+:var). */
5170 if (code == MINUS_EXPR)
5171 code = PLUS_EXPR;
5173 if (count == 1)
5175 tree addr = build_fold_addr_expr_loc (clause_loc, ref);
5177 addr = save_expr (addr);
5178 ref = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (addr)), addr);
5179 x = fold_build2_loc (clause_loc, code, TREE_TYPE (ref), ref, new_var);
5180 x = build2 (OMP_ATOMIC, void_type_node, addr, x);
5181 gimplify_and_add (x, stmt_seqp);
5182 return;
5184 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
5186 tree d = OMP_CLAUSE_DECL (c);
5187 tree type = TREE_TYPE (d);
5188 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
5189 tree i = create_tmp_var (TREE_TYPE (v), NULL);
5190 tree ptype = build_pointer_type (TREE_TYPE (type));
5191 tree bias = TREE_OPERAND (d, 1);
5192 d = TREE_OPERAND (d, 0);
5193 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
5195 tree b = TREE_OPERAND (d, 1);
5196 b = maybe_lookup_decl (b, ctx);
5197 if (b == NULL)
5199 b = TREE_OPERAND (d, 1);
5200 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
5202 if (integer_zerop (bias))
5203 bias = b;
5204 else
5206 bias = fold_convert_loc (clause_loc, TREE_TYPE (b), bias);
5207 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
5208 TREE_TYPE (b), b, bias);
5210 d = TREE_OPERAND (d, 0);
5212 /* For references, build_outer_var_ref already performs the
5213 dereference, so only new_var needs one. */
5214 if (TREE_CODE (d) == INDIRECT_REF)
5216 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
5217 gcc_assert (omp_is_reference (var) && var == orig_var);
5219 else if (TREE_CODE (d) == ADDR_EXPR)
5221 if (orig_var == var)
5223 new_var = build_fold_addr_expr (new_var);
5224 ref = build_fold_addr_expr (ref);
5227 else
5229 gcc_assert (orig_var == var);
5230 if (omp_is_reference (var))
5231 ref = build_fold_addr_expr (ref);
5233 if (DECL_P (v))
5235 tree t = maybe_lookup_decl (v, ctx);
5236 if (t)
5237 v = t;
5238 else
5239 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
5240 gimplify_expr (&v, stmt_seqp, NULL, is_gimple_val, fb_rvalue);
5242 if (!integer_zerop (bias))
5244 bias = fold_convert_loc (clause_loc, sizetype, bias);
5245 new_var = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
5246 TREE_TYPE (new_var), new_var,
5247 unshare_expr (bias));
5248 ref = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
5249 TREE_TYPE (ref), ref, bias);
5251 new_var = fold_convert_loc (clause_loc, ptype, new_var);
5252 ref = fold_convert_loc (clause_loc, ptype, ref);
5253 tree m = create_tmp_var (ptype, NULL);
5254 gimplify_assign (m, new_var, stmt_seqp);
5255 new_var = m;
5256 m = create_tmp_var (ptype, NULL);
5257 gimplify_assign (m, ref, stmt_seqp);
5258 ref = m;
5259 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), stmt_seqp);
5260 tree body = create_artificial_label (UNKNOWN_LOCATION);
5261 tree end = create_artificial_label (UNKNOWN_LOCATION);
5262 gimple_seq_add_stmt (&sub_seq, gimple_build_label (body));
5263 tree priv = build_simple_mem_ref_loc (clause_loc, new_var);
5264 tree out = build_simple_mem_ref_loc (clause_loc, ref);
5265 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5267 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
5268 tree decl_placeholder
5269 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
5270 SET_DECL_VALUE_EXPR (placeholder, out);
5271 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
5272 SET_DECL_VALUE_EXPR (decl_placeholder, priv);
5273 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
5274 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
5275 gimple_seq_add_seq (&sub_seq,
5276 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
5277 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5278 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
5279 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
5281 else
5283 x = build2 (code, TREE_TYPE (out), out, priv);
5284 out = unshare_expr (out);
5285 gimplify_assign (out, x, &sub_seq);
5287 gimple *g = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
5288 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5289 gimple_seq_add_stmt (&sub_seq, g);
5290 g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
5291 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5292 gimple_seq_add_stmt (&sub_seq, g);
5293 g = gimple_build_assign (i, PLUS_EXPR, i,
5294 build_int_cst (TREE_TYPE (i), 1));
5295 gimple_seq_add_stmt (&sub_seq, g);
5296 g = gimple_build_cond (LE_EXPR, i, v, body, end);
5297 gimple_seq_add_stmt (&sub_seq, g);
5298 gimple_seq_add_stmt (&sub_seq, gimple_build_label (end));
5300 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5302 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
5304 if (omp_is_reference (var)
5305 && !useless_type_conversion_p (TREE_TYPE (placeholder),
5306 TREE_TYPE (ref)))
5307 ref = build_fold_addr_expr_loc (clause_loc, ref);
5308 SET_DECL_VALUE_EXPR (placeholder, ref);
5309 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
5310 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
5311 gimple_seq_add_seq (&sub_seq, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
5312 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5313 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
5315 else
5317 x = build2 (code, TREE_TYPE (ref), ref, new_var);
5318 ref = build_outer_var_ref (var, ctx);
5319 gimplify_assign (ref, x, &sub_seq);
5323 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START),
5324 0);
5325 gimple_seq_add_stmt (stmt_seqp, stmt);
5327 gimple_seq_add_seq (stmt_seqp, sub_seq);
5329 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END),
5330 0);
5331 gimple_seq_add_stmt (stmt_seqp, stmt);
5335 /* Generate code to implement the COPYPRIVATE clauses. */
5337 static void
5338 lower_copyprivate_clauses (tree clauses, gimple_seq *slist, gimple_seq *rlist,
5339 omp_context *ctx)
5341 tree c;
5343 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
5345 tree var, new_var, ref, x;
5346 bool by_ref;
5347 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
5349 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYPRIVATE)
5350 continue;
5352 var = OMP_CLAUSE_DECL (c);
5353 by_ref = use_pointer_for_field (var, NULL);
5355 ref = build_sender_ref (var, ctx);
5356 x = new_var = lookup_decl_in_outer_ctx (var, ctx);
5357 if (by_ref)
5359 x = build_fold_addr_expr_loc (clause_loc, new_var);
5360 x = fold_convert_loc (clause_loc, TREE_TYPE (ref), x);
5362 gimplify_assign (ref, x, slist);
5364 ref = build_receiver_ref (var, false, ctx);
5365 if (by_ref)
5367 ref = fold_convert_loc (clause_loc,
5368 build_pointer_type (TREE_TYPE (new_var)),
5369 ref);
5370 ref = build_fold_indirect_ref_loc (clause_loc, ref);
5372 if (omp_is_reference (var))
5374 ref = fold_convert_loc (clause_loc, TREE_TYPE (new_var), ref);
5375 ref = build_simple_mem_ref_loc (clause_loc, ref);
5376 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
5378 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, ref);
5379 gimplify_and_add (x, rlist);
5384 /* Generate code to implement the clauses, FIRSTPRIVATE, COPYIN, LASTPRIVATE,
5385 and REDUCTION from the sender (aka parent) side. */
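/* E.g. for firstprivate(x) the parent stores the value into the
   marshalling struct up front (roughly ".omp_data_o.x = x", in ILIST);
   for lastprivate(x) it is read back afterwards (roughly
   "x = .omp_data_o.x", in OLIST).  */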
5387 static void
5388 lower_send_clauses (tree clauses, gimple_seq *ilist, gimple_seq *olist,
5389 omp_context *ctx)
5391 tree c, t;
5392 int ignored_looptemp = 0;
5393 bool is_taskloop = false;
5395 /* For taskloop, ignore the first two _looptemp_ clauses; those are
5396 initialized by GOMP_taskloop. */
5397 if (is_task_ctx (ctx) && gimple_omp_task_taskloop_p (ctx->stmt))
5399 ignored_looptemp = 2;
5400 is_taskloop = true;
5403 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
5405 tree val, ref, x, var;
5406 bool by_ref, do_in = false, do_out = false;
5407 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
5409 switch (OMP_CLAUSE_CODE (c))
5411 case OMP_CLAUSE_PRIVATE:
5412 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
5413 break;
5414 continue;
5415 case OMP_CLAUSE_FIRSTPRIVATE:
5416 case OMP_CLAUSE_COPYIN:
5417 case OMP_CLAUSE_LASTPRIVATE:
5418 case OMP_CLAUSE_REDUCTION:
5419 break;
5420 case OMP_CLAUSE_SHARED:
5421 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
5422 break;
5423 continue;
5424 case OMP_CLAUSE__LOOPTEMP_:
5425 if (ignored_looptemp)
5427 ignored_looptemp--;
5428 continue;
5430 break;
5431 default:
5432 continue;
5435 val = OMP_CLAUSE_DECL (c);
5436 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
5437 && TREE_CODE (val) == MEM_REF)
5439 val = TREE_OPERAND (val, 0);
5440 if (TREE_CODE (val) == POINTER_PLUS_EXPR)
5441 val = TREE_OPERAND (val, 0);
5442 if (TREE_CODE (val) == INDIRECT_REF
5443 || TREE_CODE (val) == ADDR_EXPR)
5444 val = TREE_OPERAND (val, 0);
5445 if (is_variable_sized (val))
5446 continue;
5449 /* For OMP_CLAUSE_SHARED_FIRSTPRIVATE, look beyond the
5450 outer taskloop region. */
5451 omp_context *ctx_for_o = ctx;
5452 if (is_taskloop
5453 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
5454 && OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
5455 ctx_for_o = ctx->outer;
5457 var = lookup_decl_in_outer_ctx (val, ctx_for_o);
5459 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYIN
5460 && is_global_var (var))
5461 continue;
5463 t = omp_member_access_dummy_var (var);
5464 if (t)
5466 var = DECL_VALUE_EXPR (var);
5467 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx_for_o);
5468 if (o != t)
5469 var = unshare_and_remap (var, t, o);
5470 else
5471 var = unshare_expr (var);
5474 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
5476 /* Handle taskloop firstprivate/lastprivate, where the
5477 lastprivate on GIMPLE_OMP_TASK is represented as
5478 OMP_CLAUSE_SHARED_FIRSTPRIVATE. */
5479 tree f = lookup_sfield ((splay_tree_key) &DECL_UID (val), ctx);
5480 x = omp_build_component_ref (ctx->sender_decl, f);
5481 if (use_pointer_for_field (val, ctx))
5482 var = build_fold_addr_expr (var);
5483 gimplify_assign (x, var, ilist);
5484 DECL_ABSTRACT_ORIGIN (f) = NULL;
5485 continue;
5488 if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
5489 || val == OMP_CLAUSE_DECL (c))
5490 && is_variable_sized (val))
5491 continue;
5492 by_ref = use_pointer_for_field (val, NULL);
5494 switch (OMP_CLAUSE_CODE (c))
5496 case OMP_CLAUSE_FIRSTPRIVATE:
5497 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c)
5498 && !by_ref
5499 && is_task_ctx (ctx))
5500 TREE_NO_WARNING (var) = 1;
5501 do_in = true;
5502 break;
5504 case OMP_CLAUSE_PRIVATE:
5505 case OMP_CLAUSE_COPYIN:
5506 case OMP_CLAUSE__LOOPTEMP_:
5507 do_in = true;
5508 break;
5510 case OMP_CLAUSE_LASTPRIVATE:
5511 if (by_ref || omp_is_reference (val))
5513 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
5514 continue;
5515 do_in = true;
5517 else
5519 do_out = true;
5520 if (lang_hooks.decls.omp_private_outer_ref (val))
5521 do_in = true;
5523 break;
5525 case OMP_CLAUSE_REDUCTION:
5526 do_in = true;
5527 if (val == OMP_CLAUSE_DECL (c))
5528 do_out = !(by_ref || omp_is_reference (val));
5529 else
5530 by_ref = TREE_CODE (TREE_TYPE (val)) == ARRAY_TYPE;
5531 break;
5533 default:
5534 gcc_unreachable ();
5537 if (do_in)
5539 ref = build_sender_ref (val, ctx);
5540 x = by_ref ? build_fold_addr_expr_loc (clause_loc, var) : var;
5541 gimplify_assign (ref, x, ilist);
5542 if (is_task_ctx (ctx))
5543 DECL_ABSTRACT_ORIGIN (TREE_OPERAND (ref, 1)) = NULL;
5546 if (do_out)
5548 ref = build_sender_ref (val, ctx);
5549 gimplify_assign (var, ref, olist);
5554 /* Generate code to implement SHARED from the sender (aka parent)
5555 side. This is trickier, since GIMPLE_OMP_PARALLEL_CLAUSES doesn't
5556 list things that got automatically shared. */
5558 static void
5559 lower_send_shared_vars (gimple_seq *ilist, gimple_seq *olist, omp_context *ctx)
5561 tree var, ovar, nvar, t, f, x, record_type;
5563 if (ctx->record_type == NULL)
5564 return;
5566 record_type = ctx->srecord_type ? ctx->srecord_type : ctx->record_type;
5567 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
5569 ovar = DECL_ABSTRACT_ORIGIN (f);
5570 if (!ovar || TREE_CODE (ovar) == FIELD_DECL)
5571 continue;
5573 nvar = maybe_lookup_decl (ovar, ctx);
5574 if (!nvar || !DECL_HAS_VALUE_EXPR_P (nvar))
5575 continue;
5577 /* If CTX is a nested parallel directive, find the immediately
5578 enclosing parallel or workshare construct that contains a
5579 mapping for OVAR. */
5580 var = lookup_decl_in_outer_ctx (ovar, ctx);
5582 t = omp_member_access_dummy_var (var);
5583 if (t)
5585 var = DECL_VALUE_EXPR (var);
5586 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
5587 if (o != t)
5588 var = unshare_and_remap (var, t, o);
5589 else
5590 var = unshare_expr (var);
5593 if (use_pointer_for_field (ovar, ctx))
5595 x = build_sender_ref (ovar, ctx);
5596 var = build_fold_addr_expr (var);
5597 gimplify_assign (x, var, ilist);
5599 else
5601 x = build_sender_ref (ovar, ctx);
5602 gimplify_assign (x, var, ilist);
5604 if (!TREE_READONLY (var)
5605 /* We don't need to receive a new reference to a result
5606 or parm decl. In fact we may not store to it as we will
5607 invalidate any pending RSO and generate wrong gimple
5608 during inlining. */
5609 && !((TREE_CODE (var) == RESULT_DECL
5610 || TREE_CODE (var) == PARM_DECL)
5611 && DECL_BY_REFERENCE (var)))
5613 x = build_sender_ref (ovar, ctx);
5614 gimplify_assign (var, x, olist);
5620 /* Emit an OpenACC head marker call, encapsulating the partitioning and
5621 other information that must be processed by the target compiler.
5622 Return the maximum number of dimensions the associated loop might
5623 be partitioned over. */
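/* The emitted marker has the rough shape
     .data_dep = IFN_UNIQUE (OACC_HEAD_MARK, .data_dep, levels, tag
                             [, gang_static_expr]);
   with LEVELS and TAG accumulated from the clauses below.  */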
5625 static unsigned
5626 lower_oacc_head_mark (location_t loc, tree ddvar, tree clauses,
5627 gimple_seq *seq, omp_context *ctx)
5629 unsigned levels = 0;
5630 unsigned tag = 0;
5631 tree gang_static = NULL_TREE;
5632 auto_vec<tree, 5> args;
5634 args.quick_push (build_int_cst
5635 (integer_type_node, IFN_UNIQUE_OACC_HEAD_MARK));
5636 args.quick_push (ddvar);
5637 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
5639 switch (OMP_CLAUSE_CODE (c))
5641 case OMP_CLAUSE_GANG:
5642 tag |= OLF_DIM_GANG;
5643 gang_static = OMP_CLAUSE_GANG_STATIC_EXPR (c);
5644 /* static:* is represented by -1, and we can ignore it, as
5645 scheduling is always static. */
5646 if (gang_static && integer_minus_onep (gang_static))
5647 gang_static = NULL_TREE;
5648 levels++;
5649 break;
5651 case OMP_CLAUSE_WORKER:
5652 tag |= OLF_DIM_WORKER;
5653 levels++;
5654 break;
5656 case OMP_CLAUSE_VECTOR:
5657 tag |= OLF_DIM_VECTOR;
5658 levels++;
5659 break;
5661 case OMP_CLAUSE_SEQ:
5662 tag |= OLF_SEQ;
5663 break;
5665 case OMP_CLAUSE_AUTO:
5666 tag |= OLF_AUTO;
5667 break;
5669 case OMP_CLAUSE_INDEPENDENT:
5670 tag |= OLF_INDEPENDENT;
5671 break;
5673 case OMP_CLAUSE_TILE:
5674 tag |= OLF_TILE;
5675 break;
5677 default:
5678 continue;
5682 if (gang_static)
5684 if (DECL_P (gang_static))
5685 gang_static = build_outer_var_ref (gang_static, ctx);
5686 tag |= OLF_GANG_STATIC;
5689 /* In a parallel region, loops are implicitly INDEPENDENT. */
5690 omp_context *tgt = enclosing_target_ctx (ctx);
5691 if (!tgt || is_oacc_parallel (tgt))
5692 tag |= OLF_INDEPENDENT;
5694 if (tag & OLF_TILE)
5695 /* Tiling could use all 3 levels. */
5696 levels = 3;
5697 else
5699 /* A loop lacking SEQ, GANG, WORKER and/or VECTOR could be AUTO.
5700 Ensure at least one level, or 2 for possible auto
5701 partitioning. */
5702 bool maybe_auto = !(tag & (((GOMP_DIM_MASK (GOMP_DIM_MAX) - 1)
5703 << OLF_DIM_BASE) | OLF_SEQ));
5705 if (levels < 1u + maybe_auto)
5706 levels = 1u + maybe_auto;
5709 args.quick_push (build_int_cst (integer_type_node, levels));
5710 args.quick_push (build_int_cst (integer_type_node, tag));
5711 if (gang_static)
5712 args.quick_push (gang_static);
5714 gcall *call = gimple_build_call_internal_vec (IFN_UNIQUE, args);
5715 gimple_set_location (call, loc);
5716 gimple_set_lhs (call, ddvar);
5717 gimple_seq_add_stmt (seq, call);
5719 return levels;
5722 /* Emit an OpenACC loop head or tail marker to SEQ. TOFOLLOW, when
5723 non-NULL, is the partitioning level of the enclosed region. */
5725 static void
5726 lower_oacc_loop_marker (location_t loc, tree ddvar, bool head,
5727 tree tofollow, gimple_seq *seq)
5729 int marker_kind = (head ? IFN_UNIQUE_OACC_HEAD_MARK
5730 : IFN_UNIQUE_OACC_TAIL_MARK);
5731 tree marker = build_int_cst (integer_type_node, marker_kind);
5732 int nargs = 2 + (tofollow != NULL_TREE);
5733 gcall *call = gimple_build_call_internal (IFN_UNIQUE, nargs,
5734 marker, ddvar, tofollow);
5735 gimple_set_location (call, loc);
5736 gimple_set_lhs (call, ddvar);
5737 gimple_seq_add_stmt (seq, call);
5740 /* Generate the before and after OpenACC loop sequences. CLAUSES are
5741 the loop clauses, from which we extract reductions. Initialize
5742 HEAD and TAIL. */
5744 static void
5745 lower_oacc_head_tail (location_t loc, tree clauses,
5746 gimple_seq *head, gimple_seq *tail, omp_context *ctx)
5748 bool inner = false;
5749 tree ddvar = create_tmp_var (integer_type_node, ".data_dep");
5750 gimple_seq_add_stmt (head, gimple_build_assign (ddvar, integer_zero_node));
5752 unsigned count = lower_oacc_head_mark (loc, ddvar, clauses, head, ctx);
5753 tree fork_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_FORK);
5754 tree join_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_JOIN);
5756 gcc_assert (count);
5757 for (unsigned done = 1; count; count--, done++)
5759 gimple_seq fork_seq = NULL;
5760 gimple_seq join_seq = NULL;
5762 tree place = build_int_cst (integer_type_node, -1);
5763 gcall *fork = gimple_build_call_internal (IFN_UNIQUE, 3,
5764 fork_kind, ddvar, place);
5765 gimple_set_location (fork, loc);
5766 gimple_set_lhs (fork, ddvar);
5768 gcall *join = gimple_build_call_internal (IFN_UNIQUE, 3,
5769 join_kind, ddvar, place);
5770 gimple_set_location (join, loc);
5771 gimple_set_lhs (join, ddvar);
5773 /* Mark the beginning of this level sequence. */
5774 if (inner)
5775 lower_oacc_loop_marker (loc, ddvar, true,
5776 build_int_cst (integer_type_node, count),
5777 &fork_seq);
5778 lower_oacc_loop_marker (loc, ddvar, false,
5779 build_int_cst (integer_type_node, done),
5780 &join_seq);
5782 lower_oacc_reductions (loc, clauses, place, inner,
5783 fork, join, &fork_seq, &join_seq, ctx);
5785 /* Append this level to head. */
5786 gimple_seq_add_seq (head, fork_seq);
5787 /* Prepend it to tail. */
5788 gimple_seq_add_seq (&join_seq, *tail);
5789 *tail = join_seq;
5791 inner = true;
5794 /* Mark the end of the sequence. */
5795 lower_oacc_loop_marker (loc, ddvar, true, NULL_TREE, head);
5796 lower_oacc_loop_marker (loc, ddvar, false, NULL_TREE, tail);
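/* For illustration only (an added sketch): with two partitioning levels
   the sequences built above nest roughly as

     HEAD: head-mark  FORK(outer)  loop-mark  FORK(inner)
     TAIL: JOIN(inner)  loop-mark  JOIN(outer)  tail-mark

   with the reduction setup and teardown from lower_oacc_reductions
   emitted around the matching FORK/JOIN pair of each level.  */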
5799 /* If exceptions are enabled, wrap the statements in BODY in a MUST_NOT_THROW
5800 catch handler and return it. This prevents programs from violating the
5801 structured block semantics with throws. */
5803 static gimple_seq
5804 maybe_catch_exception (gimple_seq body)
5806 gimple *g;
5807 tree decl;
5809 if (!flag_exceptions)
5810 return body;
5812 if (lang_hooks.eh_protect_cleanup_actions != NULL)
5813 decl = lang_hooks.eh_protect_cleanup_actions ();
5814 else
5815 decl = builtin_decl_explicit (BUILT_IN_TRAP);
5817 g = gimple_build_eh_must_not_throw (decl);
5818 g = gimple_build_try (body, gimple_seq_alloc_with_stmt (g),
5819 GIMPLE_TRY_CATCH);
5821 return gimple_seq_alloc_with_stmt (g);
5825 /* Routines to lower OMP directives into OMP-GIMPLE. */
5827 /* If CTX is a worksharing context inside of a cancellable parallel
5828 region and it isn't nowait, add an lhs to its GIMPLE_OMP_RETURN
5829 and a conditional branch to the parallel's cancel_label to handle
5830 cancellation in the implicit barrier. */
5832 static void
5833 maybe_add_implicit_barrier_cancel (omp_context *ctx, gimple_seq *body)
5835 gimple *omp_return = gimple_seq_last_stmt (*body);
5836 gcc_assert (gimple_code (omp_return) == GIMPLE_OMP_RETURN);
5837 if (gimple_omp_return_nowait_p (omp_return))
5838 return;
5839 if (ctx->outer
5840 && gimple_code (ctx->outer->stmt) == GIMPLE_OMP_PARALLEL
5841 && ctx->outer->cancellable)
5843 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_CANCEL);
5844 tree c_bool_type = TREE_TYPE (TREE_TYPE (fndecl));
5845 tree lhs = create_tmp_var (c_bool_type);
5846 gimple_omp_return_set_lhs (omp_return, lhs);
5847 tree fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
5848 gimple *g = gimple_build_cond (NE_EXPR, lhs,
5849 fold_convert (c_bool_type,
5850 boolean_false_node),
5851 ctx->outer->cancel_label, fallthru_label);
5852 gimple_seq_add_stmt (body, g);
5853 gimple_seq_add_stmt (body, gimple_build_label (fallthru_label));
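/* For illustration only (an added sketch): once expanded, the augmented
   GIMPLE_OMP_RETURN above behaves approximately as

     lhs = GOMP_barrier_cancel ();
     if (lhs != 0) goto cancel_label;
   fallthru_label:;

   so a cancelled parallel skips from the implicit barrier straight to
   its cancellation handling code.  */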
5857 /* Lower the OpenMP sections directive in the current statement in GSI_P.
5858 CTX is the enclosing OMP context for the current statement. */
5860 static void
5861 lower_omp_sections (gimple_stmt_iterator *gsi_p, omp_context *ctx)
5863 tree block, control;
5864 gimple_stmt_iterator tgsi;
5865 gomp_sections *stmt;
5866 gimple *t;
5867 gbind *new_stmt, *bind;
5868 gimple_seq ilist, dlist, olist, new_body;
5870 stmt = as_a <gomp_sections *> (gsi_stmt (*gsi_p));
5872 push_gimplify_context ();
5874 dlist = NULL;
5875 ilist = NULL;
5876 lower_rec_input_clauses (gimple_omp_sections_clauses (stmt),
5877 &ilist, &dlist, ctx, NULL);
5879 new_body = gimple_omp_body (stmt);
5880 gimple_omp_set_body (stmt, NULL);
5881 tgsi = gsi_start (new_body);
5882 for (; !gsi_end_p (tgsi); gsi_next (&tgsi))
5884 omp_context *sctx;
5885 gimple *sec_start;
5887 sec_start = gsi_stmt (tgsi);
5888 sctx = maybe_lookup_ctx (sec_start);
5889 gcc_assert (sctx);
5891 lower_omp (gimple_omp_body_ptr (sec_start), sctx);
5892 gsi_insert_seq_after (&tgsi, gimple_omp_body (sec_start),
5893 GSI_CONTINUE_LINKING);
5894 gimple_omp_set_body (sec_start, NULL);
5896 if (gsi_one_before_end_p (tgsi))
5898 gimple_seq l = NULL;
5899 lower_lastprivate_clauses (gimple_omp_sections_clauses (stmt), NULL,
5900 &l, ctx);
5901 gsi_insert_seq_after (&tgsi, l, GSI_CONTINUE_LINKING);
5902 gimple_omp_section_set_last (sec_start);
5905 gsi_insert_after (&tgsi, gimple_build_omp_return (false),
5906 GSI_CONTINUE_LINKING);
5909 block = make_node (BLOCK);
5910 bind = gimple_build_bind (NULL, new_body, block);
5912 olist = NULL;
5913 lower_reduction_clauses (gimple_omp_sections_clauses (stmt), &olist, ctx);
5915 block = make_node (BLOCK);
5916 new_stmt = gimple_build_bind (NULL, NULL, block);
5917 gsi_replace (gsi_p, new_stmt, true);
5919 pop_gimplify_context (new_stmt);
5920 gimple_bind_append_vars (new_stmt, ctx->block_vars);
5921 BLOCK_VARS (block) = gimple_bind_vars (bind);
5922 if (BLOCK_VARS (block))
5923 TREE_USED (block) = 1;
5925 new_body = NULL;
5926 gimple_seq_add_seq (&new_body, ilist);
5927 gimple_seq_add_stmt (&new_body, stmt);
5928 gimple_seq_add_stmt (&new_body, gimple_build_omp_sections_switch ());
5929 gimple_seq_add_stmt (&new_body, bind);
5931 control = create_tmp_var (unsigned_type_node, ".section");
5932 t = gimple_build_omp_continue (control, control);
5933 gimple_omp_sections_set_control (stmt, control);
5934 gimple_seq_add_stmt (&new_body, t);
5936 gimple_seq_add_seq (&new_body, olist);
5937 if (ctx->cancellable)
5938 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
5939 gimple_seq_add_seq (&new_body, dlist);
5941 new_body = maybe_catch_exception (new_body);
5943 bool nowait = omp_find_clause (gimple_omp_sections_clauses (stmt),
5944 OMP_CLAUSE_NOWAIT) != NULL_TREE;
5945 t = gimple_build_omp_return (nowait);
5946 gimple_seq_add_stmt (&new_body, t);
5947 maybe_add_implicit_barrier_cancel (ctx, &new_body);
5949 gimple_bind_set_body (new_stmt, new_body);
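/* For illustration only (an added sketch): the body built above is laid
   out roughly as

     <ilist: private/firstprivate setup>
     GIMPLE_OMP_SECTIONS <.section control var>
     GIMPLE_OMP_SECTIONS_SWITCH
     bind { lowered section bodies, lastprivate after the last one }
     GIMPLE_OMP_CONTINUE (.section, .section)
     <olist: reductions> [cancel_label:] <dlist: destructors>
     GIMPLE_OMP_RETURN [nowait]

   which pass_expand_omp later turns into a switch driven by
   GOMP_sections_start and GOMP_sections_next.  */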
5953 /* A subroutine of lower_omp_single. Expand the simple form of
5954 a GIMPLE_OMP_SINGLE, without a copyprivate clause:
5956 if (GOMP_single_start ())
5957 BODY;
5958 [ GOMP_barrier (); ] -> unless 'nowait' is present.
5960 FIXME. It may be better to delay expanding this logic until
5961 pass_expand_omp. The expanded logic may make the job more difficult
5962 for a synchronization analysis pass. */
5964 static void
5965 lower_omp_single_simple (gomp_single *single_stmt, gimple_seq *pre_p)
5967 location_t loc = gimple_location (single_stmt);
5968 tree tlabel = create_artificial_label (loc);
5969 tree flabel = create_artificial_label (loc);
5970 gimple *call, *cond;
5971 tree lhs, decl;
5973 decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_START);
5974 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (decl)));
5975 call = gimple_build_call (decl, 0);
5976 gimple_call_set_lhs (call, lhs);
5977 gimple_seq_add_stmt (pre_p, call);
5979 cond = gimple_build_cond (EQ_EXPR, lhs,
5980 fold_convert_loc (loc, TREE_TYPE (lhs),
5981 boolean_true_node),
5982 tlabel, flabel);
5983 gimple_seq_add_stmt (pre_p, cond);
5984 gimple_seq_add_stmt (pre_p, gimple_build_label (tlabel));
5985 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
5986 gimple_seq_add_stmt (pre_p, gimple_build_label (flabel));
5990 /* A subroutine of lower_omp_single. Expand the simple form of
5991 a GIMPLE_OMP_SINGLE, with a copyprivate clause:
5993 #pragma omp single copyprivate (a, b, c)
5995 Create a new structure to hold copies of 'a', 'b' and 'c' and emit:
5998 if ((copyout_p = GOMP_single_copy_start ()) == NULL)
6000 BODY;
6001 copyout.a = a;
6002 copyout.b = b;
6003 copyout.c = c;
6004 GOMP_single_copy_end (&copyout);
6006 else
6008 a = copyout_p->a;
6009 b = copyout_p->b;
6010 c = copyout_p->c;
6012 GOMP_barrier ();
6015 FIXME. It may be better to delay expanding this logic until
6016 pass_expand_omp. The expanded logic may make the job more difficult
6017 for a synchronization analysis pass. */
6019 static void
6020 lower_omp_single_copy (gomp_single *single_stmt, gimple_seq *pre_p,
6021 omp_context *ctx)
6023 tree ptr_type, t, l0, l1, l2, bfn_decl;
6024 gimple_seq copyin_seq;
6025 location_t loc = gimple_location (single_stmt);
6027 ctx->sender_decl = create_tmp_var (ctx->record_type, ".omp_copy_o");
6029 ptr_type = build_pointer_type (ctx->record_type);
6030 ctx->receiver_decl = create_tmp_var (ptr_type, ".omp_copy_i");
6032 l0 = create_artificial_label (loc);
6033 l1 = create_artificial_label (loc);
6034 l2 = create_artificial_label (loc);
6036 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_START);
6037 t = build_call_expr_loc (loc, bfn_decl, 0);
6038 t = fold_convert_loc (loc, ptr_type, t);
6039 gimplify_assign (ctx->receiver_decl, t, pre_p);
6041 t = build2 (EQ_EXPR, boolean_type_node, ctx->receiver_decl,
6042 build_int_cst (ptr_type, 0));
6043 t = build3 (COND_EXPR, void_type_node, t,
6044 build_and_jump (&l0), build_and_jump (&l1));
6045 gimplify_and_add (t, pre_p);
6047 gimple_seq_add_stmt (pre_p, gimple_build_label (l0));
6049 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
6051 copyin_seq = NULL;
6052 lower_copyprivate_clauses (gimple_omp_single_clauses (single_stmt), pre_p,
6053 &copyin_seq, ctx);
6055 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
6056 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_END);
6057 t = build_call_expr_loc (loc, bfn_decl, 1, t);
6058 gimplify_and_add (t, pre_p);
6060 t = build_and_jump (&l2);
6061 gimplify_and_add (t, pre_p);
6063 gimple_seq_add_stmt (pre_p, gimple_build_label (l1));
6065 gimple_seq_add_seq (pre_p, copyin_seq);
6067 gimple_seq_add_stmt (pre_p, gimple_build_label (l2));
6071 /* Expand code for an OpenMP single directive. */
6073 static void
6074 lower_omp_single (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6076 tree block;
6077 gomp_single *single_stmt = as_a <gomp_single *> (gsi_stmt (*gsi_p));
6078 gbind *bind;
6079 gimple_seq bind_body, bind_body_tail = NULL, dlist;
6081 push_gimplify_context ();
6083 block = make_node (BLOCK);
6084 bind = gimple_build_bind (NULL, NULL, block);
6085 gsi_replace (gsi_p, bind, true);
6086 bind_body = NULL;
6087 dlist = NULL;
6088 lower_rec_input_clauses (gimple_omp_single_clauses (single_stmt),
6089 &bind_body, &dlist, ctx, NULL);
6090 lower_omp (gimple_omp_body_ptr (single_stmt), ctx);
6092 gimple_seq_add_stmt (&bind_body, single_stmt);
6094 if (ctx->record_type)
6095 lower_omp_single_copy (single_stmt, &bind_body, ctx);
6096 else
6097 lower_omp_single_simple (single_stmt, &bind_body);
6099 gimple_omp_set_body (single_stmt, NULL);
6101 gimple_seq_add_seq (&bind_body, dlist);
6103 bind_body = maybe_catch_exception (bind_body);
6105 bool nowait = omp_find_clause (gimple_omp_single_clauses (single_stmt),
6106 OMP_CLAUSE_NOWAIT) != NULL_TREE;
6107 gimple *g = gimple_build_omp_return (nowait);
6108 gimple_seq_add_stmt (&bind_body_tail, g);
6109 maybe_add_implicit_barrier_cancel (ctx, &bind_body_tail);
6110 if (ctx->record_type)
6112 gimple_stmt_iterator gsi = gsi_start (bind_body_tail);
6113 tree clobber = build_constructor (ctx->record_type, NULL);
6114 TREE_THIS_VOLATILE (clobber) = 1;
6115 gsi_insert_after (&gsi, gimple_build_assign (ctx->sender_decl,
6116 clobber), GSI_SAME_STMT);
6118 gimple_seq_add_seq (&bind_body, bind_body_tail);
6119 gimple_bind_set_body (bind, bind_body);
6121 pop_gimplify_context (bind);
6123 gimple_bind_append_vars (bind, ctx->block_vars);
6124 BLOCK_VARS (block) = ctx->block_vars;
6125 if (BLOCK_VARS (block))
6126 TREE_USED (block) = 1;
6130 /* Expand code for an OpenMP master directive. */
6132 static void
6133 lower_omp_master (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6135 tree block, lab = NULL, x, bfn_decl;
6136 gimple *stmt = gsi_stmt (*gsi_p);
6137 gbind *bind;
6138 location_t loc = gimple_location (stmt);
6139 gimple_seq tseq;
6141 push_gimplify_context ();
6143 block = make_node (BLOCK);
6144 bind = gimple_build_bind (NULL, NULL, block);
6145 gsi_replace (gsi_p, bind, true);
6146 gimple_bind_add_stmt (bind, stmt);
6148 bfn_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
6149 x = build_call_expr_loc (loc, bfn_decl, 0);
6150 x = build2 (EQ_EXPR, boolean_type_node, x, integer_zero_node);
6151 x = build3 (COND_EXPR, void_type_node, x, NULL, build_and_jump (&lab));
6152 tseq = NULL;
6153 gimplify_and_add (x, &tseq);
6154 gimple_bind_add_seq (bind, tseq);
6156 lower_omp (gimple_omp_body_ptr (stmt), ctx);
6157 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
6158 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
6159 gimple_omp_set_body (stmt, NULL);
6161 gimple_bind_add_stmt (bind, gimple_build_label (lab));
6163 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
6165 pop_gimplify_context (bind);
6167 gimple_bind_append_vars (bind, ctx->block_vars);
6168 BLOCK_VARS (block) = ctx->block_vars;
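/* For illustration only (an added sketch): the lowering above amounts to

     if (omp_get_thread_num () != 0) goto lab;
     BODY;
   lab:

   i.e. only the master thread executes BODY, and no barrier is implied.  */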
6172 /* Expand code for an OpenMP taskgroup directive. */
6174 static void
6175 lower_omp_taskgroup (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6177 gimple *stmt = gsi_stmt (*gsi_p);
6178 gcall *x;
6179 gbind *bind;
6180 tree block = make_node (BLOCK);
6182 bind = gimple_build_bind (NULL, NULL, block);
6183 gsi_replace (gsi_p, bind, true);
6184 gimple_bind_add_stmt (bind, stmt);
6186 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_START),
6187 0);
6188 gimple_bind_add_stmt (bind, x);
6190 lower_omp (gimple_omp_body_ptr (stmt), ctx);
6191 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
6192 gimple_omp_set_body (stmt, NULL);
6194 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
6196 gimple_bind_append_vars (bind, ctx->block_vars);
6197 BLOCK_VARS (block) = ctx->block_vars;
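/* For illustration only (an added sketch): the lowering above amounts to

     GOMP_taskgroup_start ();
     BODY;
     GIMPLE_OMP_RETURN

   with the matching GOMP_taskgroup_end call emitted later, when the
   region's exit is expanded.  */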
6201 /* Fold the OMP_ORDERED_CLAUSES for the OMP_ORDERED in STMT if possible. */
6203 static void
6204 lower_omp_ordered_clauses (gimple_stmt_iterator *gsi_p, gomp_ordered *ord_stmt,
6205 omp_context *ctx)
6207 struct omp_for_data fd;
6208 if (!ctx->outer || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR)
6209 return;
6211 unsigned int len = gimple_omp_for_collapse (ctx->outer->stmt);
6212 struct omp_for_data_loop *loops = XALLOCAVEC (struct omp_for_data_loop, len);
6213 omp_extract_for_data (as_a <gomp_for *> (ctx->outer->stmt), &fd, loops);
6214 if (!fd.ordered)
6215 return;
6217 tree *list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
6218 tree c = gimple_omp_ordered_clauses (ord_stmt);
6219 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
6220 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
6222 /* Merge depend clauses from multiple adjacent
6223 #pragma omp ordered depend(sink:...) constructs
6224 into one #pragma omp ordered depend(sink:...), so that
6225 we can optimize them together. */
6226 gimple_stmt_iterator gsi = *gsi_p;
6227 gsi_next (&gsi);
6228 while (!gsi_end_p (gsi))
6230 gimple *stmt = gsi_stmt (gsi);
6231 if (is_gimple_debug (stmt)
6232 || gimple_code (stmt) == GIMPLE_NOP)
6234 gsi_next (&gsi);
6235 continue;
6237 if (gimple_code (stmt) != GIMPLE_OMP_ORDERED)
6238 break;
6239 gomp_ordered *ord_stmt2 = as_a <gomp_ordered *> (stmt);
6240 c = gimple_omp_ordered_clauses (ord_stmt2);
6241 if (c == NULL_TREE
6242 || OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND
6243 || OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
6244 break;
6245 while (*list_p)
6246 list_p = &OMP_CLAUSE_CHAIN (*list_p);
6247 *list_p = c;
6248 gsi_remove (&gsi, true);
6252 /* Canonicalize sink dependence clauses into one folded clause if
6253 possible.
6255 The basic algorithm is to create a sink vector whose first
6256 element is the GCD of all the first elements, and whose remaining
6257 elements are the minimum of the subsequent columns.
6259 We ignore dependence vectors whose first element is zero because
6260 such dependencies are known to be executed by the same thread.
6262 We take into account the direction of the loop, so a minimum
6263 becomes a maximum if the loop is iterating forwards. We also
6264 ignore sink clauses where the loop direction is unknown, or where
6265 the offsets are clearly invalid because they are not a multiple
6266 of the loop increment.
6268 For example:
6270 #pragma omp for ordered(2)
6271 for (i=0; i < N; ++i)
6272 for (j=0; j < M; ++j)
6274 #pragma omp ordered \
6275 depend(sink:i-8,j-2) \
6276 depend(sink:i,j-1) \ // Completely ignored because i+0.
6277 depend(sink:i-4,j-3) \
6278 depend(sink:i-6,j-4)
6279 #pragma omp ordered depend(source)
6282 Folded clause is:
6284 depend(sink:-gcd(8,4,6),-min(2,3,4))
6285 -or-
6286 depend(sink:-2,-2)
6289 /* FIXME: Computing GCDs where the first element is zero is
6290 non-trivial in the presence of collapsed loops. Do this later. */
6291 if (fd.collapse > 1)
6292 return;
6294 wide_int *folded_deps = XALLOCAVEC (wide_int, 2 * len - 1);
6295 memset (folded_deps, 0, sizeof (*folded_deps) * (2 * len - 1));
6296 tree folded_dep = NULL_TREE;
6297 /* TRUE if the first dimension's offset is negative. */
6298 bool neg_offset_p = false;
6300 list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
6301 unsigned int i;
6302 while ((c = *list_p) != NULL)
6304 bool remove = false;
6306 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND);
6307 if (OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
6308 goto next_ordered_clause;
6310 tree vec;
6311 for (vec = OMP_CLAUSE_DECL (c), i = 0;
6312 vec && TREE_CODE (vec) == TREE_LIST;
6313 vec = TREE_CHAIN (vec), ++i)
6315 gcc_assert (i < len);
6317 /* omp_extract_for_data has canonicalized the condition. */
6318 gcc_assert (fd.loops[i].cond_code == LT_EXPR
6319 || fd.loops[i].cond_code == GT_EXPR);
6320 bool forward = fd.loops[i].cond_code == LT_EXPR;
6321 bool maybe_lexically_later = true;
6323 /* While the committee makes up its mind, bail if we have any
6324 non-constant steps. */
6325 if (TREE_CODE (fd.loops[i].step) != INTEGER_CST)
6326 goto lower_omp_ordered_ret;
6328 tree itype = TREE_TYPE (TREE_VALUE (vec));
6329 if (POINTER_TYPE_P (itype))
6330 itype = sizetype;
6331 wide_int offset = wide_int::from (TREE_PURPOSE (vec),
6332 TYPE_PRECISION (itype),
6333 TYPE_SIGN (itype));
6335 /* Ignore invalid offsets that are not multiples of the step. */
6336 if (!wi::multiple_of_p
6337 (wi::abs (offset), wi::abs ((wide_int) fd.loops[i].step),
6338 UNSIGNED))
6340 warning_at (OMP_CLAUSE_LOCATION (c), 0,
6341 "ignoring sink clause with offset that is not "
6342 "a multiple of the loop step");
6343 remove = true;
6344 goto next_ordered_clause;
6347 /* Calculate the first dimension. The first dimension of
6348 the folded dependency vector is the GCD of the first
6349 elements, while ignoring any first elements whose offset
6350 is 0. */
6351 if (i == 0)
6353 /* Ignore dependence vectors whose first dimension is 0. */
6354 if (offset == 0)
6356 remove = true;
6357 goto next_ordered_clause;
6359 else
6361 if (!TYPE_UNSIGNED (itype) && (forward ^ wi::neg_p (offset)))
6363 error_at (OMP_CLAUSE_LOCATION (c),
6364 "first offset must be in opposite direction "
6365 "of loop iterations");
6366 goto lower_omp_ordered_ret;
6368 if (forward)
6369 offset = -offset;
6370 neg_offset_p = forward;
6371 /* Initialize the first time around. */
6372 if (folded_dep == NULL_TREE)
6374 folded_dep = c;
6375 folded_deps[0] = offset;
6377 else
6378 folded_deps[0] = wi::gcd (folded_deps[0],
6379 offset, UNSIGNED);
6382 /* Calculate minimum for the remaining dimensions. */
6383 else
6385 folded_deps[len + i - 1] = offset;
6386 if (folded_dep == c)
6387 folded_deps[i] = offset;
6388 else if (maybe_lexically_later
6389 && !wi::eq_p (folded_deps[i], offset))
6391 if (forward ^ wi::gts_p (folded_deps[i], offset))
6393 unsigned int j;
6394 folded_dep = c;
6395 for (j = 1; j <= i; j++)
6396 folded_deps[j] = folded_deps[len + j - 1];
6398 else
6399 maybe_lexically_later = false;
6403 gcc_assert (i == len);
6405 remove = true;
6407 next_ordered_clause:
6408 if (remove)
6409 *list_p = OMP_CLAUSE_CHAIN (c);
6410 else
6411 list_p = &OMP_CLAUSE_CHAIN (c);
6414 if (folded_dep)
6416 if (neg_offset_p)
6417 folded_deps[0] = -folded_deps[0];
6419 tree itype = TREE_TYPE (TREE_VALUE (OMP_CLAUSE_DECL (folded_dep)));
6420 if (POINTER_TYPE_P (itype))
6421 itype = sizetype;
6423 TREE_PURPOSE (OMP_CLAUSE_DECL (folded_dep))
6424 = wide_int_to_tree (itype, folded_deps[0]);
6425 OMP_CLAUSE_CHAIN (folded_dep) = gimple_omp_ordered_clauses (ord_stmt);
6426 *gimple_omp_ordered_clauses_ptr (ord_stmt) = folded_dep;
6429 lower_omp_ordered_ret:
6431 /* Ordered without clauses is #pragma omp ordered threads, while we
6432 want a nop instead if we remove all clauses. */
6433 if (gimple_omp_ordered_clauses (ord_stmt) == NULL_TREE)
6434 gsi_replace (gsi_p, gimple_build_nop (), true);
6438 /* Expand code for an OpenMP ordered directive. */
6440 static void
6441 lower_omp_ordered (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6443 tree block;
6444 gimple *stmt = gsi_stmt (*gsi_p), *g;
6445 gomp_ordered *ord_stmt = as_a <gomp_ordered *> (stmt);
6446 gcall *x;
6447 gbind *bind;
6448 bool simd = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
6449 OMP_CLAUSE_SIMD);
6450 /* FIXME: this should check presence of OMP_CLAUSE__SIMT_ on the enclosing
6451 loop. */
6452 bool maybe_simt
6453 = simd && omp_maybe_offloaded_ctx (ctx) && omp_max_simt_vf () > 1;
6454 bool threads = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
6455 OMP_CLAUSE_THREADS);
6457 if (omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
6458 OMP_CLAUSE_DEPEND))
6460 /* FIXME: This needs to be moved to the expansion to verify various
6461 conditions only testable on a cfg with dominators computed, and also
6462 all the depend clauses to be merged still might need to be available
6463 for the runtime checks. */
6464 if (0)
6465 lower_omp_ordered_clauses (gsi_p, ord_stmt, ctx);
6466 return;
6469 push_gimplify_context ();
6471 block = make_node (BLOCK);
6472 bind = gimple_build_bind (NULL, NULL, block);
6473 gsi_replace (gsi_p, bind, true);
6474 gimple_bind_add_stmt (bind, stmt);
6476 if (simd)
6478 x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_START, 1,
6479 build_int_cst (NULL_TREE, threads));
6480 cfun->has_simduid_loops = true;
6482 else
6483 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_START),
6484 0);
6485 gimple_bind_add_stmt (bind, x);
6487 tree counter = NULL_TREE, test = NULL_TREE, body = NULL_TREE;
6488 if (maybe_simt)
6490 counter = create_tmp_var (integer_type_node);
6491 g = gimple_build_call_internal (IFN_GOMP_SIMT_LANE, 0);
6492 gimple_call_set_lhs (g, counter);
6493 gimple_bind_add_stmt (bind, g);
6495 body = create_artificial_label (UNKNOWN_LOCATION);
6496 test = create_artificial_label (UNKNOWN_LOCATION);
6497 gimple_bind_add_stmt (bind, gimple_build_label (body));
6499 tree simt_pred = create_tmp_var (integer_type_node);
6500 g = gimple_build_call_internal (IFN_GOMP_SIMT_ORDERED_PRED, 1, counter);
6501 gimple_call_set_lhs (g, simt_pred);
6502 gimple_bind_add_stmt (bind, g);
6504 tree t = create_artificial_label (UNKNOWN_LOCATION);
6505 g = gimple_build_cond (EQ_EXPR, simt_pred, integer_zero_node, t, test);
6506 gimple_bind_add_stmt (bind, g);
6508 gimple_bind_add_stmt (bind, gimple_build_label (t));
6510 lower_omp (gimple_omp_body_ptr (stmt), ctx);
6511 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
6512 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
6513 gimple_omp_set_body (stmt, NULL);
6515 if (maybe_simt)
6517 gimple_bind_add_stmt (bind, gimple_build_label (test));
6518 g = gimple_build_assign (counter, MINUS_EXPR, counter, integer_one_node);
6519 gimple_bind_add_stmt (bind, g);
6521 tree c = build2 (GE_EXPR, boolean_type_node, counter, integer_zero_node);
6522 tree nonneg = create_tmp_var (integer_type_node);
6523 gimple_seq tseq = NULL;
6524 gimplify_assign (nonneg, fold_convert (integer_type_node, c), &tseq);
6525 gimple_bind_add_seq (bind, tseq);
6527 g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY, 1, nonneg);
6528 gimple_call_set_lhs (g, nonneg);
6529 gimple_bind_add_stmt (bind, g);
6531 tree end = create_artificial_label (UNKNOWN_LOCATION);
6532 g = gimple_build_cond (NE_EXPR, nonneg, integer_zero_node, body, end);
6533 gimple_bind_add_stmt (bind, g);
6535 gimple_bind_add_stmt (bind, gimple_build_label (end));
6537 if (simd)
6538 x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_END, 1,
6539 build_int_cst (NULL_TREE, threads));
6540 else
6541 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_END),
6542 0);
6543 gimple_bind_add_stmt (bind, x);
6545 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
6547 pop_gimplify_context (bind);
6549 gimple_bind_append_vars (bind, ctx->block_vars);
6550 BLOCK_VARS (block) = gimple_bind_vars (bind);
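/* For illustration only (an added sketch): in the plain (non-simd) case
   the lowering above amounts to

     GOMP_ordered_start ();
     BODY;
     GOMP_ordered_end ();

   while simd loops use the IFN_GOMP_SIMD_ORDERED_{START,END} internal
   calls so the region survives until vectorization, and the maybe_simt
   path additionally serializes the SIMT lanes by looping BODY under an
   IFN_GOMP_SIMT_ORDERED_PRED predicate.  */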
6554 /* Gimplify a GIMPLE_OMP_CRITICAL statement. This is a relatively simple
6555 substitution of a couple of function calls. But the NAMED case
6556 requires that languages coordinate a symbol name. It is therefore
6557 best put here in common code. */
6559 static GTY(()) hash_map<tree, tree> *critical_name_mutexes;
6561 static void
6562 lower_omp_critical (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6564 tree block;
6565 tree name, lock, unlock;
6566 gomp_critical *stmt = as_a <gomp_critical *> (gsi_stmt (*gsi_p));
6567 gbind *bind;
6568 location_t loc = gimple_location (stmt);
6569 gimple_seq tbody;
6571 name = gimple_omp_critical_name (stmt);
6572 if (name)
6574 tree decl;
6576 if (!critical_name_mutexes)
6577 critical_name_mutexes = hash_map<tree, tree>::create_ggc (10);
6579 tree *n = critical_name_mutexes->get (name);
6580 if (n == NULL)
6582 char *new_str;
6584 decl = create_tmp_var_raw (ptr_type_node);
6586 new_str = ACONCAT ((".gomp_critical_user_",
6587 IDENTIFIER_POINTER (name), NULL));
6588 DECL_NAME (decl) = get_identifier (new_str);
6589 TREE_PUBLIC (decl) = 1;
6590 TREE_STATIC (decl) = 1;
6591 DECL_COMMON (decl) = 1;
6592 DECL_ARTIFICIAL (decl) = 1;
6593 DECL_IGNORED_P (decl) = 1;
6595 varpool_node::finalize_decl (decl);
6597 critical_name_mutexes->put (name, decl);
6599 else
6600 decl = *n;
6602 /* If '#pragma omp critical' is inside an offloaded region or
6603 inside a function marked as offloadable, the symbol must be
6604 marked as offloadable too. */
6605 omp_context *octx;
6606 if (cgraph_node::get (current_function_decl)->offloadable)
6607 varpool_node::get_create (decl)->offloadable = 1;
6608 else
6609 for (octx = ctx->outer; octx; octx = octx->outer)
6610 if (is_gimple_omp_offloaded (octx->stmt))
6612 varpool_node::get_create (decl)->offloadable = 1;
6613 break;
6616 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_START);
6617 lock = build_call_expr_loc (loc, lock, 1,
6618 build_fold_addr_expr_loc (loc, decl));
6620 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_END);
6621 unlock = build_call_expr_loc (loc, unlock, 1,
6622 build_fold_addr_expr_loc (loc, decl));
6624 else
6626 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_START);
6627 lock = build_call_expr_loc (loc, lock, 0);
6629 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_END);
6630 unlock = build_call_expr_loc (loc, unlock, 0);
6633 push_gimplify_context ();
6635 block = make_node (BLOCK);
6636 bind = gimple_build_bind (NULL, NULL, block);
6637 gsi_replace (gsi_p, bind, true);
6638 gimple_bind_add_stmt (bind, stmt);
6640 tbody = gimple_bind_body (bind);
6641 gimplify_and_add (lock, &tbody);
6642 gimple_bind_set_body (bind, tbody);
6644 lower_omp (gimple_omp_body_ptr (stmt), ctx);
6645 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
6646 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
6647 gimple_omp_set_body (stmt, NULL);
6649 tbody = gimple_bind_body (bind);
6650 gimplify_and_add (unlock, &tbody);
6651 gimple_bind_set_body (bind, tbody);
6653 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
6655 pop_gimplify_context (bind);
6656 gimple_bind_append_vars (bind, ctx->block_vars);
6657 BLOCK_VARS (block) = gimple_bind_vars (bind);
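/* For illustration only (an added sketch): for "#pragma omp critical (foo)"
   the lowering above amounts to

     GOMP_critical_name_start (&.gomp_critical_user_foo);
     BODY;
     GOMP_critical_name_end (&.gomp_critical_user_foo);

   while the unnamed form uses GOMP_critical_start and GOMP_critical_end.  */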
6660 /* A subroutine of lower_omp_for. Generate code to emit the predicate
6661 for a lastprivate clause. Given a loop control predicate of (V
6662 cond N2), we gate the clause on (!(V cond N2)). The lowered form
6663 is appended to *DLIST, and iterator initialization is appended to
6664 *BODY_P. */
6666 static void
6667 lower_omp_for_lastprivate (struct omp_for_data *fd, gimple_seq *body_p,
6668 gimple_seq *dlist, struct omp_context *ctx)
6670 tree clauses, cond, vinit;
6671 enum tree_code cond_code;
6672 gimple_seq stmts;
6674 cond_code = fd->loop.cond_code;
6675 cond_code = cond_code == LT_EXPR ? GE_EXPR : LE_EXPR;
6677 /* When possible, use a strict equality expression. This can let
6678 VRP-style optimizations deduce the value and remove a copy. */
6679 if (tree_fits_shwi_p (fd->loop.step))
6681 HOST_WIDE_INT step = tree_to_shwi (fd->loop.step);
6682 if (step == 1 || step == -1)
6683 cond_code = EQ_EXPR;
6686 if (gimple_omp_for_kind (fd->for_stmt) == GF_OMP_FOR_KIND_GRID_LOOP
6687 || gimple_omp_for_grid_phony (fd->for_stmt))
6688 cond = omp_grid_lastprivate_predicate (fd);
6689 else
6691 tree n2 = fd->loop.n2;
6692 if (fd->collapse > 1
6693 && TREE_CODE (n2) != INTEGER_CST
6694 && gimple_omp_for_combined_into_p (fd->for_stmt))
6696 struct omp_context *taskreg_ctx = NULL;
6697 if (gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
6699 gomp_for *gfor = as_a <gomp_for *> (ctx->outer->stmt);
6700 if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_FOR
6701 || gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_DISTRIBUTE)
6703 if (gimple_omp_for_combined_into_p (gfor))
6705 gcc_assert (ctx->outer->outer
6706 && is_parallel_ctx (ctx->outer->outer));
6707 taskreg_ctx = ctx->outer->outer;
6709 else
6711 struct omp_for_data outer_fd;
6712 omp_extract_for_data (gfor, &outer_fd, NULL);
6713 n2 = fold_convert (TREE_TYPE (n2), outer_fd.loop.n2);
6716 else if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_TASKLOOP)
6717 taskreg_ctx = ctx->outer->outer;
6719 else if (is_taskreg_ctx (ctx->outer))
6720 taskreg_ctx = ctx->outer;
6721 if (taskreg_ctx)
6723 int i;
6724 tree taskreg_clauses
6725 = gimple_omp_taskreg_clauses (taskreg_ctx->stmt);
6726 tree innerc = omp_find_clause (taskreg_clauses,
6727 OMP_CLAUSE__LOOPTEMP_);
6728 gcc_assert (innerc);
6729 for (i = 0; i < fd->collapse; i++)
6731 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
6732 OMP_CLAUSE__LOOPTEMP_);
6733 gcc_assert (innerc);
6735 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
6736 OMP_CLAUSE__LOOPTEMP_);
6737 if (innerc)
6738 n2 = fold_convert (TREE_TYPE (n2),
6739 lookup_decl (OMP_CLAUSE_DECL (innerc),
6740 taskreg_ctx));
6743 cond = build2 (cond_code, boolean_type_node, fd->loop.v, n2);
6746 clauses = gimple_omp_for_clauses (fd->for_stmt);
6747 stmts = NULL;
6748 lower_lastprivate_clauses (clauses, cond, &stmts, ctx);
6749 if (!gimple_seq_empty_p (stmts))
6751 gimple_seq_add_seq (&stmts, *dlist);
6752 *dlist = stmts;
6754 /* Optimize: v = 0; is usually cheaper than v = some_other_constant. */
6755 vinit = fd->loop.n1;
6756 if (cond_code == EQ_EXPR
6757 && tree_fits_shwi_p (fd->loop.n2)
6758 && ! integer_zerop (fd->loop.n2))
6759 vinit = build_int_cst (TREE_TYPE (fd->loop.v), 0);
6760 else
6761 vinit = unshare_expr (vinit);
6763 /* Initialize the iterator variable, so that threads that don't execute
6764 any iterations don't execute the lastprivate clauses by accident. */
6765 gimplify_assign (fd->loop.v, vinit, body_p);
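/* For illustration only (an added sketch): for "for (V = N1; V < N2; V++)"
   the code built above initializes V before the loop and gates the
   lastprivate copies roughly as

     V = N1;   (or 0, when the strict-equality trick applies)
     ... loop ...
     if (V >= N2)   (V == N2 when the step is +-1)
       <copy the private variables back to the originals>

   so threads that did not run the final iteration skip the copies.  */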
6770 /* Lower code for an OMP loop directive. */
6772 static void
6773 lower_omp_for (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6775 tree *rhs_p, block;
6776 struct omp_for_data fd, *fdp = NULL;
6777 gomp_for *stmt = as_a <gomp_for *> (gsi_stmt (*gsi_p));
6778 gbind *new_stmt;
6779 gimple_seq omp_for_body, body, dlist;
6780 gimple_seq oacc_head = NULL, oacc_tail = NULL;
6781 size_t i;
6783 push_gimplify_context ();
6785 lower_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
6787 block = make_node (BLOCK);
6788 new_stmt = gimple_build_bind (NULL, NULL, block);
6789 /* Replace at gsi right away, so that 'stmt' is no longer a member
6790 of a sequence, as we're going to add it to a different
6791 one below. */
6792 gsi_replace (gsi_p, new_stmt, true);
6794 /* Move declaration of temporaries in the loop body before we make
6795 it go away. */
6796 omp_for_body = gimple_omp_body (stmt);
6797 if (!gimple_seq_empty_p (omp_for_body)
6798 && gimple_code (gimple_seq_first_stmt (omp_for_body)) == GIMPLE_BIND)
6800 gbind *inner_bind
6801 = as_a <gbind *> (gimple_seq_first_stmt (omp_for_body));
6802 tree vars = gimple_bind_vars (inner_bind);
6803 gimple_bind_append_vars (new_stmt, vars);
6804 /* bind_vars/BLOCK_VARS are being moved to new_stmt/block, don't
6805 keep them on the inner_bind and its block. */
6806 gimple_bind_set_vars (inner_bind, NULL_TREE);
6807 if (gimple_bind_block (inner_bind))
6808 BLOCK_VARS (gimple_bind_block (inner_bind)) = NULL_TREE;
6811 if (gimple_omp_for_combined_into_p (stmt))
6813 omp_extract_for_data (stmt, &fd, NULL);
6814 fdp = &fd;
6816 /* We need two temporaries with fd.loop.v type (istart/iend)
6817 and then (fd.collapse - 1) temporaries with the same
6818 type for count2 ... countN-1 vars if not constant. */
6819 size_t count = 2;
6820 tree type = fd.iter_type;
6821 if (fd.collapse > 1
6822 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
6823 count += fd.collapse - 1;
6824 bool taskreg_for
6825 = (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR
6826 || gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP);
6827 tree outerc = NULL, *pc = gimple_omp_for_clauses_ptr (stmt);
6828 tree simtc = NULL;
6829 tree clauses = *pc;
6830 if (taskreg_for)
6831 outerc
6832 = omp_find_clause (gimple_omp_taskreg_clauses (ctx->outer->stmt),
6833 OMP_CLAUSE__LOOPTEMP_);
6834 if (ctx->simt_stmt)
6835 simtc = omp_find_clause (gimple_omp_for_clauses (ctx->simt_stmt),
6836 OMP_CLAUSE__LOOPTEMP_);
6837 for (i = 0; i < count; i++)
6839 tree temp;
6840 if (taskreg_for)
6842 gcc_assert (outerc);
6843 temp = lookup_decl (OMP_CLAUSE_DECL (outerc), ctx->outer);
6844 outerc = omp_find_clause (OMP_CLAUSE_CHAIN (outerc),
6845 OMP_CLAUSE__LOOPTEMP_);
6847 else
6849 /* If there are 2 adjacent SIMD stmts, one with _simt_
6850 clause, another without, make sure they have the same
6851 decls in _looptemp_ clauses, because the outer stmt
6852 they are combined into will look up just one inner_stmt. */
6853 if (ctx->simt_stmt)
6854 temp = OMP_CLAUSE_DECL (simtc);
6855 else
6856 temp = create_tmp_var (type);
6857 insert_decl_map (&ctx->outer->cb, temp, temp);
6859 *pc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
6860 OMP_CLAUSE_DECL (*pc) = temp;
6861 pc = &OMP_CLAUSE_CHAIN (*pc);
6862 if (ctx->simt_stmt)
6863 simtc = omp_find_clause (OMP_CLAUSE_CHAIN (simtc),
6864 OMP_CLAUSE__LOOPTEMP_);
6866 *pc = clauses;
6869 /* The pre-body and input clauses go before the lowered GIMPLE_OMP_FOR. */
6870 dlist = NULL;
6871 body = NULL;
6872 lower_rec_input_clauses (gimple_omp_for_clauses (stmt), &body, &dlist, ctx,
6873 fdp);
6874 gimple_seq_add_seq (&body, gimple_omp_for_pre_body (stmt));
6876 lower_omp (gimple_omp_body_ptr (stmt), ctx);
6878 /* Lower the header expressions. At this point, we can assume that
6879 the header is of the form:
6881 #pragma omp for (V = VAL1; V {<|>|<=|>=} VAL2; V = V [+-] VAL3)
6883 We just need to make sure that VAL1, VAL2 and VAL3 are lowered
6884 using the .omp_data_s mapping, if needed. */
6885 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
6887 rhs_p = gimple_omp_for_initial_ptr (stmt, i);
6888 if (!is_gimple_min_invariant (*rhs_p))
6889 *rhs_p = get_formal_tmp_var (*rhs_p, &body);
6891 rhs_p = gimple_omp_for_final_ptr (stmt, i);
6892 if (!is_gimple_min_invariant (*rhs_p))
6893 *rhs_p = get_formal_tmp_var (*rhs_p, &body);
6895 rhs_p = &TREE_OPERAND (gimple_omp_for_incr (stmt, i), 1);
6896 if (!is_gimple_min_invariant (*rhs_p))
6897 *rhs_p = get_formal_tmp_var (*rhs_p, &body);
6900 /* Once lowered, extract the bounds and clauses. */
6901 omp_extract_for_data (stmt, &fd, NULL);
6903 if (is_gimple_omp_oacc (ctx->stmt)
6904 && !ctx_in_oacc_kernels_region (ctx))
6905 lower_oacc_head_tail (gimple_location (stmt),
6906 gimple_omp_for_clauses (stmt),
6907 &oacc_head, &oacc_tail, ctx);
6909 /* Add OpenACC partitioning and reduction markers just before the loop. */
6910 if (oacc_head)
6911 gimple_seq_add_seq (&body, oacc_head);
6913 lower_omp_for_lastprivate (&fd, &body, &dlist, ctx);
6915 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
6916 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
6917 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
6918 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
6920 OMP_CLAUSE_DECL (c) = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
6921 if (DECL_P (OMP_CLAUSE_LINEAR_STEP (c)))
6922 OMP_CLAUSE_LINEAR_STEP (c)
6923 = maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_LINEAR_STEP (c),
6924 ctx);
6927 bool phony_loop = (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_GRID_LOOP
6928 && gimple_omp_for_grid_phony (stmt));
6929 if (!phony_loop)
6930 gimple_seq_add_stmt (&body, stmt);
6931 gimple_seq_add_seq (&body, gimple_omp_body (stmt));
6933 if (!phony_loop)
6934 gimple_seq_add_stmt (&body, gimple_build_omp_continue (fd.loop.v,
6935 fd.loop.v));
6937 /* After the loop, add exit clauses. */
6938 lower_reduction_clauses (gimple_omp_for_clauses (stmt), &body, ctx);
6940 if (ctx->cancellable)
6941 gimple_seq_add_stmt (&body, gimple_build_label (ctx->cancel_label));
6943 gimple_seq_add_seq (&body, dlist);
6945 body = maybe_catch_exception (body);
6947 if (!phony_loop)
6949 /* Region exit marker goes at the end of the loop body. */
6950 gimple_seq_add_stmt (&body, gimple_build_omp_return (fd.have_nowait));
6951 maybe_add_implicit_barrier_cancel (ctx, &body);
6954 /* Add OpenACC joining and reduction markers just after the loop. */
6955 if (oacc_tail)
6956 gimple_seq_add_seq (&body, oacc_tail);
6958 pop_gimplify_context (new_stmt);
6960 gimple_bind_append_vars (new_stmt, ctx->block_vars);
6961 BLOCK_VARS (block) = gimple_bind_vars (new_stmt);
6962 if (BLOCK_VARS (block))
6963 TREE_USED (block) = 1;
6965 gimple_bind_set_body (new_stmt, body);
6966 gimple_omp_set_body (stmt, NULL);
6967 gimple_omp_for_set_pre_body (stmt, NULL);
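/* For illustration only (an added sketch): the body assembled above for a
   worksharing loop is laid out roughly as

     <rec input clauses> <pre-body> <lowered bound temporaries>
     [oacc head marks] <lastprivate V init>
     GIMPLE_OMP_FOR
     BODY
     GIMPLE_OMP_CONTINUE (V, V)
     <reductions> [cancel_label:] <dlist>
     GIMPLE_OMP_RETURN [nowait] [oacc tail marks]

   and pass_expand_omp later rewrites the OMP_FOR/CONTINUE/RETURN skeleton
   into explicit iteration-space code and libgomp calls.  */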
6970 /* Callback for walk_gimple_seq. Check if the walked statements contain
6971 exactly one GIMPLE_OMP_FOR or GIMPLE_OMP_SECTIONS and nothing else. */
6973 static tree
6974 check_combined_parallel (gimple_stmt_iterator *gsi_p,
6975 bool *handled_ops_p,
6976 struct walk_stmt_info *wi)
6978 int *info = (int *) wi->info;
6979 gimple *stmt = gsi_stmt (*gsi_p);
6981 *handled_ops_p = true;
6982 switch (gimple_code (stmt))
6984 WALK_SUBSTMTS;
6986 case GIMPLE_OMP_FOR:
6987 case GIMPLE_OMP_SECTIONS:
6988 *info = *info == 0 ? 1 : -1;
6989 break;
6990 default:
6991 *info = -1;
6992 break;
6994 return NULL;
6997 struct omp_taskcopy_context
6999 /* This field must be at the beginning, as we do "inheritance": Some
7000 callback functions for tree-inline.c (e.g., omp_copy_decl)
7001 receive a copy_body_data pointer that is up-casted to an
7002 omp_context pointer. */
7003 copy_body_data cb;
7004 omp_context *ctx;
7007 static tree
7008 task_copyfn_copy_decl (tree var, copy_body_data *cb)
7010 struct omp_taskcopy_context *tcctx = (struct omp_taskcopy_context *) cb;
7012 if (splay_tree_lookup (tcctx->ctx->sfield_map, (splay_tree_key) var))
7013 return create_tmp_var (TREE_TYPE (var));
7015 return var;
7018 static tree
7019 task_copyfn_remap_type (struct omp_taskcopy_context *tcctx, tree orig_type)
7021 tree name, new_fields = NULL, type, f;
7023 type = lang_hooks.types.make_type (RECORD_TYPE);
7024 name = DECL_NAME (TYPE_NAME (orig_type));
7025 name = build_decl (gimple_location (tcctx->ctx->stmt),
7026 TYPE_DECL, name, type);
7027 TYPE_NAME (type) = name;
7029 for (f = TYPE_FIELDS (orig_type); f ; f = TREE_CHAIN (f))
7031 tree new_f = copy_node (f);
7032 DECL_CONTEXT (new_f) = type;
7033 TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &tcctx->cb);
7034 TREE_CHAIN (new_f) = new_fields;
7035 walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &tcctx->cb, NULL);
7036 walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r, &tcctx->cb, NULL);
7037 walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
7038 &tcctx->cb, NULL);
7039 new_fields = new_f;
7040 tcctx->cb.decl_map->put (f, new_f);
7042 TYPE_FIELDS (type) = nreverse (new_fields);
7043 layout_type (type);
7044 return type;
7047 /* Create task copyfn. */
7049 static void
7050 create_task_copyfn (gomp_task *task_stmt, omp_context *ctx)
7052 struct function *child_cfun;
7053 tree child_fn, t, c, src, dst, f, sf, arg, sarg, decl;
7054 tree record_type, srecord_type, bind, list;
7055 bool record_needs_remap = false, srecord_needs_remap = false;
7056 splay_tree_node n;
7057 struct omp_taskcopy_context tcctx;
7058 location_t loc = gimple_location (task_stmt);
7060 child_fn = gimple_omp_task_copy_fn (task_stmt);
7061 child_cfun = DECL_STRUCT_FUNCTION (child_fn);
7062 gcc_assert (child_cfun->cfg == NULL);
7063 DECL_SAVED_TREE (child_fn) = alloc_stmt_list ();
7065 /* Reset DECL_CONTEXT on function arguments. */
7066 for (t = DECL_ARGUMENTS (child_fn); t; t = DECL_CHAIN (t))
7067 DECL_CONTEXT (t) = child_fn;
7069 /* Populate the function. */
7070 push_gimplify_context ();
7071 push_cfun (child_cfun);
7073 bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
7074 TREE_SIDE_EFFECTS (bind) = 1;
7075 list = NULL;
7076 DECL_SAVED_TREE (child_fn) = bind;
7077 DECL_SOURCE_LOCATION (child_fn) = gimple_location (task_stmt);
7079 /* Remap src and dst argument types if needed. */
7080 record_type = ctx->record_type;
7081 srecord_type = ctx->srecord_type;
7082 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
7083 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
7085 record_needs_remap = true;
7086 break;
7088 for (f = TYPE_FIELDS (srecord_type); f ; f = DECL_CHAIN (f))
7089 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
7091 srecord_needs_remap = true;
7092 break;
7095 if (record_needs_remap || srecord_needs_remap)
7097 memset (&tcctx, '\0', sizeof (tcctx));
7098 tcctx.cb.src_fn = ctx->cb.src_fn;
7099 tcctx.cb.dst_fn = child_fn;
7100 tcctx.cb.src_node = cgraph_node::get (tcctx.cb.src_fn);
7101 gcc_checking_assert (tcctx.cb.src_node);
7102 tcctx.cb.dst_node = tcctx.cb.src_node;
7103 tcctx.cb.src_cfun = ctx->cb.src_cfun;
7104 tcctx.cb.copy_decl = task_copyfn_copy_decl;
7105 tcctx.cb.eh_lp_nr = 0;
7106 tcctx.cb.transform_call_graph_edges = CB_CGE_MOVE;
7107 tcctx.cb.decl_map = new hash_map<tree, tree>;
7108 tcctx.ctx = ctx;
7110 if (record_needs_remap)
7111 record_type = task_copyfn_remap_type (&tcctx, record_type);
7112 if (srecord_needs_remap)
7113 srecord_type = task_copyfn_remap_type (&tcctx, srecord_type);
7115 else
7116 tcctx.cb.decl_map = NULL;
7118 arg = DECL_ARGUMENTS (child_fn);
7119 TREE_TYPE (arg) = build_pointer_type (record_type);
7120 sarg = DECL_CHAIN (arg);
7121 TREE_TYPE (sarg) = build_pointer_type (srecord_type);
7123 /* First pass: initialize temporaries used in record_type and srecord_type
7124 sizes and field offsets. */
7125 if (tcctx.cb.decl_map)
7126 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
7127 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7129 tree *p;
7131 decl = OMP_CLAUSE_DECL (c);
7132 p = tcctx.cb.decl_map->get (decl);
7133 if (p == NULL)
7134 continue;
7135 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
7136 sf = (tree) n->value;
7137 sf = *tcctx.cb.decl_map->get (sf);
7138 src = build_simple_mem_ref_loc (loc, sarg);
7139 src = omp_build_component_ref (src, sf);
7140 t = build2 (MODIFY_EXPR, TREE_TYPE (*p), *p, src);
7141 append_to_statement_list (t, &list);
7144 /* Second pass: copy shared var pointers and copy-construct non-VLA
7145 firstprivate vars. */
7146 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
7147 switch (OMP_CLAUSE_CODE (c))
7149 splay_tree_key key;
7150 case OMP_CLAUSE_SHARED:
7151 decl = OMP_CLAUSE_DECL (c);
7152 key = (splay_tree_key) decl;
7153 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
7154 key = (splay_tree_key) &DECL_UID (decl);
7155 n = splay_tree_lookup (ctx->field_map, key);
7156 if (n == NULL)
7157 break;
7158 f = (tree) n->value;
7159 if (tcctx.cb.decl_map)
7160 f = *tcctx.cb.decl_map->get (f);
7161 n = splay_tree_lookup (ctx->sfield_map, key);
7162 sf = (tree) n->value;
7163 if (tcctx.cb.decl_map)
7164 sf = *tcctx.cb.decl_map->get (sf);
7165 src = build_simple_mem_ref_loc (loc, sarg);
7166 src = omp_build_component_ref (src, sf);
7167 dst = build_simple_mem_ref_loc (loc, arg);
7168 dst = omp_build_component_ref (dst, f);
7169 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
7170 append_to_statement_list (t, &list);
7171 break;
7172 case OMP_CLAUSE_FIRSTPRIVATE:
7173 decl = OMP_CLAUSE_DECL (c);
7174 if (is_variable_sized (decl))
7175 break;
7176 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
7177 if (n == NULL)
7178 break;
7179 f = (tree) n->value;
7180 if (tcctx.cb.decl_map)
7181 f = *tcctx.cb.decl_map->get (f);
7182 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
7183 if (n != NULL)
7185 sf = (tree) n->value;
7186 if (tcctx.cb.decl_map)
7187 sf = *tcctx.cb.decl_map->get (sf);
7188 src = build_simple_mem_ref_loc (loc, sarg);
7189 src = omp_build_component_ref (src, sf);
7190 if (use_pointer_for_field (decl, NULL) || omp_is_reference (decl))
7191 src = build_simple_mem_ref_loc (loc, src);
7193 else
7194 src = decl;
7195 dst = build_simple_mem_ref_loc (loc, arg);
7196 dst = omp_build_component_ref (dst, f);
7197 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
7198 append_to_statement_list (t, &list);
7199 break;
7200 case OMP_CLAUSE_PRIVATE:
7201 if (! OMP_CLAUSE_PRIVATE_OUTER_REF (c))
7202 break;
7203 decl = OMP_CLAUSE_DECL (c);
7204 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
7205 f = (tree) n->value;
7206 if (tcctx.cb.decl_map)
7207 f = *tcctx.cb.decl_map->get (f);
7208 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
7209 if (n != NULL)
7211 sf = (tree) n->value;
7212 if (tcctx.cb.decl_map)
7213 sf = *tcctx.cb.decl_map->get (sf);
7214 src = build_simple_mem_ref_loc (loc, sarg);
7215 src = omp_build_component_ref (src, sf);
7216 if (use_pointer_for_field (decl, NULL))
7217 src = build_simple_mem_ref_loc (loc, src);
7219 else
7220 src = decl;
7221 dst = build_simple_mem_ref_loc (loc, arg);
7222 dst = omp_build_component_ref (dst, f);
7223 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
7224 append_to_statement_list (t, &list);
7225 break;
7226 default:
7227 break;
7230 /* Last pass: handle VLA firstprivates. */
7231 if (tcctx.cb.decl_map)
7232 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
7233 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7235 tree ind, ptr, df;
7237 decl = OMP_CLAUSE_DECL (c);
7238 if (!is_variable_sized (decl))
7239 continue;
7240 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
7241 if (n == NULL)
7242 continue;
7243 f = (tree) n->value;
7244 f = *tcctx.cb.decl_map->get (f);
7245 gcc_assert (DECL_HAS_VALUE_EXPR_P (decl));
7246 ind = DECL_VALUE_EXPR (decl);
7247 gcc_assert (TREE_CODE (ind) == INDIRECT_REF);
7248 gcc_assert (DECL_P (TREE_OPERAND (ind, 0)));
7249 n = splay_tree_lookup (ctx->sfield_map,
7250 (splay_tree_key) TREE_OPERAND (ind, 0));
7251 sf = (tree) n->value;
7252 sf = *tcctx.cb.decl_map->get (sf);
7253 src = build_simple_mem_ref_loc (loc, sarg);
7254 src = omp_build_component_ref (src, sf);
7255 src = build_simple_mem_ref_loc (loc, src);
7256 dst = build_simple_mem_ref_loc (loc, arg);
7257 dst = omp_build_component_ref (dst, f);
7258 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
7259 append_to_statement_list (t, &list);
7260 n = splay_tree_lookup (ctx->field_map,
7261 (splay_tree_key) TREE_OPERAND (ind, 0));
7262 df = (tree) n->value;
7263 df = *tcctx.cb.decl_map->get (df);
7264 ptr = build_simple_mem_ref_loc (loc, arg);
7265 ptr = omp_build_component_ref (ptr, df);
7266 t = build2 (MODIFY_EXPR, TREE_TYPE (ptr), ptr,
7267 build_fold_addr_expr_loc (loc, dst));
7268 append_to_statement_list (t, &list);
7271 t = build1 (RETURN_EXPR, void_type_node, NULL);
7272 append_to_statement_list (t, &list);
7274 if (tcctx.cb.decl_map)
7275 delete tcctx.cb.decl_map;
7276 pop_gimplify_context (NULL);
7277 BIND_EXPR_BODY (bind) = list;
7278 pop_cfun ();
7281 static void
7282 lower_depend_clauses (tree *pclauses, gimple_seq *iseq, gimple_seq *oseq)
7284 tree c, clauses;
7285 gimple *g;
7286 size_t n_in = 0, n_out = 0, idx = 2, i;
7288 clauses = omp_find_clause (*pclauses, OMP_CLAUSE_DEPEND);
7289 gcc_assert (clauses);
7290 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
7291 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
7292 switch (OMP_CLAUSE_DEPEND_KIND (c))
7294 case OMP_CLAUSE_DEPEND_IN:
7295 n_in++;
7296 break;
7297 case OMP_CLAUSE_DEPEND_OUT:
7298 case OMP_CLAUSE_DEPEND_INOUT:
7299 n_out++;
7300 break;
7301 case OMP_CLAUSE_DEPEND_SOURCE:
7302 case OMP_CLAUSE_DEPEND_SINK:
7303 /* FALLTHRU */
7304 default:
7305 gcc_unreachable ();
7307 tree type = build_array_type_nelts (ptr_type_node, n_in + n_out + 2);
7308 tree array = create_tmp_var (type);
7309 TREE_ADDRESSABLE (array) = 1;
7310 tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
7311 NULL_TREE);
7312 g = gimple_build_assign (r, build_int_cst (ptr_type_node, n_in + n_out));
7313 gimple_seq_add_stmt (iseq, g);
7314 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
7315 NULL_TREE);
7316 g = gimple_build_assign (r, build_int_cst (ptr_type_node, n_out));
7317 gimple_seq_add_stmt (iseq, g);
7318 for (i = 0; i < 2; i++)
7320 if ((i ? n_in : n_out) == 0)
7321 continue;
7322 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
7323 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
7324 && ((OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_IN) ^ i))
7326 tree t = OMP_CLAUSE_DECL (c);
7327 t = fold_convert (ptr_type_node, t);
7328 gimplify_expr (&t, iseq, NULL, is_gimple_val, fb_rvalue);
7329 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (idx++),
7330 NULL_TREE, NULL_TREE);
7331 g = gimple_build_assign (r, t);
7332 gimple_seq_add_stmt (iseq, g);
7335 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
7336 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
7337 OMP_CLAUSE_CHAIN (c) = *pclauses;
7338 *pclauses = c;
7339 tree clobber = build_constructor (type, NULL);
7340 TREE_THIS_VOLATILE (clobber) = 1;
7341 g = gimple_build_assign (array, clobber);
7342 gimple_seq_add_stmt (oseq, g);
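/* For illustration only (an added sketch): for
   "depend(out: a) depend(in: b, c)" the array built above (shown here
   with an illustrative name; the actual temporary is nameless) is roughly

     void *.omp_depend[5] = { (void *) 3, (void *) 1, &a, &b, &c };

   i.e. the total count, then the out/inout count, then the out/inout
   addresses followed by the in addresses: the layout the GOMP_task
   depend argument expects.  */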
7345 /* Lower the OpenMP parallel or task directive in the current statement
7346 in GSI_P. CTX holds context information for the directive. */
7348 static void
7349 lower_omp_taskreg (gimple_stmt_iterator *gsi_p, omp_context *ctx)
7351 tree clauses;
7352 tree child_fn, t;
7353 gimple *stmt = gsi_stmt (*gsi_p);
7354 gbind *par_bind, *bind, *dep_bind = NULL;
7355 gimple_seq par_body, olist, ilist, par_olist, par_rlist, par_ilist, new_body;
7356 location_t loc = gimple_location (stmt);
7358 clauses = gimple_omp_taskreg_clauses (stmt);
7359 par_bind
7360 = as_a <gbind *> (gimple_seq_first_stmt (gimple_omp_body (stmt)));
7361 par_body = gimple_bind_body (par_bind);
7362 child_fn = ctx->cb.dst_fn;
7363 if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL
7364 && !gimple_omp_parallel_combined_p (stmt))
7366 struct walk_stmt_info wi;
7367 int ws_num = 0;
7369 memset (&wi, 0, sizeof (wi));
7370 wi.info = &ws_num;
7371 wi.val_only = true;
7372 walk_gimple_seq (par_body, check_combined_parallel, NULL, &wi);
7373 if (ws_num == 1)
7374 gimple_omp_parallel_set_combined_p (stmt, true);
7376 gimple_seq dep_ilist = NULL;
7377 gimple_seq dep_olist = NULL;
7378 if (gimple_code (stmt) == GIMPLE_OMP_TASK
7379 && omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
7381 push_gimplify_context ();
7382 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
7383 lower_depend_clauses (gimple_omp_task_clauses_ptr (stmt),
7384 &dep_ilist, &dep_olist);
7387 if (ctx->srecord_type)
7388 create_task_copyfn (as_a <gomp_task *> (stmt), ctx);
7390 push_gimplify_context ();
7392 par_olist = NULL;
7393 par_ilist = NULL;
7394 par_rlist = NULL;
7395 bool phony_construct = gimple_code (stmt) == GIMPLE_OMP_PARALLEL
7396 && gimple_omp_parallel_grid_phony (as_a <gomp_parallel *> (stmt));
7397 if (phony_construct && ctx->record_type)
7399 gcc_checking_assert (!ctx->receiver_decl);
7400 ctx->receiver_decl = create_tmp_var
7401 (build_reference_type (ctx->record_type), ".omp_rec");
7403 lower_rec_input_clauses (clauses, &par_ilist, &par_olist, ctx, NULL);
7404 lower_omp (&par_body, ctx);
7405 if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL)
7406 lower_reduction_clauses (clauses, &par_rlist, ctx);
7408 /* Declare all the variables created by mapping and the variables
7409 declared in the scope of the parallel body. */
7410 record_vars_into (ctx->block_vars, child_fn);
7411 record_vars_into (gimple_bind_vars (par_bind), child_fn);
7413 if (ctx->record_type)
7415 ctx->sender_decl
7416 = create_tmp_var (ctx->srecord_type ? ctx->srecord_type
7417 : ctx->record_type, ".omp_data_o");
7418 DECL_NAMELESS (ctx->sender_decl) = 1;
7419 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
7420 gimple_omp_taskreg_set_data_arg (stmt, ctx->sender_decl);
7423 olist = NULL;
7424 ilist = NULL;
7425 lower_send_clauses (clauses, &ilist, &olist, ctx);
7426 lower_send_shared_vars (&ilist, &olist, ctx);
7428 if (ctx->record_type)
7430 tree clobber = build_constructor (TREE_TYPE (ctx->sender_decl), NULL);
7431 TREE_THIS_VOLATILE (clobber) = 1;
7432 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
7433 clobber));
7436 /* Once all the expansions are done, sequence all the different
7437 fragments inside gimple_omp_body. */
7439 new_body = NULL;
7441 if (ctx->record_type)
7443 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
7444 /* fixup_child_record_type might have changed receiver_decl's type. */
7445 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
7446 gimple_seq_add_stmt (&new_body,
7447 gimple_build_assign (ctx->receiver_decl, t));
7450 gimple_seq_add_seq (&new_body, par_ilist);
7451 gimple_seq_add_seq (&new_body, par_body);
7452 gimple_seq_add_seq (&new_body, par_rlist);
7453 if (ctx->cancellable)
7454 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
7455 gimple_seq_add_seq (&new_body, par_olist);
7456 new_body = maybe_catch_exception (new_body);
7457 if (gimple_code (stmt) == GIMPLE_OMP_TASK)
7458 gimple_seq_add_stmt (&new_body,
7459 gimple_build_omp_continue (integer_zero_node,
7460 integer_zero_node));
7461 if (!phony_construct)
7463 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
7464 gimple_omp_set_body (stmt, new_body);
7467 bind = gimple_build_bind (NULL, NULL, gimple_bind_block (par_bind));
7468 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
7469 gimple_bind_add_seq (bind, ilist);
7470 if (!phony_construct)
7471 gimple_bind_add_stmt (bind, stmt);
7472 else
7473 gimple_bind_add_seq (bind, new_body);
7474 gimple_bind_add_seq (bind, olist);
7476 pop_gimplify_context (NULL);
7478 if (dep_bind)
7480 gimple_bind_add_seq (dep_bind, dep_ilist);
7481 gimple_bind_add_stmt (dep_bind, bind);
7482 gimple_bind_add_seq (dep_bind, dep_olist);
7483 pop_gimplify_context (dep_bind);
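/* For illustration only (an added sketch): for a parallel the pieces
   above are assembled roughly as

     <ilist: send clauses, copy shared/firstprivate data into .omp_data_o>
     GIMPLE_OMP_PARALLEL (child_fn, &.omp_data_o)
       .omp_data_i = &.omp_data_o;   (retargeted when the body is outlined)
       <par_ilist> BODY <par_rlist> [cancel_label:] <par_olist>
       GIMPLE_OMP_RETURN
     <olist: copy-back, clobber of .omp_data_o>

   and pass_expand_omp outlines the region into child_fn and emits the
   GOMP_parallel call (GOMP_task for tasks).  */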
7487 /* Lower the GIMPLE_OMP_TARGET in the current statement
7488 in GSI_P. CTX holds context information for the directive. */
7490 static void
7491 lower_omp_target (gimple_stmt_iterator *gsi_p, omp_context *ctx)
7493 tree clauses;
7494 tree child_fn, t, c;
7495 gomp_target *stmt = as_a <gomp_target *> (gsi_stmt (*gsi_p));
7496 gbind *tgt_bind, *bind, *dep_bind = NULL;
7497 gimple_seq tgt_body, olist, ilist, fplist, new_body;
7498 location_t loc = gimple_location (stmt);
7499 bool offloaded, data_region;
7500 unsigned int map_cnt = 0;
7502 offloaded = is_gimple_omp_offloaded (stmt);
7503 switch (gimple_omp_target_kind (stmt))
7505 case GF_OMP_TARGET_KIND_REGION:
7506 case GF_OMP_TARGET_KIND_UPDATE:
7507 case GF_OMP_TARGET_KIND_ENTER_DATA:
7508 case GF_OMP_TARGET_KIND_EXIT_DATA:
7509 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
7510 case GF_OMP_TARGET_KIND_OACC_KERNELS:
7511 case GF_OMP_TARGET_KIND_OACC_UPDATE:
7512 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
7513 case GF_OMP_TARGET_KIND_OACC_DECLARE:
7514 data_region = false;
7515 break;
7516 case GF_OMP_TARGET_KIND_DATA:
7517 case GF_OMP_TARGET_KIND_OACC_DATA:
7518 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
7519 data_region = true;
7520 break;
7521 default:
7522 gcc_unreachable ();
7525 clauses = gimple_omp_target_clauses (stmt);
7527 gimple_seq dep_ilist = NULL;
7528 gimple_seq dep_olist = NULL;
7529 if (omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
7531 push_gimplify_context ();
7532 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
7533 lower_depend_clauses (gimple_omp_target_clauses_ptr (stmt),
7534 &dep_ilist, &dep_olist);
7537 tgt_bind = NULL;
7538 tgt_body = NULL;
7539 if (offloaded)
7541 tgt_bind = gimple_seq_first_stmt_as_a_bind (gimple_omp_body (stmt));
7542 tgt_body = gimple_bind_body (tgt_bind);
7544 else if (data_region)
7545 tgt_body = gimple_omp_body (stmt);
7546 child_fn = ctx->cb.dst_fn;
7548 push_gimplify_context ();
7549 fplist = NULL;
7551 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7552 switch (OMP_CLAUSE_CODE (c))
7554 tree var, x;
7556 default:
7557 break;
7558 case OMP_CLAUSE_MAP:
7559 #if CHECKING_P
7560 /* First check what we're prepared to handle in the following. */
7561 switch (OMP_CLAUSE_MAP_KIND (c))
7563 case GOMP_MAP_ALLOC:
7564 case GOMP_MAP_TO:
7565 case GOMP_MAP_FROM:
7566 case GOMP_MAP_TOFROM:
7567 case GOMP_MAP_POINTER:
7568 case GOMP_MAP_TO_PSET:
7569 case GOMP_MAP_DELETE:
7570 case GOMP_MAP_RELEASE:
7571 case GOMP_MAP_ALWAYS_TO:
7572 case GOMP_MAP_ALWAYS_FROM:
7573 case GOMP_MAP_ALWAYS_TOFROM:
7574 case GOMP_MAP_FIRSTPRIVATE_POINTER:
7575 case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
7576 case GOMP_MAP_STRUCT:
7577 case GOMP_MAP_ALWAYS_POINTER:
7578 break;
7579 case GOMP_MAP_FORCE_ALLOC:
7580 case GOMP_MAP_FORCE_TO:
7581 case GOMP_MAP_FORCE_FROM:
7582 case GOMP_MAP_FORCE_TOFROM:
7583 case GOMP_MAP_FORCE_PRESENT:
7584 case GOMP_MAP_FORCE_DEVICEPTR:
7585 case GOMP_MAP_DEVICE_RESIDENT:
7586 case GOMP_MAP_LINK:
7587 gcc_assert (is_gimple_omp_oacc (stmt));
7588 break;
7589 default:
7590 gcc_unreachable ();
7592 #endif
7593 /* FALLTHRU */
7594 case OMP_CLAUSE_TO:
7595 case OMP_CLAUSE_FROM:
7596 oacc_firstprivate:
7597 var = OMP_CLAUSE_DECL (c);
7598 if (!DECL_P (var))
7600 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP
7601 || (!OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
7602 && (OMP_CLAUSE_MAP_KIND (c)
7603 != GOMP_MAP_FIRSTPRIVATE_POINTER)))
7604 map_cnt++;
7605 continue;
7608 if (DECL_SIZE (var)
7609 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
7611 tree var2 = DECL_VALUE_EXPR (var);
7612 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
7613 var2 = TREE_OPERAND (var2, 0);
7614 gcc_assert (DECL_P (var2));
7615 var = var2;
7618 if (offloaded
7619 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7620 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
7621 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
7623 if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
7625 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx))
7626 && varpool_node::get_create (var)->offloadable)
7627 continue;
7629 tree type = build_pointer_type (TREE_TYPE (var));
7630 tree new_var = lookup_decl (var, ctx);
7631 x = create_tmp_var_raw (type, get_name (new_var));
7632 gimple_add_tmp_var (x);
7633 x = build_simple_mem_ref (x);
7634 SET_DECL_VALUE_EXPR (new_var, x);
7635 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7637 continue;
7640 if (!maybe_lookup_field (var, ctx))
7641 continue;
7643 /* Don't remap oacc parallel reduction variables, because the
7644 intermediate result must be local to each gang. */
7645 if (offloaded && !(OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7646 && OMP_CLAUSE_MAP_IN_REDUCTION (c)))
7648 x = build_receiver_ref (var, true, ctx);
7649 tree new_var = lookup_decl (var, ctx);
7651 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7652 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
7653 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
7654 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
7655 x = build_simple_mem_ref (x);
7656 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7658 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
7659 if (omp_is_reference (new_var))
7661 /* Create a local object to hold the instance
7662 value. */
7663 tree type = TREE_TYPE (TREE_TYPE (new_var));
7664 const char *id = IDENTIFIER_POINTER (DECL_NAME (new_var));
7665 tree inst = create_tmp_var (type, id);
7666 gimplify_assign (inst, fold_indirect_ref (x), &fplist);
7667 x = build_fold_addr_expr (inst);
7669 gimplify_assign (new_var, x, &fplist);
7671 else if (DECL_P (new_var))
7673 SET_DECL_VALUE_EXPR (new_var, x);
7674 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7676 else
7677 gcc_unreachable ();
7679 map_cnt++;
7680 break;
7682 case OMP_CLAUSE_FIRSTPRIVATE:
7683 if (is_oacc_parallel (ctx))
7684 goto oacc_firstprivate;
7685 map_cnt++;
7686 var = OMP_CLAUSE_DECL (c);
7687 if (!omp_is_reference (var)
7688 && !is_gimple_reg_type (TREE_TYPE (var)))
7690 tree new_var = lookup_decl (var, ctx);
7691 if (is_variable_sized (var))
7693 tree pvar = DECL_VALUE_EXPR (var);
7694 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
7695 pvar = TREE_OPERAND (pvar, 0);
7696 gcc_assert (DECL_P (pvar));
7697 tree new_pvar = lookup_decl (pvar, ctx);
7698 x = build_fold_indirect_ref (new_pvar);
7699 TREE_THIS_NOTRAP (x) = 1;
7701 else
7702 x = build_receiver_ref (var, true, ctx);
7703 SET_DECL_VALUE_EXPR (new_var, x);
7704 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7706 break;
7708 case OMP_CLAUSE_PRIVATE:
7709 if (is_gimple_omp_oacc (ctx->stmt))
7710 break;
7711 var = OMP_CLAUSE_DECL (c);
7712 if (is_variable_sized (var))
7714 tree new_var = lookup_decl (var, ctx);
7715 tree pvar = DECL_VALUE_EXPR (var);
7716 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
7717 pvar = TREE_OPERAND (pvar, 0);
7718 gcc_assert (DECL_P (pvar));
7719 tree new_pvar = lookup_decl (pvar, ctx);
7720 x = build_fold_indirect_ref (new_pvar);
7721 TREE_THIS_NOTRAP (x) = 1;
7722 SET_DECL_VALUE_EXPR (new_var, x);
7723 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7725 break;
7727 case OMP_CLAUSE_USE_DEVICE_PTR:
7728 case OMP_CLAUSE_IS_DEVICE_PTR:
7729 var = OMP_CLAUSE_DECL (c);
7730 map_cnt++;
7731 if (is_variable_sized (var))
7733 tree new_var = lookup_decl (var, ctx);
7734 tree pvar = DECL_VALUE_EXPR (var);
7735 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
7736 pvar = TREE_OPERAND (pvar, 0);
7737 gcc_assert (DECL_P (pvar));
7738 tree new_pvar = lookup_decl (pvar, ctx);
7739 x = build_fold_indirect_ref (new_pvar);
7740 TREE_THIS_NOTRAP (x) = 1;
7741 SET_DECL_VALUE_EXPR (new_var, x);
7742 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7744 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
7746 tree new_var = lookup_decl (var, ctx);
7747 tree type = build_pointer_type (TREE_TYPE (var));
7748 x = create_tmp_var_raw (type, get_name (new_var));
7749 gimple_add_tmp_var (x);
7750 x = build_simple_mem_ref (x);
7751 SET_DECL_VALUE_EXPR (new_var, x);
7752 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7754 else
7756 tree new_var = lookup_decl (var, ctx);
7757 x = create_tmp_var_raw (TREE_TYPE (new_var), get_name (new_var));
7758 gimple_add_tmp_var (x);
7759 SET_DECL_VALUE_EXPR (new_var, x);
7760 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7762 break;
7765 if (offloaded)
7767 target_nesting_level++;
7768 lower_omp (&tgt_body, ctx);
7769 target_nesting_level--;
7771 else if (data_region)
7772 lower_omp (&tgt_body, ctx);
7774 if (offloaded)
7776 /* Declare all the variables created by mapping and the variables
7777 declared in the scope of the target body. */
7778 record_vars_into (ctx->block_vars, child_fn);
7779 record_vars_into (gimple_bind_vars (tgt_bind), child_fn);
7782 olist = NULL;
7783 ilist = NULL;
7784 if (ctx->record_type)
7786 ctx->sender_decl
7787 = create_tmp_var (ctx->record_type, ".omp_data_arr");
7788 DECL_NAMELESS (ctx->sender_decl) = 1;
7789 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
7790 t = make_tree_vec (3);
7791 TREE_VEC_ELT (t, 0) = ctx->sender_decl;
7792 TREE_VEC_ELT (t, 1)
7793 = create_tmp_var (build_array_type_nelts (size_type_node, map_cnt),
7794 ".omp_data_sizes");
7795 DECL_NAMELESS (TREE_VEC_ELT (t, 1)) = 1;
7796 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 1)) = 1;
7797 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 1;
7798 tree tkind_type = short_unsigned_type_node;
7799 int talign_shift = 8;
7800 TREE_VEC_ELT (t, 2)
7801 = create_tmp_var (build_array_type_nelts (tkind_type, map_cnt),
7802 ".omp_data_kinds");
7803 DECL_NAMELESS (TREE_VEC_ELT (t, 2)) = 1;
7804 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 2)) = 1;
7805 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 1;
7806 gimple_omp_target_set_data_arg (stmt, t);
7808 vec<constructor_elt, va_gc> *vsize;
7809 vec<constructor_elt, va_gc> *vkind;
7810 vec_alloc (vsize, map_cnt);
7811 vec_alloc (vkind, map_cnt);
7812 unsigned int map_idx = 0;
7814 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7815 switch (OMP_CLAUSE_CODE (c))
7817 tree ovar, nc, s, purpose, var, x, type;
7818 unsigned int talign;
7820 default:
7821 break;
7823 case OMP_CLAUSE_MAP:
7824 case OMP_CLAUSE_TO:
7825 case OMP_CLAUSE_FROM:
7826 oacc_firstprivate_map:
7827 nc = c;
7828 ovar = OMP_CLAUSE_DECL (c);
7829 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7830 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
7831 || (OMP_CLAUSE_MAP_KIND (c)
7832 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
7833 break;
7834 if (!DECL_P (ovar))
7836 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7837 && OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
7839 gcc_checking_assert (OMP_CLAUSE_DECL (OMP_CLAUSE_CHAIN (c))
7840 == get_base_address (ovar));
7841 nc = OMP_CLAUSE_CHAIN (c);
7842 ovar = OMP_CLAUSE_DECL (nc);
7844 else
7846 tree x = build_sender_ref (ovar, ctx);
7847 tree v
7848 = build_fold_addr_expr_with_type (ovar, ptr_type_node);
7849 gimplify_assign (x, v, &ilist);
7850 nc = NULL_TREE;
7853 else
7855 if (DECL_SIZE (ovar)
7856 && TREE_CODE (DECL_SIZE (ovar)) != INTEGER_CST)
7858 tree ovar2 = DECL_VALUE_EXPR (ovar);
7859 gcc_assert (TREE_CODE (ovar2) == INDIRECT_REF);
7860 ovar2 = TREE_OPERAND (ovar2, 0);
7861 gcc_assert (DECL_P (ovar2));
7862 ovar = ovar2;
7864 if (!maybe_lookup_field (ovar, ctx))
7865 continue;
7868 talign = TYPE_ALIGN_UNIT (TREE_TYPE (ovar));
7869 if (DECL_P (ovar) && DECL_ALIGN_UNIT (ovar) > talign)
7870 talign = DECL_ALIGN_UNIT (ovar);
7871 if (nc)
7873 var = lookup_decl_in_outer_ctx (ovar, ctx);
7874 x = build_sender_ref (ovar, ctx);
7876 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7877 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
7878 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
7879 && TREE_CODE (TREE_TYPE (ovar)) == ARRAY_TYPE)
7881 gcc_assert (offloaded);
7882 tree avar
7883 = create_tmp_var (TREE_TYPE (TREE_TYPE (x)));
7884 mark_addressable (avar);
7885 gimplify_assign (avar, build_fold_addr_expr (var), &ilist);
7886 talign = DECL_ALIGN_UNIT (avar);
7887 avar = build_fold_addr_expr (avar);
7888 gimplify_assign (x, avar, &ilist);
7890 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7892 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
7893 if (!omp_is_reference (var))
7895 if (is_gimple_reg (var)
7896 && OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
7897 TREE_NO_WARNING (var) = 1;
7898 var = build_fold_addr_expr (var);
7900 else
7901 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
7902 gimplify_assign (x, var, &ilist);
7904 else if (is_gimple_reg (var))
7906 gcc_assert (offloaded);
7907 tree avar = create_tmp_var (TREE_TYPE (var));
7908 mark_addressable (avar);
7909 enum gomp_map_kind map_kind = OMP_CLAUSE_MAP_KIND (c);
7910 if (GOMP_MAP_COPY_TO_P (map_kind)
7911 || map_kind == GOMP_MAP_POINTER
7912 || map_kind == GOMP_MAP_TO_PSET
7913 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
7915 /* If we need to initialize a temporary
7916 with VAR because it is not addressable, and
7917 the variable hasn't been initialized yet, then
7918 we'll get a warning for the store to avar.
7919 Don't warn in that case; the mapping might
7920 be implicit. */
7921 TREE_NO_WARNING (var) = 1;
7922 gimplify_assign (avar, var, &ilist);
7924 avar = build_fold_addr_expr (avar);
7925 gimplify_assign (x, avar, &ilist);
7926 if ((GOMP_MAP_COPY_FROM_P (map_kind)
7927 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
7928 && !TYPE_READONLY (TREE_TYPE (var)))
7930 x = unshare_expr (x);
7931 x = build_simple_mem_ref (x);
7932 gimplify_assign (var, x, &olist);
7935 else
7937 var = build_fold_addr_expr (var);
7938 gimplify_assign (x, var, &ilist);
7941 s = NULL_TREE;
7942 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7944 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
7945 s = TREE_TYPE (ovar);
7946 if (TREE_CODE (s) == REFERENCE_TYPE)
7947 s = TREE_TYPE (s);
7948 s = TYPE_SIZE_UNIT (s);
7950 else
7951 s = OMP_CLAUSE_SIZE (c);
7952 if (s == NULL_TREE)
7953 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
7954 s = fold_convert (size_type_node, s);
7955 purpose = size_int (map_idx++);
7956 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
7957 if (TREE_CODE (s) != INTEGER_CST)
7958 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
7960 unsigned HOST_WIDE_INT tkind, tkind_zero;
7961 switch (OMP_CLAUSE_CODE (c))
7963 case OMP_CLAUSE_MAP:
7964 tkind = OMP_CLAUSE_MAP_KIND (c);
7965 tkind_zero = tkind;
7966 if (OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c))
7967 switch (tkind)
7969 case GOMP_MAP_ALLOC:
7970 case GOMP_MAP_TO:
7971 case GOMP_MAP_FROM:
7972 case GOMP_MAP_TOFROM:
7973 case GOMP_MAP_ALWAYS_TO:
7974 case GOMP_MAP_ALWAYS_FROM:
7975 case GOMP_MAP_ALWAYS_TOFROM:
7976 case GOMP_MAP_RELEASE:
7977 case GOMP_MAP_FORCE_TO:
7978 case GOMP_MAP_FORCE_FROM:
7979 case GOMP_MAP_FORCE_TOFROM:
7980 case GOMP_MAP_FORCE_PRESENT:
7981 tkind_zero = GOMP_MAP_ZERO_LEN_ARRAY_SECTION;
7982 break;
7983 case GOMP_MAP_DELETE:
7984 tkind_zero = GOMP_MAP_DELETE_ZERO_LEN_ARRAY_SECTION;
7985 default:
7986 break;
7988 if (tkind_zero != tkind)
7990 if (integer_zerop (s))
7991 tkind = tkind_zero;
7992 else if (integer_nonzerop (s))
7993 tkind_zero = tkind;
7995 break;
7996 case OMP_CLAUSE_FIRSTPRIVATE:
7997 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
7998 tkind = GOMP_MAP_TO;
7999 tkind_zero = tkind;
8000 break;
8001 case OMP_CLAUSE_TO:
8002 tkind = GOMP_MAP_TO;
8003 tkind_zero = tkind;
8004 break;
8005 case OMP_CLAUSE_FROM:
8006 tkind = GOMP_MAP_FROM;
8007 tkind_zero = tkind;
8008 break;
8009 default:
8010 gcc_unreachable ();
8012 gcc_checking_assert (tkind
8013 < (HOST_WIDE_INT_C (1U) << talign_shift));
8014 gcc_checking_assert (tkind_zero
8015 < (HOST_WIDE_INT_C (1U) << talign_shift));
8016 talign = ceil_log2 (talign);
8017 tkind |= talign << talign_shift;
8018 tkind_zero |= talign << talign_shift;
8019 gcc_checking_assert (tkind
8020 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
8021 gcc_checking_assert (tkind_zero
8022 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
8023 if (tkind == tkind_zero)
8024 x = build_int_cstu (tkind_type, tkind);
8025 else
8027 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 0;
8028 x = build3 (COND_EXPR, tkind_type,
8029 fold_build2 (EQ_EXPR, boolean_type_node,
8030 unshare_expr (s), size_zero_node),
8031 build_int_cstu (tkind_type, tkind_zero),
8032 build_int_cstu (tkind_type, tkind));
8034 CONSTRUCTOR_APPEND_ELT (vkind, purpose, x);
8035 if (nc && nc != c)
8036 c = nc;
8037 break;
8039 case OMP_CLAUSE_FIRSTPRIVATE:
8040 if (is_oacc_parallel (ctx))
8041 goto oacc_firstprivate_map;
8042 ovar = OMP_CLAUSE_DECL (c);
8043 if (omp_is_reference (ovar))
8044 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
8045 else
8046 talign = DECL_ALIGN_UNIT (ovar);
8047 var = lookup_decl_in_outer_ctx (ovar, ctx);
8048 x = build_sender_ref (ovar, ctx);
8049 tkind = GOMP_MAP_FIRSTPRIVATE;
8050 type = TREE_TYPE (ovar);
8051 if (omp_is_reference (ovar))
8052 type = TREE_TYPE (type);
8053 if ((INTEGRAL_TYPE_P (type)
8054 && TYPE_PRECISION (type) <= POINTER_SIZE)
8055 || TREE_CODE (type) == POINTER_TYPE)
8057 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
8058 tree t = var;
8059 if (omp_is_reference (var))
8060 t = build_simple_mem_ref (var);
8061 else if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
8062 TREE_NO_WARNING (var) = 1;
8063 if (TREE_CODE (type) != POINTER_TYPE)
8064 t = fold_convert (pointer_sized_int_node, t);
8065 t = fold_convert (TREE_TYPE (x), t);
8066 gimplify_assign (x, t, &ilist);
8068 else if (omp_is_reference (var))
8069 gimplify_assign (x, var, &ilist);
8070 else if (is_gimple_reg (var))
8072 tree avar = create_tmp_var (TREE_TYPE (var));
8073 mark_addressable (avar);
8074 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
8075 TREE_NO_WARNING (var) = 1;
8076 gimplify_assign (avar, var, &ilist);
8077 avar = build_fold_addr_expr (avar);
8078 gimplify_assign (x, avar, &ilist);
8080 else
8082 var = build_fold_addr_expr (var);
8083 gimplify_assign (x, var, &ilist);
8085 if (tkind == GOMP_MAP_FIRSTPRIVATE_INT)
8086 s = size_int (0);
8087 else if (omp_is_reference (ovar))
8088 s = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
8089 else
8090 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
8091 s = fold_convert (size_type_node, s);
8092 purpose = size_int (map_idx++);
8093 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
8094 if (TREE_CODE (s) != INTEGER_CST)
8095 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
8097 gcc_checking_assert (tkind
8098 < (HOST_WIDE_INT_C (1U) << talign_shift));
8099 talign = ceil_log2 (talign);
8100 tkind |= talign << talign_shift;
8101 gcc_checking_assert (tkind
8102 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
8103 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
8104 build_int_cstu (tkind_type, tkind));
8105 break;
8107 case OMP_CLAUSE_USE_DEVICE_PTR:
8108 case OMP_CLAUSE_IS_DEVICE_PTR:
8109 ovar = OMP_CLAUSE_DECL (c);
8110 var = lookup_decl_in_outer_ctx (ovar, ctx);
8111 x = build_sender_ref (ovar, ctx);
8112 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR)
8113 tkind = GOMP_MAP_USE_DEVICE_PTR;
8114 else
8115 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
8116 type = TREE_TYPE (ovar);
8117 if (TREE_CODE (type) == ARRAY_TYPE)
8118 var = build_fold_addr_expr (var);
8119 else
8121 if (omp_is_reference (ovar))
8123 type = TREE_TYPE (type);
8124 if (TREE_CODE (type) != ARRAY_TYPE)
8125 var = build_simple_mem_ref (var);
8126 var = fold_convert (TREE_TYPE (x), var);
8129 gimplify_assign (x, var, &ilist);
8130 s = size_int (0);
8131 purpose = size_int (map_idx++);
8132 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
8133 gcc_checking_assert (tkind
8134 < (HOST_WIDE_INT_C (1U) << talign_shift));
8135 gcc_checking_assert (tkind
8136 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
8137 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
8138 build_int_cstu (tkind_type, tkind));
8139 break;
8142 gcc_assert (map_idx == map_cnt);
8144 DECL_INITIAL (TREE_VEC_ELT (t, 1))
8145 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 1)), vsize);
8146 DECL_INITIAL (TREE_VEC_ELT (t, 2))
8147 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 2)), vkind);
8148 for (int i = 1; i <= 2; i++)
8149 if (!TREE_STATIC (TREE_VEC_ELT (t, i)))
8151 gimple_seq initlist = NULL;
8152 force_gimple_operand (build1 (DECL_EXPR, void_type_node,
8153 TREE_VEC_ELT (t, i)),
8154 &initlist, true, NULL_TREE);
8155 gimple_seq_add_seq (&ilist, initlist);
8157 tree clobber = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, i)),
8158 NULL);
8159 TREE_THIS_VOLATILE (clobber) = 1;
8160 gimple_seq_add_stmt (&olist,
8161 gimple_build_assign (TREE_VEC_ELT (t, i),
8162 clobber));
8165 tree clobber = build_constructor (ctx->record_type, NULL);
8166 TREE_THIS_VOLATILE (clobber) = 1;
8167 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
8168 clobber));
8171 /* Once all the expansions are done, sequence all the different
8172 fragments inside gimple_omp_body. */
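   /* A sketch of the sequence assembled below for an offloaded region
      (data regions omit the receiver setup; the fork/join sequences
      are only non-empty for OpenACC parallel reductions):

	  .omp_data_i = &.omp_data_arr;	receiver setup
	  <fplist>			OpenACC firstprivate init
	  <clause receiver code>	firstprivate/private/device ptrs
	  <fork_seq> <tgt_body> <join_seq>
	  GIMPLE_OMP_RETURN  */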
8174 new_body = NULL;
8176 if (offloaded
8177 && ctx->record_type)
8179 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
8180 /* fixup_child_record_type might have changed receiver_decl's type. */
8181 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
8182 gimple_seq_add_stmt (&new_body,
8183 gimple_build_assign (ctx->receiver_decl, t));
8185 gimple_seq_add_seq (&new_body, fplist);
8187 if (offloaded || data_region)
8189 tree prev = NULL_TREE;
8190 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
8191 switch (OMP_CLAUSE_CODE (c))
8193 tree var, x;
8194 default:
8195 break;
8196 case OMP_CLAUSE_FIRSTPRIVATE:
8197 if (is_gimple_omp_oacc (ctx->stmt))
8198 break;
8199 var = OMP_CLAUSE_DECL (c);
8200 if (omp_is_reference (var)
8201 || is_gimple_reg_type (TREE_TYPE (var)))
8203 tree new_var = lookup_decl (var, ctx);
8204 tree type;
8205 type = TREE_TYPE (var);
8206 if (omp_is_reference (var))
8207 type = TREE_TYPE (type);
8208 if ((INTEGRAL_TYPE_P (type)
8209 && TYPE_PRECISION (type) <= POINTER_SIZE)
8210 || TREE_CODE (type) == POINTER_TYPE)
8212 x = build_receiver_ref (var, false, ctx);
8213 if (TREE_CODE (type) != POINTER_TYPE)
8214 x = fold_convert (pointer_sized_int_node, x);
8215 x = fold_convert (type, x);
8216 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
8217 fb_rvalue);
8218 if (omp_is_reference (var))
8220 tree v = create_tmp_var_raw (type, get_name (var));
8221 gimple_add_tmp_var (v);
8222 TREE_ADDRESSABLE (v) = 1;
8223 gimple_seq_add_stmt (&new_body,
8224 gimple_build_assign (v, x));
8225 x = build_fold_addr_expr (v);
8227 gimple_seq_add_stmt (&new_body,
8228 gimple_build_assign (new_var, x));
8230 else
8232 x = build_receiver_ref (var, !omp_is_reference (var), ctx);
8233 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
8234 fb_rvalue);
8235 gimple_seq_add_stmt (&new_body,
8236 gimple_build_assign (new_var, x));
8239 else if (is_variable_sized (var))
8241 tree pvar = DECL_VALUE_EXPR (var);
8242 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
8243 pvar = TREE_OPERAND (pvar, 0);
8244 gcc_assert (DECL_P (pvar));
8245 tree new_var = lookup_decl (pvar, ctx);
8246 x = build_receiver_ref (var, false, ctx);
8247 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8248 gimple_seq_add_stmt (&new_body,
8249 gimple_build_assign (new_var, x));
8251 break;
8252 case OMP_CLAUSE_PRIVATE:
8253 if (is_gimple_omp_oacc (ctx->stmt))
8254 break;
8255 var = OMP_CLAUSE_DECL (c);
8256 if (omp_is_reference (var))
8258 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
8259 tree new_var = lookup_decl (var, ctx);
8260 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
8261 if (TREE_CONSTANT (x))
8263 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
8264 get_name (var));
8265 gimple_add_tmp_var (x);
8266 TREE_ADDRESSABLE (x) = 1;
8267 x = build_fold_addr_expr_loc (clause_loc, x);
8269 else
8270 break;
8272 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
8273 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8274 gimple_seq_add_stmt (&new_body,
8275 gimple_build_assign (new_var, x));
8277 break;
8278 case OMP_CLAUSE_USE_DEVICE_PTR:
8279 case OMP_CLAUSE_IS_DEVICE_PTR:
8280 var = OMP_CLAUSE_DECL (c);
8281 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR)
8282 x = build_sender_ref (var, ctx);
8283 else
8284 x = build_receiver_ref (var, false, ctx);
8285 if (is_variable_sized (var))
8287 tree pvar = DECL_VALUE_EXPR (var);
8288 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
8289 pvar = TREE_OPERAND (pvar, 0);
8290 gcc_assert (DECL_P (pvar));
8291 tree new_var = lookup_decl (pvar, ctx);
8292 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8293 gimple_seq_add_stmt (&new_body,
8294 gimple_build_assign (new_var, x));
8296 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
8298 tree new_var = lookup_decl (var, ctx);
8299 new_var = DECL_VALUE_EXPR (new_var);
8300 gcc_assert (TREE_CODE (new_var) == MEM_REF);
8301 new_var = TREE_OPERAND (new_var, 0);
8302 gcc_assert (DECL_P (new_var));
8303 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8304 gimple_seq_add_stmt (&new_body,
8305 gimple_build_assign (new_var, x));
8307 else
8309 tree type = TREE_TYPE (var);
8310 tree new_var = lookup_decl (var, ctx);
8311 if (omp_is_reference (var))
8313 type = TREE_TYPE (type);
8314 if (TREE_CODE (type) != ARRAY_TYPE)
8316 tree v = create_tmp_var_raw (type, get_name (var));
8317 gimple_add_tmp_var (v);
8318 TREE_ADDRESSABLE (v) = 1;
8319 x = fold_convert (type, x);
8320 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
8321 fb_rvalue);
8322 gimple_seq_add_stmt (&new_body,
8323 gimple_build_assign (v, x));
8324 x = build_fold_addr_expr (v);
8327 new_var = DECL_VALUE_EXPR (new_var);
8328 x = fold_convert (TREE_TYPE (new_var), x);
8329 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8330 gimple_seq_add_stmt (&new_body,
8331 gimple_build_assign (new_var, x));
8333 break;
8335 /* Handle GOMP_MAP_FIRSTPRIVATE_{POINTER,REFERENCE} in a second pass,
8336 so that any firstprivate vars needed to hold OMP_CLAUSE_SIZE
8337 have already been handled. Likewise OMP_CLAUSE_PRIVATE for VLAs
8338 or references to VLAs. */
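/* E.g. for map(tofrom: p[0:n]) with a pointer P, the clause list is
   expected to carry the data clause for the array section immediately
   followed by a GOMP_MAP_FIRSTPRIVATE_POINTER clause for P itself,
   whose OMP_CLAUSE_SIZE holds the bias subtracted below (a sketch;
   the exact shape is determined during gimplification).  */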
8339 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
8340 switch (OMP_CLAUSE_CODE (c))
8342 tree var;
8343 default:
8344 break;
8345 case OMP_CLAUSE_MAP:
8346 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
8347 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
8349 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
8350 HOST_WIDE_INT offset = 0;
8351 gcc_assert (prev);
8352 var = OMP_CLAUSE_DECL (c);
8353 if (DECL_P (var)
8354 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
8355 && is_global_var (maybe_lookup_decl_in_outer_ctx (var,
8356 ctx))
8357 && varpool_node::get_create (var)->offloadable)
8358 break;
8359 if (TREE_CODE (var) == INDIRECT_REF
8360 && TREE_CODE (TREE_OPERAND (var, 0)) == COMPONENT_REF)
8361 var = TREE_OPERAND (var, 0);
8362 if (TREE_CODE (var) == COMPONENT_REF)
8364 var = get_addr_base_and_unit_offset (var, &offset);
8365 gcc_assert (var != NULL_TREE && DECL_P (var));
8367 else if (DECL_SIZE (var)
8368 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
8370 tree var2 = DECL_VALUE_EXPR (var);
8371 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
8372 var2 = TREE_OPERAND (var2, 0);
8373 gcc_assert (DECL_P (var2));
8374 var = var2;
8376 tree new_var = lookup_decl (var, ctx), x;
8377 tree type = TREE_TYPE (new_var);
8378 bool is_ref;
8379 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == INDIRECT_REF
8380 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
8381 == COMPONENT_REF))
8383 type = TREE_TYPE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0));
8384 is_ref = true;
8385 new_var = build2 (MEM_REF, type,
8386 build_fold_addr_expr (new_var),
8387 build_int_cst (build_pointer_type (type),
8388 offset));
8390 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPONENT_REF)
8392 type = TREE_TYPE (OMP_CLAUSE_DECL (c));
8393 is_ref = TREE_CODE (type) == REFERENCE_TYPE;
8394 new_var = build2 (MEM_REF, type,
8395 build_fold_addr_expr (new_var),
8396 build_int_cst (build_pointer_type (type),
8397 offset));
8399 else
8400 is_ref = omp_is_reference (var);
8401 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
8402 is_ref = false;
8403 bool ref_to_array = false;
8404 if (is_ref)
8406 type = TREE_TYPE (type);
8407 if (TREE_CODE (type) == ARRAY_TYPE)
8409 type = build_pointer_type (type);
8410 ref_to_array = true;
8413 else if (TREE_CODE (type) == ARRAY_TYPE)
8415 tree decl2 = DECL_VALUE_EXPR (new_var);
8416 gcc_assert (TREE_CODE (decl2) == MEM_REF);
8417 decl2 = TREE_OPERAND (decl2, 0);
8418 gcc_assert (DECL_P (decl2));
8419 new_var = decl2;
8420 type = TREE_TYPE (new_var);
8422 x = build_receiver_ref (OMP_CLAUSE_DECL (prev), false, ctx);
8423 x = fold_convert_loc (clause_loc, type, x);
8424 if (!integer_zerop (OMP_CLAUSE_SIZE (c)))
8426 tree bias = OMP_CLAUSE_SIZE (c);
8427 if (DECL_P (bias))
8428 bias = lookup_decl (bias, ctx);
8429 bias = fold_convert_loc (clause_loc, sizetype, bias);
8430 bias = fold_build1_loc (clause_loc, NEGATE_EXPR, sizetype,
8431 bias);
8432 x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
8433 TREE_TYPE (x), x, bias);
8435 if (ref_to_array)
8436 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
8437 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8438 if (is_ref && !ref_to_array)
8440 tree t = create_tmp_var_raw (type, get_name (var));
8441 gimple_add_tmp_var (t);
8442 TREE_ADDRESSABLE (t) = 1;
8443 gimple_seq_add_stmt (&new_body,
8444 gimple_build_assign (t, x));
8445 x = build_fold_addr_expr_loc (clause_loc, t);
8447 gimple_seq_add_stmt (&new_body,
8448 gimple_build_assign (new_var, x));
8449 prev = NULL_TREE;
8451 else if (OMP_CLAUSE_CHAIN (c)
8452 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c))
8453 == OMP_CLAUSE_MAP
8454 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
8455 == GOMP_MAP_FIRSTPRIVATE_POINTER
8456 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
8457 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
8458 prev = c;
8459 break;
8460 case OMP_CLAUSE_PRIVATE:
8461 var = OMP_CLAUSE_DECL (c);
8462 if (is_variable_sized (var))
8464 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
8465 tree new_var = lookup_decl (var, ctx);
8466 tree pvar = DECL_VALUE_EXPR (var);
8467 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
8468 pvar = TREE_OPERAND (pvar, 0);
8469 gcc_assert (DECL_P (pvar));
8470 tree new_pvar = lookup_decl (pvar, ctx);
8471 tree atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
8472 tree al = size_int (DECL_ALIGN (var));
8473 tree x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
8474 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
8475 x = fold_convert_loc (clause_loc, TREE_TYPE (new_pvar), x);
8476 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8477 gimple_seq_add_stmt (&new_body,
8478 gimple_build_assign (new_pvar, x));
8480 else if (omp_is_reference (var) && !is_gimple_omp_oacc (ctx->stmt))
8482 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
8483 tree new_var = lookup_decl (var, ctx);
8484 tree x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
8485 if (TREE_CONSTANT (x))
8486 break;
8487 else
8489 tree atmp
8490 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
8491 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
8492 tree al = size_int (TYPE_ALIGN (rtype));
8493 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
8496 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
8497 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8498 gimple_seq_add_stmt (&new_body,
8499 gimple_build_assign (new_var, x));
8501 break;
8504 gimple_seq fork_seq = NULL;
8505 gimple_seq join_seq = NULL;
8507 if (is_oacc_parallel (ctx))
8509 /* If there are reductions on the offloaded region itself, treat
8510 them as a dummy GANG loop. */
8511 tree level = build_int_cst (integer_type_node, GOMP_DIM_GANG);
8513 lower_oacc_reductions (gimple_location (ctx->stmt), clauses, level,
8514 false, NULL, NULL, &fork_seq, &join_seq, ctx);
8517 gimple_seq_add_seq (&new_body, fork_seq);
8518 gimple_seq_add_seq (&new_body, tgt_body);
8519 gimple_seq_add_seq (&new_body, join_seq);
8521 if (offloaded)
8522 new_body = maybe_catch_exception (new_body);
8524 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
8525 gimple_omp_set_body (stmt, new_body);
8528 bind = gimple_build_bind (NULL, NULL,
8529 tgt_bind ? gimple_bind_block (tgt_bind)
8530 : NULL_TREE);
8531 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
8532 gimple_bind_add_seq (bind, ilist);
8533 gimple_bind_add_stmt (bind, stmt);
8534 gimple_bind_add_seq (bind, olist);
8536 pop_gimplify_context (NULL);
8538 if (dep_bind)
8540 gimple_bind_add_seq (dep_bind, dep_ilist);
8541 gimple_bind_add_stmt (dep_bind, bind);
8542 gimple_bind_add_seq (dep_bind, dep_olist);
8543 pop_gimplify_context (dep_bind);
8547 /* Expand code for an OpenMP teams directive. */
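/* A sketch of the lowering, assuming both clauses are present:

	#pragma omp teams num_teams (n) thread_limit (m)
	body;

   becomes

	.num_teams = (unsigned) n;
	.thread_limit = (unsigned) m;
	#pragma omp teams		original stmt, body moved out
	__builtin_GOMP_teams (.num_teams, .thread_limit);
	body;
	GIMPLE_OMP_RETURN  */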
8549 static void
8550 lower_omp_teams (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8552 gomp_teams *teams_stmt = as_a <gomp_teams *> (gsi_stmt (*gsi_p));
8553 push_gimplify_context ();
8555 tree block = make_node (BLOCK);
8556 gbind *bind = gimple_build_bind (NULL, NULL, block);
8557 gsi_replace (gsi_p, bind, true);
8558 gimple_seq bind_body = NULL;
8559 gimple_seq dlist = NULL;
8560 gimple_seq olist = NULL;
8562 tree num_teams = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
8563 OMP_CLAUSE_NUM_TEAMS);
8564 if (num_teams == NULL_TREE)
8565 num_teams = build_int_cst (unsigned_type_node, 0);
8566 else
8568 num_teams = OMP_CLAUSE_NUM_TEAMS_EXPR (num_teams);
8569 num_teams = fold_convert (unsigned_type_node, num_teams);
8570 gimplify_expr (&num_teams, &bind_body, NULL, is_gimple_val, fb_rvalue);
8572 tree thread_limit = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
8573 OMP_CLAUSE_THREAD_LIMIT);
8574 if (thread_limit == NULL_TREE)
8575 thread_limit = build_int_cst (unsigned_type_node, 0);
8576 else
8578 thread_limit = OMP_CLAUSE_THREAD_LIMIT_EXPR (thread_limit);
8579 thread_limit = fold_convert (unsigned_type_node, thread_limit);
8580 gimplify_expr (&thread_limit, &bind_body, NULL, is_gimple_val,
8581 fb_rvalue);
8584 lower_rec_input_clauses (gimple_omp_teams_clauses (teams_stmt),
8585 &bind_body, &dlist, ctx, NULL);
8586 lower_omp (gimple_omp_body_ptr (teams_stmt), ctx);
8587 lower_reduction_clauses (gimple_omp_teams_clauses (teams_stmt), &olist, ctx);
8588 if (!gimple_omp_teams_grid_phony (teams_stmt))
8590 gimple_seq_add_stmt (&bind_body, teams_stmt);
8591 location_t loc = gimple_location (teams_stmt);
8592 tree decl = builtin_decl_explicit (BUILT_IN_GOMP_TEAMS);
8593 gimple *call = gimple_build_call (decl, 2, num_teams, thread_limit);
8594 gimple_set_location (call, loc);
8595 gimple_seq_add_stmt (&bind_body, call);
8598 gimple_seq_add_seq (&bind_body, gimple_omp_body (teams_stmt));
8599 gimple_omp_set_body (teams_stmt, NULL);
8600 gimple_seq_add_seq (&bind_body, olist);
8601 gimple_seq_add_seq (&bind_body, dlist);
8602 if (!gimple_omp_teams_grid_phony (teams_stmt))
8603 gimple_seq_add_stmt (&bind_body, gimple_build_omp_return (true));
8604 gimple_bind_set_body (bind, bind_body);
8606 pop_gimplify_context (bind);
8608 gimple_bind_append_vars (bind, ctx->block_vars);
8609 BLOCK_VARS (block) = ctx->block_vars;
8610 if (BLOCK_VARS (block))
8611 TREE_USED (block) = 1;
8614 /* Expand code within an artificial GIMPLE_OMP_GRID_BODY OMP construct. */
8616 static void
8617 lower_omp_grid_body (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8619 gimple *stmt = gsi_stmt (*gsi_p);
8620 lower_omp (gimple_omp_body_ptr (stmt), ctx);
8621 gimple_seq_add_stmt (gimple_omp_body_ptr (stmt),
8622 gimple_build_omp_return (false));
8626 /* Callback for lower_omp_1. Return non-NULL if *tp needs to be
8627 regimplified. If DATA is non-NULL, lower_omp_1 was invoked outside
8628 of an OMP context, but with task_shared_vars set. */
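/* For instance, a variable whose DECL_VALUE_EXPR was set to
   .omp_data_i->field by the data-sharing lowering is no longer a
   gimple value, so every statement mentioning it must be
   regimplified.  */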
8630 static tree
8631 lower_omp_regimplify_p (tree *tp, int *walk_subtrees,
8632 void *data)
8634 tree t = *tp;
8636 /* Any variable with DECL_VALUE_EXPR needs to be regimplified. */
8637 if (VAR_P (t) && data == NULL && DECL_HAS_VALUE_EXPR_P (t))
8638 return t;
8640 if (task_shared_vars
8641 && DECL_P (t)
8642 && bitmap_bit_p (task_shared_vars, DECL_UID (t)))
8643 return t;
8645 /* If a global variable has been privatized, TREE_CONSTANT on
8646 ADDR_EXPR might be wrong. */
8647 if (data == NULL && TREE_CODE (t) == ADDR_EXPR)
8648 recompute_tree_invariant_for_addr_expr (t);
8650 *walk_subtrees = !IS_TYPE_OR_DECL_P (t);
8651 return NULL_TREE;
8654 /* Data to be communicated between lower_omp_regimplify_operands and
8655 lower_omp_regimplify_operands_p. */
8657 struct lower_omp_regimplify_operands_data
8659 omp_context *ctx;
8660 vec<tree> *decls;
8663 /* Helper function for lower_omp_regimplify_operands. Find
8664 omp_member_access_dummy_var vars and adjust temporarily their
8665 DECL_VALUE_EXPRs if needed. */
8667 static tree
8668 lower_omp_regimplify_operands_p (tree *tp, int *walk_subtrees,
8669 void *data)
8671 tree t = omp_member_access_dummy_var (*tp);
8672 if (t)
8674 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
8675 lower_omp_regimplify_operands_data *ldata
8676 = (lower_omp_regimplify_operands_data *) wi->info;
8677 tree o = maybe_lookup_decl (t, ldata->ctx);
8678 if (o != t)
8680 ldata->decls->safe_push (DECL_VALUE_EXPR (*tp));
8681 ldata->decls->safe_push (*tp);
8682 tree v = unshare_and_remap (DECL_VALUE_EXPR (*tp), t, o);
8683 SET_DECL_VALUE_EXPR (*tp, v);
8686 *walk_subtrees = !IS_TYPE_OR_DECL_P (*tp);
8687 return NULL_TREE;
8690 /* Wrapper around gimple_regimplify_operands that adjusts DECL_VALUE_EXPRs
8691 of omp_member_access_dummy_var vars during regimplification. */
8693 static void
8694 lower_omp_regimplify_operands (omp_context *ctx, gimple *stmt,
8695 gimple_stmt_iterator *gsi_p)
8697 auto_vec<tree, 10> decls;
8698 if (ctx)
8700 struct walk_stmt_info wi;
8701 memset (&wi, '\0', sizeof (wi));
8702 struct lower_omp_regimplify_operands_data data;
8703 data.ctx = ctx;
8704 data.decls = &decls;
8705 wi.info = &data;
8706 walk_gimple_op (stmt, lower_omp_regimplify_operands_p, &wi);
8708 gimple_regimplify_operands (stmt, gsi_p);
8709 while (!decls.is_empty ())
8711 tree t = decls.pop ();
8712 tree v = decls.pop ();
8713 SET_DECL_VALUE_EXPR (t, v);
8717 static void
8718 lower_omp_1 (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8720 gimple *stmt = gsi_stmt (*gsi_p);
8721 struct walk_stmt_info wi;
8722 gcall *call_stmt;
8724 if (gimple_has_location (stmt))
8725 input_location = gimple_location (stmt);
8727 if (task_shared_vars)
8728 memset (&wi, '\0', sizeof (wi));
8730 /* If we have issued syntax errors, avoid doing any heavy lifting.
8731 Just replace the OMP directives with a NOP to avoid
8732 confusing RTL expansion. */
8733 if (seen_error () && is_gimple_omp (stmt))
8735 gsi_replace (gsi_p, gimple_build_nop (), true);
8736 return;
8739 switch (gimple_code (stmt))
8741 case GIMPLE_COND:
8743 gcond *cond_stmt = as_a <gcond *> (stmt);
8744 if ((ctx || task_shared_vars)
8745 && (walk_tree (gimple_cond_lhs_ptr (cond_stmt),
8746 lower_omp_regimplify_p,
8747 ctx ? NULL : &wi, NULL)
8748 || walk_tree (gimple_cond_rhs_ptr (cond_stmt),
8749 lower_omp_regimplify_p,
8750 ctx ? NULL : &wi, NULL)))
8751 lower_omp_regimplify_operands (ctx, cond_stmt, gsi_p);
8753 break;
8754 case GIMPLE_CATCH:
8755 lower_omp (gimple_catch_handler_ptr (as_a <gcatch *> (stmt)), ctx);
8756 break;
8757 case GIMPLE_EH_FILTER:
8758 lower_omp (gimple_eh_filter_failure_ptr (stmt), ctx);
8759 break;
8760 case GIMPLE_TRY:
8761 lower_omp (gimple_try_eval_ptr (stmt), ctx);
8762 lower_omp (gimple_try_cleanup_ptr (stmt), ctx);
8763 break;
8764 case GIMPLE_TRANSACTION:
8765 lower_omp (gimple_transaction_body_ptr (as_a <gtransaction *> (stmt)),
8766 ctx);
8767 break;
8768 case GIMPLE_BIND:
8769 lower_omp (gimple_bind_body_ptr (as_a <gbind *> (stmt)), ctx);
8770 break;
8771 case GIMPLE_OMP_PARALLEL:
8772 case GIMPLE_OMP_TASK:
8773 ctx = maybe_lookup_ctx (stmt);
8774 gcc_assert (ctx);
8775 if (ctx->cancellable)
8776 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
8777 lower_omp_taskreg (gsi_p, ctx);
8778 break;
8779 case GIMPLE_OMP_FOR:
8780 ctx = maybe_lookup_ctx (stmt);
8781 gcc_assert (ctx);
8782 if (ctx->cancellable)
8783 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
8784 lower_omp_for (gsi_p, ctx);
8785 break;
8786 case GIMPLE_OMP_SECTIONS:
8787 ctx = maybe_lookup_ctx (stmt);
8788 gcc_assert (ctx);
8789 if (ctx->cancellable)
8790 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
8791 lower_omp_sections (gsi_p, ctx);
8792 break;
8793 case GIMPLE_OMP_SINGLE:
8794 ctx = maybe_lookup_ctx (stmt);
8795 gcc_assert (ctx);
8796 lower_omp_single (gsi_p, ctx);
8797 break;
8798 case GIMPLE_OMP_MASTER:
8799 ctx = maybe_lookup_ctx (stmt);
8800 gcc_assert (ctx);
8801 lower_omp_master (gsi_p, ctx);
8802 break;
8803 case GIMPLE_OMP_TASKGROUP:
8804 ctx = maybe_lookup_ctx (stmt);
8805 gcc_assert (ctx);
8806 lower_omp_taskgroup (gsi_p, ctx);
8807 break;
8808 case GIMPLE_OMP_ORDERED:
8809 ctx = maybe_lookup_ctx (stmt);
8810 gcc_assert (ctx);
8811 lower_omp_ordered (gsi_p, ctx);
8812 break;
8813 case GIMPLE_OMP_CRITICAL:
8814 ctx = maybe_lookup_ctx (stmt);
8815 gcc_assert (ctx);
8816 lower_omp_critical (gsi_p, ctx);
8817 break;
8818 case GIMPLE_OMP_ATOMIC_LOAD:
8819 if ((ctx || task_shared_vars)
8820 && walk_tree (gimple_omp_atomic_load_rhs_ptr (
8821 as_a <gomp_atomic_load *> (stmt)),
8822 lower_omp_regimplify_p, ctx ? NULL : &wi, NULL))
8823 lower_omp_regimplify_operands (ctx, stmt, gsi_p);
8824 break;
8825 case GIMPLE_OMP_TARGET:
8826 ctx = maybe_lookup_ctx (stmt);
8827 gcc_assert (ctx);
8828 lower_omp_target (gsi_p, ctx);
8829 break;
8830 case GIMPLE_OMP_TEAMS:
8831 ctx = maybe_lookup_ctx (stmt);
8832 gcc_assert (ctx);
8833 lower_omp_teams (gsi_p, ctx);
8834 break;
8835 case GIMPLE_OMP_GRID_BODY:
8836 ctx = maybe_lookup_ctx (stmt);
8837 gcc_assert (ctx);
8838 lower_omp_grid_body (gsi_p, ctx);
8839 break;
8840 case GIMPLE_CALL:
8841 tree fndecl;
8842 call_stmt = as_a <gcall *> (stmt);
8843 fndecl = gimple_call_fndecl (call_stmt);
8844 if (fndecl
8845 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
8846 switch (DECL_FUNCTION_CODE (fndecl))
8848 case BUILT_IN_GOMP_BARRIER:
8849 if (ctx == NULL)
8850 break;
8851 /* FALLTHRU */
8852 case BUILT_IN_GOMP_CANCEL:
8853 case BUILT_IN_GOMP_CANCELLATION_POINT:
8854 omp_context *cctx;
8855 cctx = ctx;
8856 if (gimple_code (cctx->stmt) == GIMPLE_OMP_SECTION)
8857 cctx = cctx->outer;
8858 gcc_assert (gimple_call_lhs (call_stmt) == NULL_TREE);
8859 if (!cctx->cancellable)
8861 if (DECL_FUNCTION_CODE (fndecl)
8862 == BUILT_IN_GOMP_CANCELLATION_POINT)
8864 stmt = gimple_build_nop ();
8865 gsi_replace (gsi_p, stmt, false);
8867 break;
8869 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_GOMP_BARRIER)
8871 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_BARRIER_CANCEL);
8872 gimple_call_set_fndecl (call_stmt, fndecl);
8873 gimple_call_set_fntype (call_stmt, TREE_TYPE (fndecl));
8875 tree lhs;
8876 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (fndecl)));
8877 gimple_call_set_lhs (call_stmt, lhs);
8878 tree fallthru_label;
8879 fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
8880 gimple *g;
8881 g = gimple_build_label (fallthru_label);
8882 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
8883 g = gimple_build_cond (NE_EXPR, lhs,
8884 fold_convert (TREE_TYPE (lhs),
8885 boolean_false_node),
8886 cctx->cancel_label, fallthru_label);
8887 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
8888 break;
8889 default:
8890 break;
8892 /* FALLTHRU */
8893 default:
8894 if ((ctx || task_shared_vars)
8895 && walk_gimple_op (stmt, lower_omp_regimplify_p,
8896 ctx ? NULL : &wi))
8898 /* Just remove clobbers; this should happen only if we have
8899 "privatized" local addressable variables in SIMD regions.
8900 The clobber isn't needed in that case, and gimplifying the address
8901 of the ARRAY_REF into a pointer and creating a MEM_REF based
8902 clobber would create worse code than we get with the clobber
8903 dropped. */
8904 if (gimple_clobber_p (stmt))
8906 gsi_replace (gsi_p, gimple_build_nop (), true);
8907 break;
8909 lower_omp_regimplify_operands (ctx, stmt, gsi_p);
8911 break;
8915 static void
8916 lower_omp (gimple_seq *body, omp_context *ctx)
8918 location_t saved_location = input_location;
8919 gimple_stmt_iterator gsi;
8920 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
8921 lower_omp_1 (&gsi, ctx);
8922 /* During gimplification, we haven't folded statements inside offloading
8923 or taskreg regions (gimplify.c:maybe_fold_stmt); do that now. */
8924 if (target_nesting_level || taskreg_nesting_level)
8925 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
8926 fold_stmt (&gsi);
8927 input_location = saved_location;
8930 /* Main entry point. */
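/* The pass first scans the function, building an omp_context for each
   construct (scan_omp), finalizes the communication record types of
   parallel/task contexts (finish_taskreg_scan), and only then rewrites
   the body (lower_omp).  */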
8932 static unsigned int
8933 execute_lower_omp (void)
8935 gimple_seq body;
8936 int i;
8937 omp_context *ctx;
8939 /* This pass always runs, to provide PROP_gimple_lomp.
8940 But often, there is nothing to do. */
8941 if (flag_cilkplus == 0 && flag_openacc == 0 && flag_openmp == 0
8942 && flag_openmp_simd == 0)
8943 return 0;
8945 all_contexts = splay_tree_new (splay_tree_compare_pointers, 0,
8946 delete_omp_context);
8948 body = gimple_body (current_function_decl);
8950 if (hsa_gen_requested_p ())
8951 omp_grid_gridify_all_targets (&body);
8953 scan_omp (&body, NULL);
8954 gcc_assert (taskreg_nesting_level == 0);
8955 FOR_EACH_VEC_ELT (taskreg_contexts, i, ctx)
8956 finish_taskreg_scan (ctx);
8957 taskreg_contexts.release ();
8959 if (all_contexts->root)
8961 if (task_shared_vars)
8962 push_gimplify_context ();
8963 lower_omp (&body, NULL);
8964 if (task_shared_vars)
8965 pop_gimplify_context (NULL);
8968 if (all_contexts)
8970 splay_tree_delete (all_contexts);
8971 all_contexts = NULL;
8973 BITMAP_FREE (task_shared_vars);
8974 return 0;
8977 namespace {
8979 const pass_data pass_data_lower_omp =
8981 GIMPLE_PASS, /* type */
8982 "omplower", /* name */
8983 OPTGROUP_OMP, /* optinfo_flags */
8984 TV_NONE, /* tv_id */
8985 PROP_gimple_any, /* properties_required */
8986 PROP_gimple_lomp | PROP_gimple_lomp_dev, /* properties_provided */
8987 0, /* properties_destroyed */
8988 0, /* todo_flags_start */
8989 0, /* todo_flags_finish */
8992 class pass_lower_omp : public gimple_opt_pass
8994 public:
8995 pass_lower_omp (gcc::context *ctxt)
8996 : gimple_opt_pass (pass_data_lower_omp, ctxt)
8999 /* opt_pass methods: */
9000 virtual unsigned int execute (function *) { return execute_lower_omp (); }
9002 }; // class pass_lower_omp
9004 } // anon namespace
9006 gimple_opt_pass *
9007 make_pass_lower_omp (gcc::context *ctxt)
9009 return new pass_lower_omp (ctxt);
9012 /* The following is a utility to diagnose structured block violations.
9013 It is not part of the "omplower" pass, as that's invoked too late. It
9014 should be invoked by the respective front ends after gimplification. */
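/* For example, assuming -fopenmp:

	goto l;		     error: invalid entry to OpenMP structured block
	#pragma omp parallel
	{ l:; }

   Pass 1 below records the innermost construct enclosing each label;
   pass 2 compares that context with the context of every branch.  */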
9016 static splay_tree all_labels;
9018 /* Check for mismatched contexts and generate an error if needed. Return
9019 true if an error is detected. */
9021 static bool
9022 diagnose_sb_0 (gimple_stmt_iterator *gsi_p,
9023 gimple *branch_ctx, gimple *label_ctx)
9025 gcc_checking_assert (!branch_ctx || is_gimple_omp (branch_ctx));
9026 gcc_checking_assert (!label_ctx || is_gimple_omp (label_ctx));
9028 if (label_ctx == branch_ctx)
9029 return false;
9031 const char* kind = NULL;
9033 if (flag_cilkplus)
9035 if ((branch_ctx
9036 && gimple_code (branch_ctx) == GIMPLE_OMP_FOR
9037 && gimple_omp_for_kind (branch_ctx) == GF_OMP_FOR_KIND_CILKSIMD)
9038 || (label_ctx
9039 && gimple_code (label_ctx) == GIMPLE_OMP_FOR
9040 && gimple_omp_for_kind (label_ctx) == GF_OMP_FOR_KIND_CILKSIMD))
9041 kind = "Cilk Plus";
9043 if (flag_openacc)
9045 if ((branch_ctx && is_gimple_omp_oacc (branch_ctx))
9046 || (label_ctx && is_gimple_omp_oacc (label_ctx)))
9048 gcc_checking_assert (kind == NULL);
9049 kind = "OpenACC";
9052 if (kind == NULL)
9054 gcc_checking_assert (flag_openmp);
9055 kind = "OpenMP";
9058 /* Previously we kept track of the label's entire context in diagnose_sb_[12]
9059 so we could traverse it and issue a correct "exit" or "enter" error
9060 message upon a structured block violation.
9062 We built the context as a tree_cons'd list, but there is
9063 no easy counterpart in gimple tuples. It seems like far too much work
9064 for issuing exit/enter error messages. If someone really misses the
9065 distinct error message... patches welcome. */
9067 #if 0
9068 /* Try to avoid confusing the user by producing an error message
9069 with correct "exit" or "enter" verbiage. We prefer "exit"
9070 unless we can show that LABEL_CTX is nested within BRANCH_CTX. */
9071 if (branch_ctx == NULL)
9072 exit_p = false;
9073 else
9075 while (label_ctx)
9077 if (TREE_VALUE (label_ctx) == branch_ctx)
9079 exit_p = false;
9080 break;
9082 label_ctx = TREE_CHAIN (label_ctx);
9086 if (exit_p)
9087 error ("invalid exit from %s structured block", kind);
9088 else
9089 error ("invalid entry to %s structured block", kind);
9090 #endif
9092 /* If it's obvious we have an invalid entry, be specific about the error. */
9093 if (branch_ctx == NULL)
9094 error ("invalid entry to %s structured block", kind);
9095 else
9097 /* Otherwise, be vague and lazy, but efficient. */
9098 error ("invalid branch to/from %s structured block", kind);
9101 gsi_replace (gsi_p, gimple_build_nop (), false);
9102 return true;
9105 /* Pass 1: Create a minimal tree of structured blocks, and record
9106 where each label is found. */
9108 static tree
9109 diagnose_sb_1 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
9110 struct walk_stmt_info *wi)
9112 gimple *context = (gimple *) wi->info;
9113 gimple *inner_context;
9114 gimple *stmt = gsi_stmt (*gsi_p);
9116 *handled_ops_p = true;
9118 switch (gimple_code (stmt))
9120 WALK_SUBSTMTS;
9122 case GIMPLE_OMP_PARALLEL:
9123 case GIMPLE_OMP_TASK:
9124 case GIMPLE_OMP_SECTIONS:
9125 case GIMPLE_OMP_SINGLE:
9126 case GIMPLE_OMP_SECTION:
9127 case GIMPLE_OMP_MASTER:
9128 case GIMPLE_OMP_ORDERED:
9129 case GIMPLE_OMP_CRITICAL:
9130 case GIMPLE_OMP_TARGET:
9131 case GIMPLE_OMP_TEAMS:
9132 case GIMPLE_OMP_TASKGROUP:
9133 /* The minimal context here is just the current OMP construct. */
9134 inner_context = stmt;
9135 wi->info = inner_context;
9136 walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
9137 wi->info = context;
9138 break;
9140 case GIMPLE_OMP_FOR:
9141 inner_context = stmt;
9142 wi->info = inner_context;
9143 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
9144 walk them. */
9145 walk_gimple_seq (gimple_omp_for_pre_body (stmt),
9146 diagnose_sb_1, NULL, wi);
9147 walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
9148 wi->info = context;
9149 break;
9151 case GIMPLE_LABEL:
9152 splay_tree_insert (all_labels,
9153 (splay_tree_key) gimple_label_label (
9154 as_a <glabel *> (stmt)),
9155 (splay_tree_value) context);
9156 break;
9158 default:
9159 break;
9162 return NULL_TREE;
9165 /* Pass 2: Check each branch and see if its context differs from that of
9166 the destination label's context. */
9168 static tree
9169 diagnose_sb_2 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
9170 struct walk_stmt_info *wi)
9172 gimple *context = (gimple *) wi->info;
9173 splay_tree_node n;
9174 gimple *stmt = gsi_stmt (*gsi_p);
9176 *handled_ops_p = true;
9178 switch (gimple_code (stmt))
9180 WALK_SUBSTMTS;
9182 case GIMPLE_OMP_PARALLEL:
9183 case GIMPLE_OMP_TASK:
9184 case GIMPLE_OMP_SECTIONS:
9185 case GIMPLE_OMP_SINGLE:
9186 case GIMPLE_OMP_SECTION:
9187 case GIMPLE_OMP_MASTER:
9188 case GIMPLE_OMP_ORDERED:
9189 case GIMPLE_OMP_CRITICAL:
9190 case GIMPLE_OMP_TARGET:
9191 case GIMPLE_OMP_TEAMS:
9192 case GIMPLE_OMP_TASKGROUP:
9193 wi->info = stmt;
9194 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
9195 wi->info = context;
9196 break;
9198 case GIMPLE_OMP_FOR:
9199 wi->info = stmt;
9200 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
9201 walk them. */
9202 walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt),
9203 diagnose_sb_2, NULL, wi);
9204 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
9205 wi->info = context;
9206 break;
9208 case GIMPLE_COND:
9210 gcond *cond_stmt = as_a <gcond *> (stmt);
9211 tree lab = gimple_cond_true_label (cond_stmt);
9212 if (lab)
9214 n = splay_tree_lookup (all_labels,
9215 (splay_tree_key) lab);
9216 diagnose_sb_0 (gsi_p, context,
9217 n ? (gimple *) n->value : NULL);
9219 lab = gimple_cond_false_label (cond_stmt);
9220 if (lab)
9222 n = splay_tree_lookup (all_labels,
9223 (splay_tree_key) lab);
9224 diagnose_sb_0 (gsi_p, context,
9225 n ? (gimple *) n->value : NULL);
9228 break;
9230 case GIMPLE_GOTO:
9232 tree lab = gimple_goto_dest (stmt);
9233 if (TREE_CODE (lab) != LABEL_DECL)
9234 break;
9236 n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
9237 diagnose_sb_0 (gsi_p, context, n ? (gimple *) n->value : NULL);
9239 break;
9241 case GIMPLE_SWITCH:
9243 gswitch *switch_stmt = as_a <gswitch *> (stmt);
9244 unsigned int i;
9245 for (i = 0; i < gimple_switch_num_labels (switch_stmt); ++i)
9247 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
9248 n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
9249 if (n && diagnose_sb_0 (gsi_p, context, (gimple *) n->value))
9250 break;
9253 break;
9255 case GIMPLE_RETURN:
9256 diagnose_sb_0 (gsi_p, context, NULL);
9257 break;
9259 default:
9260 break;
9263 return NULL_TREE;
9266 static unsigned int
9267 diagnose_omp_structured_block_errors (void)
9269 struct walk_stmt_info wi;
9270 gimple_seq body = gimple_body (current_function_decl);
9272 all_labels = splay_tree_new (splay_tree_compare_pointers, 0, 0);
9274 memset (&wi, 0, sizeof (wi));
9275 walk_gimple_seq (body, diagnose_sb_1, NULL, &wi);
9277 memset (&wi, 0, sizeof (wi));
9278 wi.want_locations = true;
9279 walk_gimple_seq_mod (&body, diagnose_sb_2, NULL, &wi);
9281 gimple_set_body (current_function_decl, body);
9283 splay_tree_delete (all_labels);
9284 all_labels = NULL;
9286 return 0;
9289 namespace {
9291 const pass_data pass_data_diagnose_omp_blocks =
9293 GIMPLE_PASS, /* type */
9294 "*diagnose_omp_blocks", /* name */
9295 OPTGROUP_OMP, /* optinfo_flags */
9296 TV_NONE, /* tv_id */
9297 PROP_gimple_any, /* properties_required */
9298 0, /* properties_provided */
9299 0, /* properties_destroyed */
9300 0, /* todo_flags_start */
9301 0, /* todo_flags_finish */
9304 class pass_diagnose_omp_blocks : public gimple_opt_pass
9306 public:
9307 pass_diagnose_omp_blocks (gcc::context *ctxt)
9308 : gimple_opt_pass (pass_data_diagnose_omp_blocks, ctxt)
9311 /* opt_pass methods: */
9312 virtual bool gate (function *)
9314 return flag_cilkplus || flag_openacc || flag_openmp;
9316 virtual unsigned int execute (function *)
9318 return diagnose_omp_structured_block_errors ();
9321 }; // class pass_diagnose_omp_blocks
9323 } // anon namespace
9325 gimple_opt_pass *
9326 make_pass_diagnose_omp_blocks (gcc::context *ctxt)
9328 return new pass_diagnose_omp_blocks (ctxt);
9332 #include "gt-omp-low.h"